diff --git a/swh/loader/package/archive/loader.py b/swh/loader/package/archive/loader.py
index 567ba39..1f3c924 100644
--- a/swh/loader/package/archive/loader.py
+++ b/swh/loader/package/archive/loader.py
@@ -1,180 +1,166 @@
 # Copyright (C) 2019-2021  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import datetime
 import hashlib
 import logging
 from os import path
 import string
 from typing import Any, Dict, Iterator, Optional, Sequence, Tuple, Union
 
 import attr
 import iso8601
 
 from swh.loader.package.loader import BasePackageInfo, PackageLoader, PartialExtID
 from swh.loader.package.utils import release_name
 from swh.model.model import (
     Person,
     Revision,
     RevisionType,
     Sha1Git,
     TimestampWithTimezone,
 )
 from swh.storage.interface import StorageInterface
 
 logger = logging.getLogger(__name__)
 SWH_PERSON = Person(
     name=b"Software Heritage",
     fullname=b"Software Heritage",
     email=b"robot@softwareheritage.org",
 )
 REVISION_MESSAGE = b"swh-loader-package: synthetic revision message"
 
 
 @attr.s
 class ArchivePackageInfo(BasePackageInfo):
     raw_info = attr.ib(type=Dict[str, Any])
     length = attr.ib(type=int)
     """Size of the archive file"""
     time = attr.ib(type=Union[str, datetime.datetime])
     """Timestamp of the archive file on the server"""
     version = attr.ib(type=str)
 
     # default format for gnu
     MANIFEST_FORMAT = string.Template("$time $length $version $url")
 
     def extid(self, manifest_format: Optional[string.Template] = None) -> PartialExtID:
         """Returns a unique intrinsic identifier of this package info
 
         ``manifest_format`` allows overriding the class' default MANIFEST_FORMAT"""
         manifest_format = manifest_format or self.MANIFEST_FORMAT
         # TODO: use parsed attributes instead of self.raw_info
         manifest = manifest_format.substitute(
             {k: str(v) for (k, v) in self.raw_info.items()}
         )
         return (self.EXTID_TYPE, hashlib.sha256(manifest.encode()).digest())
 
     @classmethod
     def from_metadata(cls, a_metadata: Dict[str, Any]) -> "ArchivePackageInfo":
         url = a_metadata["url"]
         filename = a_metadata.get("filename")
         return cls(
             url=url,
             filename=filename if filename else path.split(url)[-1],
             raw_info=a_metadata,
             length=a_metadata["length"],
             time=a_metadata["time"],
             version=a_metadata["version"],
         )
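

 # Illustrative sketch, not part of the loader: how the default GNU manifest
 # template above turns an artifact dict into an extid. The artifact values
 # below are hypothetical (they mirror the 8sync fixture used in the tests).
 def _example_archive_extid() -> PartialExtID:
     artifact = {
         "url": "https://ftp.gnu.org/gnu/8sync/8sync-0.1.0.tar.gz",
         "time": 944729610,
         "length": 221837,
         "version": "0.1.0",
     }
     p_info = ArchivePackageInfo.from_metadata(artifact)
     # sha256 of "944729610 221837 0.1.0 https://ftp.gnu.org/gnu/8sync/8sync-0.1.0.tar.gz"  # noqa
     return p_info.extid()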
 
 
 class ArchiveLoader(PackageLoader[ArchivePackageInfo]):
     """Load archive origin's artifact files into swh archive
 
     """
 
     visit_type = "tar"
 
     def __init__(
         self,
         storage: StorageInterface,
         url: str,
         artifacts: Sequence[Dict[str, Any]],
         extid_manifest_format: Optional[str] = None,
         max_content_size: Optional[int] = None,
     ):
         f"""Loader constructor.
 
         For now, this is the lister's task output.
 
         Args:
             url: Origin url
             artifacts: List of artifact information with keys:
 
                - **time**: last modification time as either isoformat date
                  string or timestamp
 
                - **url**: the artifact url to retrieve
 
                - **filename**: optionally, the file's name
 
                - **version**: artifact's version
 
                - **length**: artifact's length
 
             extid_manifest_format: template string used to format a manifest,
                 which is hashed to get the extid of a package.
                 Defaults to {ArchivePackageInfo.MANIFEST_FORMAT!r}
 
         """
         super().__init__(storage=storage, url=url, max_content_size=max_content_size)
         self.artifacts = artifacts  # assume order is enforced in the lister
         self.extid_manifest_format = (
             None
             if extid_manifest_format is None
             else string.Template(extid_manifest_format)
         )
 
     def get_versions(self) -> Sequence[str]:
         versions = []
         for archive in self.artifacts:
             v = archive.get("version")
             if v:
                 versions.append(v)
         return versions
 
     def get_default_version(self) -> str:
         # It's the most recent, so for this loader, it's the last one
         return self.artifacts[-1]["version"]
 
     def get_package_info(
         self, version: str
     ) -> Iterator[Tuple[str, ArchivePackageInfo]]:
         for a_metadata in self.artifacts:
             p_info = ArchivePackageInfo.from_metadata(a_metadata)
             if version == p_info.version:
                 # FIXME: this code assumes we have only 1 artifact per
                 # versioned package
                 yield release_name(version), p_info
 
     def new_packageinfo_to_extid(
         self, p_info: ArchivePackageInfo
     ) -> Optional[PartialExtID]:
         return p_info.extid(manifest_format=self.extid_manifest_format)
 
-    def known_artifact_to_extid(self, known_artifact: Dict) -> Optional[PartialExtID]:
-        known_artifact_info = ArchivePackageInfo.from_metadata(
-            known_artifact["extrinsic"]["raw"]
-        )
-        return known_artifact_info.extid(manifest_format=self.extid_manifest_format)
-
     def build_revision(
         self, p_info: ArchivePackageInfo, uncompressed_path: str, directory: Sha1Git
     ) -> Optional[Revision]:
         time = p_info.time  # assume it's a timestamp
         if isinstance(time, str):  # otherwise, assume it's a parsable date
             parsed_time = iso8601.parse_date(time)
         else:
             parsed_time = time
         normalized_time = TimestampWithTimezone.from_datetime(parsed_time)
         return Revision(
             type=RevisionType.TAR,
             message=REVISION_MESSAGE,
             date=normalized_time,
             author=SWH_PERSON,
             committer=SWH_PERSON,
             committer_date=normalized_time,
             parents=(),
             directory=directory,
             synthetic=True,
-            metadata={
-                "intrinsic": {},
-                "extrinsic": {
-                    "provider": self.url,
-                    "when": self.visit_date.isoformat(),
-                    "raw": p_info.raw_info,
-                },
-            },
         )
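

 # Illustrative sketch, not part of the loader: instantiating the loader with a
 # custom extid manifest format, as described in the constructor docstring. The
 # origin url and artifact values are hypothetical.
 def _example_custom_manifest_loader(storage: StorageInterface) -> ArchiveLoader:
     return ArchiveLoader(
         storage,
         url="https://example.org/project/",
         artifacts=[
             {
                 "time": "1999-12-09T09:53:30+00:00",
                 "url": "https://example.org/project/project-0.2.0.tar.gz",
                 "length": 238466,
                 "version": "0.2.0",
                 "sha256": "d5d1051e59b2be6f065a9fc6aedd3a391e44d0274b78b9bb4e2b57a09134dbe4",  # noqa
             }
         ],
         # hash "$sha256 $length $url" instead of the default GNU manifest
         extid_manifest_format="$sha256 $length $url",
     )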
diff --git a/swh/loader/package/archive/tests/test_archive.py b/swh/loader/package/archive/tests/test_archive.py
index c6d422b..1e475cc 100644
--- a/swh/loader/package/archive/tests/test_archive.py
+++ b/swh/loader/package/archive/tests/test_archive.py
@@ -1,375 +1,343 @@
 # Copyright (C) 2019-2021 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import hashlib
 import string
 
 import attr
 import pytest
 
 from swh.loader.package.archive.loader import ArchiveLoader, ArchivePackageInfo
-from swh.loader.package.tests.common import check_metadata_paths
 from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
 from swh.model.hashutil import hash_to_bytes
 from swh.model.model import Snapshot, SnapshotBranch, TargetType
 
 URL = "https://ftp.gnu.org/gnu/8sync/"
 GNU_ARTIFACTS = [
     {
         "time": 944729610,
         "url": "https://ftp.gnu.org/gnu/8sync/8sync-0.1.0.tar.gz",
         "length": 221837,
         "filename": "8sync-0.1.0.tar.gz",
         "version": "0.1.0",
     }
 ]
 
 _expected_new_contents_first_visit = [
     "e9258d81faf5881a2f96a77ba609396f82cb97ad",
     "1170cf105b04b7e2822a0e09d2acf71da7b9a130",
     "fbd27c3f41f2668624ffc80b7ba5db9b92ff27ac",
     "0057bec9b5422aff9256af240b177ac0e3ac2608",
     "2b8d0d0b43a1078fc708930c8ddc2956a86c566e",
     "27de3b3bc6545d2a797aeeb4657c0e215a0c2e55",
     "2e6db43f5cd764e677f416ff0d0c78c7a82ef19b",
     "ae9be03bd2a06ed8f4f118d3fe76330bb1d77f62",
     "edeb33282b2bffa0e608e9d2fd960fd08093c0ea",
     "d64e64d4c73679323f8d4cde2643331ba6c20af9",
     "7a756602914be889c0a2d3952c710144b3e64cb0",
     "84fb589b554fcb7f32b806951dcf19518d67b08f",
     "8624bcdae55baeef00cd11d5dfcfa60f68710a02",
     "e08441aeab02704cfbd435d6445f7c072f8f524e",
     "f67935bc3a83a67259cda4b2d43373bd56703844",
     "809788434b433eb2e3cfabd5d591c9a659d5e3d8",
     "7d7c6c8c5ebaeff879f61f37083a3854184f6c41",
     "b99fec102eb24bffd53ab61fc30d59e810f116a2",
     "7d149b28eaa228b3871c91f0d5a95a2fa7cb0c68",
     "f0c97052e567948adf03e641301e9983c478ccff",
     "7fb724242e2b62b85ca64190c31dcae5303e19b3",
     "4f9709e64a9134fe8aefb36fd827b84d8b617ab5",
     "7350628ccf194c2c3afba4ac588c33e3f3ac778d",
     "0bb892d9391aa706dc2c3b1906567df43cbe06a2",
     "49d4c0ce1a16601f1e265d446b6c5ea6b512f27c",
     "6b5cc594ac466351450f7f64a0b79fdaf4435ad3",
     "3046e5d1f70297e2a507b98224b6222c9688d610",
     "1572607d456d7f633bc6065a2b3048496d679a31",
 ]
 
 _expected_new_directories_first_visit = [
     "daabc65ec75d487b1335ffc101c0ac11c803f8fc",
     "263be23b4a8101d3ad0d9831319a3e0f2b065f36",
     "7f6e63ba6eb3e2236f65892cd822041f1a01dd5c",
     "4db0a3ecbc976083e2dac01a62f93729698429a3",
     "dfef1c80e1098dd5deda664bb44a9ab1f738af13",
     "eca971d346ea54d95a6e19d5051f900237fafdaa",
     "3aebc29ed1fccc4a6f2f2010fb8e57882406b528",
 ]
 
 _expected_new_revisions_first_visit = {
     "44183488c0774ce3c957fa19ba695cf18a4a42b3": (
         "3aebc29ed1fccc4a6f2f2010fb8e57882406b528"
     )
 }
 
 
 def test_archive_visit_with_no_artifact_found(swh_storage, requests_mock_datadir):
     url = URL
     unknown_artifact_url = "https://ftp.g.o/unknown/8sync-0.1.0.tar.gz"
     loader = ArchiveLoader(
         swh_storage,
         url,
         artifacts=[
             {
                 "time": 944729610,
                 "url": unknown_artifact_url,  # unknown artifact
                 "length": 221837,
                 "filename": "8sync-0.1.0.tar.gz",
                 "version": "0.1.0",
             }
         ],
     )
 
     actual_load_status = loader.load()
     assert actual_load_status["status"] == "uneventful"
     assert actual_load_status["snapshot_id"] is not None
     stats = get_stats(swh_storage)
 
     assert {
         "content": 0,
         "directory": 0,
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": 0,
         "skipped_content": 0,
         "snapshot": 1,
     } == stats
 
     assert_last_visit_matches(swh_storage, url, status="partial", type="tar")
 
 
-def test_archive_check_revision_metadata_structure(swh_storage, requests_mock_datadir):
-    loader = ArchiveLoader(swh_storage, URL, artifacts=GNU_ARTIFACTS)
-
-    actual_load_status = loader.load()
-    assert actual_load_status["status"] == "eventful"
-    assert actual_load_status["snapshot_id"] is not None
-
-    assert_last_visit_matches(swh_storage, URL, status="full", type="tar")
-
-    expected_revision_id = hash_to_bytes("44183488c0774ce3c957fa19ba695cf18a4a42b3")
-    revision = swh_storage.revision_get([expected_revision_id])[0]
-    assert revision is not None
-
-    check_metadata_paths(
-        revision.metadata,
-        paths=[
-            ("intrinsic", dict),
-            ("extrinsic.provider", str),
-            ("extrinsic.when", str),
-            ("extrinsic.raw", dict),
-            ("original_artifact", list),
-        ],
-    )
-
-    for original_artifact in revision.metadata["original_artifact"]:
-        check_metadata_paths(
-            original_artifact,
-            paths=[("filename", str), ("length", int), ("checksums", dict),],
-        )
-
-
 def test_archive_visit_with_release_artifact_no_prior_visit(
     swh_storage, requests_mock_datadir
 ):
     """With no prior visit, load a gnu project ends up with 1 snapshot
 
     """
     loader = ArchiveLoader(swh_storage, URL, artifacts=GNU_ARTIFACTS)
 
     actual_load_status = loader.load()
     assert actual_load_status["status"] == "eventful"
 
     expected_snapshot_first_visit_id = hash_to_bytes(
         "c419397fd912039825ebdbea378bc6283f006bf5"
     )
 
     assert (
         hash_to_bytes(actual_load_status["snapshot_id"])
         == expected_snapshot_first_visit_id
     )
 
     assert_last_visit_matches(swh_storage, URL, status="full", type="tar")
 
     stats = get_stats(swh_storage)
     assert {
         "content": len(_expected_new_contents_first_visit),
         "directory": len(_expected_new_directories_first_visit),
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": len(_expected_new_revisions_first_visit),
         "skipped_content": 0,
         "snapshot": 1,
     } == stats
 
     expected_contents = map(hash_to_bytes, _expected_new_contents_first_visit)
     assert list(swh_storage.content_missing_per_sha1(expected_contents)) == []
 
     expected_dirs = map(hash_to_bytes, _expected_new_directories_first_visit)
     assert list(swh_storage.directory_missing(expected_dirs)) == []
 
     expected_revs = map(hash_to_bytes, _expected_new_revisions_first_visit)
     assert list(swh_storage.revision_missing(expected_revs)) == []
 
     expected_snapshot = Snapshot(
         id=expected_snapshot_first_visit_id,
         branches={
             b"HEAD": SnapshotBranch(
                 target_type=TargetType.ALIAS, target=b"releases/0.1.0",
             ),
             b"releases/0.1.0": SnapshotBranch(
                 target_type=TargetType.REVISION,
                 target=hash_to_bytes("44183488c0774ce3c957fa19ba695cf18a4a42b3"),
             ),
         },
     )
 
     check_snapshot(expected_snapshot, swh_storage)
 
 
 def test_archive_2_visits_without_change(swh_storage, requests_mock_datadir):
     """With no prior visit, load a gnu project ends up with 1 snapshot
 
     """
     url = URL
     loader = ArchiveLoader(swh_storage, url, artifacts=GNU_ARTIFACTS)
 
     actual_load_status = loader.load()
     assert actual_load_status["status"] == "eventful"
     assert actual_load_status["snapshot_id"] is not None
 
     assert_last_visit_matches(swh_storage, url, status="full", type="tar")
 
     actual_load_status2 = loader.load()
     assert actual_load_status2["status"] == "uneventful"
     assert actual_load_status2["snapshot_id"] is not None
     assert actual_load_status["snapshot_id"] == actual_load_status2["snapshot_id"]
 
     assert_last_visit_matches(swh_storage, url, status="full", type="tar")
 
     urls = [
         m.url
         for m in requests_mock_datadir.request_history
         if m.url.startswith("https://ftp.gnu.org")
     ]
     assert len(urls) == 1
 
 
 def test_archive_2_visits_with_new_artifact(swh_storage, requests_mock_datadir):
     """With no prior visit, load a gnu project ends up with 1 snapshot
 
     """
     url = URL
     artifact1 = GNU_ARTIFACTS[0]
     loader = ArchiveLoader(swh_storage, url, [artifact1])
 
     actual_load_status = loader.load()
     assert actual_load_status["status"] == "eventful"
     assert actual_load_status["snapshot_id"] is not None
 
     assert_last_visit_matches(swh_storage, url, status="full", type="tar")
 
     stats = get_stats(swh_storage)
     assert {
         "content": len(_expected_new_contents_first_visit),
         "directory": len(_expected_new_directories_first_visit),
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": len(_expected_new_revisions_first_visit),
         "skipped_content": 0,
         "snapshot": 1,
     } == stats
 
     urls = [
         m.url
         for m in requests_mock_datadir.request_history
         if m.url.startswith("https://ftp.gnu.org")
     ]
     assert len(urls) == 1
 
     artifact2 = {
         "time": 1480991830,
         "url": "https://ftp.gnu.org/gnu/8sync/8sync-0.2.0.tar.gz",
         "length": 238466,
         "filename": "8sync-0.2.0.tar.gz",
         "version": "0.2.0",
     }
 
     loader2 = ArchiveLoader(swh_storage, url, [artifact1, artifact2])
     stats2 = get_stats(swh_storage)
     assert stats == stats2  # ensure we share the storage
 
     actual_load_status2 = loader2.load()
     assert actual_load_status2["status"] == "eventful"
     assert actual_load_status2["snapshot_id"] is not None
 
     stats2 = get_stats(swh_storage)
     assert {
         "content": len(_expected_new_contents_first_visit) + 14,
         "directory": len(_expected_new_directories_first_visit) + 8,
         "origin": 1,
         "origin_visit": 1 + 1,
         "release": 0,
         "revision": len(_expected_new_revisions_first_visit) + 1,
         "skipped_content": 0,
         "snapshot": 1 + 1,
     } == stats2
 
     assert_last_visit_matches(swh_storage, url, status="full", type="tar")
 
     urls = [
         m.url
         for m in requests_mock_datadir.request_history
         if m.url.startswith("https://ftp.gnu.org")
     ]
     # 1 artifact (2nd time no modification) + 1 new artifact
     assert len(urls) == 2
 
 
 def test_archive_2_visits_without_change_not_gnu(swh_storage, requests_mock_datadir):
     """Load a project archive (not gnu) ends up with 1 snapshot
 
     """
     url = "https://something.else.org/8sync/"
     artifacts = [  # this is not a gnu artifact
         {
             "time": "1999-12-09T09:53:30+00:00",  # it's also not a timestamp
             "sha256": "d5d1051e59b2be6f065a9fc6aedd3a391e44d0274b78b9bb4e2b57a09134dbe4",  # noqa
             # keep a gnu artifact reference to avoid adding other test files
             "url": "https://ftp.gnu.org/gnu/8sync/8sync-0.2.0.tar.gz",
             "length": 238466,
             "filename": "8sync-0.2.0.tar.gz",
             "version": "0.2.0",
         }
     ]
 
     # Here the loader defines a custom extid manifest format to check for
     # existence in the snapshot; it is not the default archive loader format,
     # which relies on the artifact's time, length, version and url
     loader = ArchiveLoader(
         swh_storage,
         url,
         artifacts=artifacts,
         extid_manifest_format="$sha256 $length $url",
     )
 
     actual_load_status = loader.load()
     assert actual_load_status["status"] == "eventful"
     assert actual_load_status["snapshot_id"] is not None
     assert_last_visit_matches(swh_storage, url, status="full", type="tar")
 
     actual_load_status2 = loader.load()
     assert actual_load_status2["status"] == "uneventful"
     assert actual_load_status2["snapshot_id"] == actual_load_status["snapshot_id"]
     assert_last_visit_matches(swh_storage, url, status="full", type="tar")
 
     urls = [
         m.url
         for m in requests_mock_datadir.request_history
         if m.url.startswith("https://ftp.gnu.org")
     ]
     assert len(urls) == 1
 
 
 def test_archive_extid():
     """Compute primary key should return the right identity
 
     """
 
     @attr.s
     class TestPackageInfo(ArchivePackageInfo):
         a = attr.ib()
         b = attr.ib()
 
     metadata = GNU_ARTIFACTS[0]
 
     p_info = TestPackageInfo(
         raw_info={**metadata, "a": 1, "b": 2}, a=1, b=2, **metadata,
     )
 
     for manifest_format, expected_manifest in [
         (string.Template("$a $b"), b"1 2"),
         (string.Template(""), b""),
         (None, "{time} {length} {version} {url}".format(**metadata).encode()),
     ]:
         actual_id = p_info.extid(manifest_format=manifest_format)
         assert actual_id == (
             "package-manifest-sha256",
             hashlib.sha256(expected_manifest).digest(),
         )
 
     with pytest.raises(KeyError):
         p_info.extid(manifest_format=string.Template("$a $unknown_key"))
diff --git a/swh/loader/package/cran/loader.py b/swh/loader/package/cran/loader.py
index e4191ee..63f20c0 100644
--- a/swh/loader/package/cran/loader.py
+++ b/swh/loader/package/cran/loader.py
@@ -1,197 +1,185 @@
 # Copyright (C) 2019-2021  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import datetime
 from datetime import timezone
 import logging
 import os
 from os import path
 import re
 import string
 from typing import Any, Dict, Iterator, List, Optional, Tuple
 
 import attr
 import dateutil.parser
 from debian.deb822 import Deb822
 
-from swh.loader.package.loader import BasePackageInfo, PackageLoader, PartialExtID
+from swh.loader.package.loader import BasePackageInfo, PackageLoader
 from swh.loader.package.utils import release_name
 from swh.model.model import (
     Person,
     Revision,
     RevisionType,
     Sha1Git,
     TimestampWithTimezone,
 )
 from swh.storage.interface import StorageInterface
 
 logger = logging.getLogger(__name__)
 
 
 DATE_PATTERN = re.compile(r"^(?P<year>\d{4})-(?P<month>\d{2})$")
 
 
 @attr.s
 class CRANPackageInfo(BasePackageInfo):
     raw_info = attr.ib(type=Dict[str, Any])
     version = attr.ib(type=str)
 
     EXTID_TYPE = "cran-sha256"
     MANIFEST_FORMAT = string.Template("$version $url")
 
     @classmethod
     def from_metadata(cls, a_metadata: Dict[str, Any]) -> "CRANPackageInfo":
         url = a_metadata["url"]
         return CRANPackageInfo(
             url=url,
             filename=path.basename(url),
             raw_info=a_metadata,
             version=a_metadata["version"],
         )
 
 
 class CRANLoader(PackageLoader[CRANPackageInfo]):
     visit_type = "cran"
 
     def __init__(
         self,
         storage: StorageInterface,
         url: str,
         artifacts: List[Dict],
         max_content_size: Optional[int] = None,
     ):
         """Loader constructor.
 
         Args:
             url: Origin url to retrieve cran artifact(s) from
             artifacts: List of associated artifact for the origin url
 
         """
         super().__init__(storage=storage, url=url, max_content_size=max_content_size)
         # make explicit what we consider the artifact identity
         self.artifacts = artifacts
 
     def get_versions(self) -> List[str]:
         versions = []
         for artifact in self.artifacts:
             versions.append(artifact["version"])
         return versions
 
     def get_default_version(self) -> str:
         return self.artifacts[-1]["version"]
 
     def get_package_info(self, version: str) -> Iterator[Tuple[str, CRANPackageInfo]]:
         for a_metadata in self.artifacts:
             p_info = CRANPackageInfo.from_metadata(a_metadata)
             if version == p_info.version:
                 yield release_name(version), p_info
 
-    @staticmethod
-    def known_artifact_to_extid(known_artifact: Dict) -> Optional[PartialExtID]:
-        return CRANPackageInfo.from_metadata(known_artifact["extrinsic"]["raw"]).extid()
-
     def build_revision(
         self, p_info: CRANPackageInfo, uncompressed_path: str, directory: Sha1Git
     ) -> Optional[Revision]:
         # a_metadata is empty
         metadata = extract_intrinsic_metadata(uncompressed_path)
         date = parse_date(metadata.get("Date"))
         author = Person.from_fullname(metadata.get("Maintainer", "").encode())
         version = metadata.get("Version", p_info.version)
         return Revision(
             message=version.encode("utf-8"),
             type=RevisionType.TAR,
             date=date,
             author=author,
             committer=author,
             committer_date=date,
             parents=(),
             directory=directory,
             synthetic=True,
-            metadata={
-                "intrinsic": {"tool": "DESCRIPTION", "raw": metadata,},
-                "extrinsic": {
-                    "provider": self.url,
-                    "when": self.visit_date.isoformat(),
-                    "raw": p_info.raw_info,
-                },
-            },
         )
 
 
 def parse_debian_control(filepath: str) -> Dict[str, Any]:
     """Parse debian control at filepath"""
     metadata: Dict = {}
     logger.debug("Debian control file %s", filepath)
     for paragraph in Deb822.iter_paragraphs(open(filepath, "rb")):
         logger.debug("paragraph: %s", paragraph)
         metadata.update(**paragraph)
 
     logger.debug("metadata parsed: %s", metadata)
     return metadata
 
 
 def extract_intrinsic_metadata(dir_path: str) -> Dict[str, Any]:
     """Given an uncompressed path holding the DESCRIPTION file, returns a
        DESCRIPTION parsed structure as a dict.
 
     CRAN origins describe their intrinsic metadata within a DESCRIPTION file
     at the root of the tarball. This DESCRIPTION uses a simple file format
     called DCF, the Debian control format.

     The release artifact contains a single folder at its root. For example:
     $ tar tvf zprint-0.0.6.tar.gz
     drwxr-xr-x root/root         0 2018-08-22 11:01 zprint-0.0.6/
     ...
 
     Args:
         dir_path (str): Path to the uncompressed directory
                         representing a release artifact from CRAN.
 
     Returns:
         the DESCRIPTION parsed structure as a dict (or empty dict if missing)
 
     """
     # Retrieve the root folder of the archive
     if not os.path.exists(dir_path):
         return {}
     lst = os.listdir(dir_path)
     if len(lst) != 1:
         return {}
     project_dirname = lst[0]
     description_path = os.path.join(dir_path, project_dirname, "DESCRIPTION")
     if not os.path.exists(description_path):
         return {}
     return parse_debian_control(description_path)
 
 
 def parse_date(date: Optional[str]) -> Optional[TimestampWithTimezone]:
     """Parse a date into a datetime
 
     """
     assert not date or isinstance(date, str)
     dt: Optional[datetime.datetime] = None
     if not date:
         return None
     try:
         specific_date = DATE_PATTERN.match(date)
         if specific_date:
             year = int(specific_date.group("year"))
             month = int(specific_date.group("month"))
             dt = datetime.datetime(year, month, 1)
         else:
             dt = dateutil.parser.parse(date)
 
         if not dt.tzinfo:
             # (up for discussion) the timezone needs to be set, otherwise
             # normalize_timestamp is not happy: ValueError: normalize_timestamp
             # received datetime without timezone: 2001-06-08 00:00:00
             dt = dt.replace(tzinfo=timezone.utc)
     except Exception as e:
         logger.warning("Fail to parse date %s. Reason: %s", date, e)
     if dt:
         return TimestampWithTimezone.from_datetime(dt)
     else:
         return None
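

 # Illustrative sketch, not part of the loader: the two date shapes handled by
 # parse_date above, a bare "YYYY-MM" DESCRIPTION date and a full date string.
 # The input values are hypothetical.
 def _example_parse_date() -> None:
     # "2001-06" matches DATE_PATTERN and becomes 2001-06-01 00:00:00+00:00
     month_only = parse_date("2001-06")
     # other strings go through dateutil; naive results are coerced to UTC
     full_date = parse_date("2018-08-22 11:01")
     assert month_only is not None and full_date is not None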
diff --git a/swh/loader/package/debian/loader.py b/swh/loader/package/debian/loader.py
index 015d70b..650dc6b 100644
--- a/swh/loader/package/debian/loader.py
+++ b/swh/loader/package/debian/loader.py
@@ -1,500 +1,477 @@
 # Copyright (C) 2017-2021  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import email.utils
 import logging
 from os import path
 import re
 import subprocess
 from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple
 
 import attr
 from dateutil.parser import parse as parse_date
 from debian.changelog import Changelog
 from debian.deb822 import Dsc
 
 from swh.loader.package.loader import BasePackageInfo, PackageLoader, PartialExtID
 from swh.loader.package.utils import download, release_name
 from swh.model.hashutil import hash_to_bytes
 from swh.model.model import (
     Person,
     Revision,
     RevisionType,
     Sha1Git,
     TimestampWithTimezone,
 )
 from swh.storage.interface import StorageInterface
 
 logger = logging.getLogger(__name__)
 UPLOADERS_SPLIT = re.compile(r"(?<=\>)\s*,\s*")
 
 EXTID_TYPE = "dsc-sha256"
 
 
 class DscCountError(ValueError):
     """Raised when an unexpected number of .dsc files is seen"""
 
     pass
 
 
 @attr.s
 class DebianFileMetadata:
     md5sum = attr.ib(type=str)
     name = attr.ib(type=str)
     """Filename"""
     sha256 = attr.ib(type=str)
     size = attr.ib(type=int)
     uri = attr.ib(type=str)
     """URL of this specific file"""
 
 
 @attr.s
 class DebianPackageChangelog:
     person = attr.ib(type=Dict[str, str])
     """A dict with fields like, model.Person, except they are str instead
     of bytes, and 'email' is optional."""
     date = attr.ib(type=str)
     """Date of the changelog entry."""
     history = attr.ib(type=List[Tuple[str, str]])
     """List of tuples (package_name, version)"""
 
 
 @attr.s
 class DebianPackageInfo(BasePackageInfo):
     raw_info = attr.ib(type=Dict[str, Any])
     files = attr.ib(type=Dict[str, DebianFileMetadata])
     """Metadata of the files (.deb, .dsc, ...) of the package."""
     name = attr.ib(type=str)
     version = attr.ib(type=str)
 
     @classmethod
     def from_metadata(cls, a_metadata: Dict[str, Any], url: str) -> "DebianPackageInfo":
         return cls(
             url=url,
             filename=None,
             raw_info=a_metadata,
             files={
                 file_name: DebianFileMetadata(**file_metadata)
                 for (file_name, file_metadata) in a_metadata.get("files", {}).items()
             },
             name=a_metadata["name"],
             version=a_metadata["version"],
         )
 
     def extid(self) -> Optional[PartialExtID]:
         dsc_files = [
             file for (name, file) in self.files.items() if name.endswith(".dsc")
         ]
 
         if len(dsc_files) != 1:
             raise DscCountError(
                 f"Expected exactly one .dsc file for package {self.name}, "
                 f"got {len(dsc_files)}"
             )
 
         return (EXTID_TYPE, hash_to_bytes(dsc_files[0].sha256))
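

 # Illustrative sketch, not part of the loader: the extid above is the sha256
 # advertised for the package's single .dsc file. The package description below
 # is hypothetical (modeled on the cicero test fixture).
 def _example_debian_extid() -> Optional[PartialExtID]:
     p_info = DebianPackageInfo.from_metadata(
         {
             "name": "cicero",
             "version": "0.7.2-3",
             "files": {
                 "cicero_0.7.2-3.dsc": {
                     "md5sum": "d5dac83eb9cfc9bb52a15eb618b4670a",
                     "name": "cicero_0.7.2-3.dsc",
                     "sha256": "35b7f1048010c67adfd8d70e4961aefd8800eb9a83a4d1cc68088da0009d9a03",  # noqa
                     "size": 1864,
                     "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.dsc",  # noqa
                 },
             },
         },
         url="deb://Debian/packages/cicero",
     )
     # returns ("dsc-sha256", hash_to_bytes("35b7f104...9d9a03"))
     return p_info.extid()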
 
 
 @attr.s
 class IntrinsicPackageMetadata:
     """Metadata extracted from a package's .dsc file."""
 
     name = attr.ib(type=str)
     version = attr.ib(type=str)
     changelog = attr.ib(type=DebianPackageChangelog)
     maintainers = attr.ib(type=List[Dict[str, str]])
     """A list of dicts with fields like, model.Person, except they are str instead
     of bytes, and 'email' is optional."""
 
 
 class DebianLoader(PackageLoader[DebianPackageInfo]):
     """Load debian origins into swh archive.
 
     """
 
     visit_type = "deb"
 
     def __init__(
         self,
         storage: StorageInterface,
         url: str,
         date: str,
         packages: Mapping[str, Any],
         max_content_size: Optional[int] = None,
     ):
         """Debian Loader implementation.
 
         Args:
             url: Origin url (e.g. deb://Debian/packages/cicero)
             date: Ignored
             packages: versioned packages and associated artifacts, example::
 
               {
                 'stretch/contrib/0.7.2-3': {
                   'name': 'cicero',
                   'version': '0.7.2-3',
                   'files': {
                     'cicero_0.7.2-3.diff.gz': {
                        'md5sum': 'a93661b6a48db48d59ba7d26796fc9ce',
                        'name': 'cicero_0.7.2-3.diff.gz',
                        'sha256': 'f039c9642fe15c75bed5254315e2a29f...',
                        'size': 3964,
                        'uri': 'http://d.d.o/cicero_0.7.2-3.diff.gz',
                     },
                     'cicero_0.7.2-3.dsc': {
                       'md5sum': 'd5dac83eb9cfc9bb52a15eb618b4670a',
                       'name': 'cicero_0.7.2-3.dsc',
                       'sha256': '35b7f1048010c67adfd8d70e4961aefb...',
                       'size': 1864,
                       'uri': 'http://d.d.o/cicero_0.7.2-3.dsc',
                     },
                     'cicero_0.7.2.orig.tar.gz': {
                       'md5sum': '4353dede07c5728319ba7f5595a7230a',
                       'name': 'cicero_0.7.2.orig.tar.gz',
                       'sha256': '63f40f2436ea9f67b44e2d4bd669dbab...',
                       'size': 96527,
                       'uri': 'http://d.d.o/cicero_0.7.2.orig.tar.gz',
                     }
                   },
                 },
                 # ...
               }
 
         """
         super().__init__(storage=storage, url=url, max_content_size=max_content_size)
         self.packages = packages
 
     def get_versions(self) -> Sequence[str]:
         """Returns the keys of the packages input (e.g.
            stretch/contrib/0.7.2-3, etc...)
 
         """
         return list(self.packages.keys())
 
     def get_package_info(self, version: str) -> Iterator[Tuple[str, DebianPackageInfo]]:
         meta = self.packages[version]
         p_info = DebianPackageInfo.from_metadata(meta, url=self.url)
         yield release_name(version), p_info
 
-    def known_artifact_to_extid(self, known_artifact: Dict) -> Optional[PartialExtID]:
-        sha256 = _artifact_to_dsc_sha256(known_artifact, url=self.url)
-        if sha256 is None:
-            return None
-        return (EXTID_TYPE, hash_to_bytes(sha256))
-
-    def resolve_revision_from_artifacts(
-        self, known_artifacts: Dict, p_info: DebianPackageInfo,
-    ) -> Optional[bytes]:
-        try:
-            return super().resolve_revision_from_artifacts(known_artifacts, p_info)
-        except DscCountError:
-            # known_artifacts are corrupted, ignore them instead of crashing
-            return None
-
     def download_package(
         self, p_info: DebianPackageInfo, tmpdir: str
     ) -> List[Tuple[str, Mapping]]:
         """Contrary to other package loaders (1 package, 1 artifact),
         `p_info.files` represents the set of package data files to fetch:
         - <package-version>.orig.tar.gz
         - <package-version>.dsc
         - <package-version>.diff.gz
 
         This is delegated to the `download_package` function.
 
         """
         all_hashes = download_package(p_info, tmpdir)
         logger.debug("all_hashes: %s", all_hashes)
         res = []
         for hashes in all_hashes.values():
             res.append((tmpdir, hashes))
             logger.debug("res: %s", res)
         return res
 
     def uncompress(
         self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], dest: str
     ) -> str:
         logger.debug("dl_artifacts: %s", dl_artifacts)
         return extract_package(dl_artifacts, dest=dest)
 
     def build_revision(
         self, p_info: DebianPackageInfo, uncompressed_path: str, directory: Sha1Git
     ) -> Optional[Revision]:
         dsc_url, dsc_name = dsc_information(p_info)
         if not dsc_name:
             raise ValueError("dsc name for url %s should not be None" % dsc_url)
         dsc_path = path.join(path.dirname(uncompressed_path), dsc_name)
         intrinsic_metadata = get_intrinsic_package_metadata(
             p_info, dsc_path, uncompressed_path
         )
 
         logger.debug("intrinsic_metadata: %s", intrinsic_metadata)
         logger.debug("p_info: %s", p_info)
 
         msg = "Synthetic revision for Debian source package %s version %s" % (
             p_info.name,
             p_info.version,
         )
 
         author = prepare_person(intrinsic_metadata.changelog.person)
         date = TimestampWithTimezone.from_iso8601(intrinsic_metadata.changelog.date)
 
         # inspired from swh.loader.debian.converters.package_metadata_to_revision  # noqa
         return Revision(
             type=RevisionType.DSC,
             message=msg.encode("utf-8"),
             author=author,
             date=date,
             committer=author,
             committer_date=date,
             parents=(),
             directory=directory,
             synthetic=True,
-            metadata={
-                "intrinsic": {"tool": "dsc", "raw": attr.asdict(intrinsic_metadata),},
-                "extrinsic": {
-                    "provider": dsc_url,
-                    "when": self.visit_date.isoformat(),
-                    "raw": p_info.raw_info,
-                },
-            },
         )
 
 
 def _artifact_to_dsc_sha256(known_artifacts: Dict, url: str) -> Optional[str]:
     extrinsic = known_artifacts.get("extrinsic")
     if not extrinsic:
         return None
 
     known_p_info = DebianPackageInfo.from_metadata(extrinsic["raw"], url=url)
     dsc = [file for (name, file) in known_p_info.files.items() if name.endswith(".dsc")]
 
     if len(dsc) != 1:
         raise DscCountError(
             f"Expected exactly one known .dsc file for package {known_p_info.name}, "
             f"got {len(dsc)}"
         )
 
     return dsc[0].sha256
 
 
 def uid_to_person(uid: str) -> Dict[str, str]:
     """Convert an uid to a person suitable for insertion.
 
     Args:
         uid: an uid of the form "Name <email@ddress>"
 
     Returns:
         a dictionary with the following keys:
 
         - name: the name associated to the uid
         - email: the mail associated to the uid
         - fullname: the actual uid input
 
     """
     logger.debug("uid: %s", uid)
     ret = {
         "name": "",
         "email": "",
         "fullname": uid,
     }
 
     name, mail = email.utils.parseaddr(uid)
     if name and mail:
         ret["name"] = name
         ret["email"] = mail
     else:
         ret["name"] = uid
     return ret
 
 
 def prepare_person(person: Mapping[str, str]) -> Person:
     """Prepare person for swh serialization...
 
     Args:
         person: A person dict
 
     Returns:
         A person ready for storage
 
     """
     return Person.from_dict(
         {key: value.encode("utf-8") for (key, value) in person.items()}
     )
 
 
 def download_package(p_info: DebianPackageInfo, tmpdir: Any) -> Mapping[str, Any]:
     """Fetch a source package in a temporary directory and check the checksums
     for all files.
 
     Args:
         p_info: Information on a package
         tmpdir: Where to download and extract the files to ingest
 
     Returns:
         Dict of swh hashes per filename key
 
     """
     all_hashes = {}
     for filename, fileinfo in p_info.files.items():
         uri = fileinfo.uri
         logger.debug("fileinfo: %s", fileinfo)
         extrinsic_hashes = {"sha256": fileinfo.sha256}
         logger.debug("extrinsic_hashes(%s): %s", filename, extrinsic_hashes)
         filepath, hashes = download(
             uri, dest=tmpdir, filename=filename, hashes=extrinsic_hashes
         )
         all_hashes[filename] = hashes
 
     logger.debug("all_hashes: %s", all_hashes)
     return all_hashes
 
 
 def dsc_information(p_info: DebianPackageInfo) -> Tuple[Optional[str], Optional[str]]:
     """Retrieve dsc information from a package.
 
     Args:
         p_info: Package metadata information
 
     Returns:
         Tuple of dsc file's uri, dsc's full disk path
 
     """
     dsc_name = None
     dsc_url = None
     for filename, fileinfo in p_info.files.items():
         if filename.endswith(".dsc"):
             if dsc_name:
                 raise DscCountError(
                     "Package %s_%s references several dsc files."
                     % (p_info.name, p_info.version)
                 )
             dsc_url = fileinfo.uri
             dsc_name = filename
 
     return dsc_url, dsc_name
 
 
 def extract_package(dl_artifacts: List[Tuple[str, Mapping]], dest: str) -> str:
     """Extract a Debian source package to a given directory.
 
     Note that after extraction the target directory will be the root of the
     extracted package, rather than containing it.
 
     Args:
         dl_artifacts: list of (path, hashes) tuples of the downloaded files
         dest: directory where the package files are stored
 
     Returns:
         Package extraction directory
 
     """
     a_path = dl_artifacts[0][0]
     logger.debug("dl_artifacts: %s", dl_artifacts)
     for _, hashes in dl_artifacts:
         logger.debug("hashes: %s", hashes)
         filename = hashes["filename"]
         if filename.endswith(".dsc"):
             dsc_name = filename
             break
 
     dsc_path = path.join(a_path, dsc_name)
     destdir = path.join(dest, "extracted")
     logfile = path.join(dest, "extract.log")
     logger.debug(
         "extract Debian source package %s in %s" % (dsc_path, destdir),
         extra={"swh_type": "deb_extract", "swh_dsc": dsc_path, "swh_destdir": destdir,},
     )
 
     cmd = [
         "dpkg-source",
         "--no-copy",
         "--no-check",
         "--ignore-bad-version",
         "-x",
         dsc_path,
         destdir,
     ]
 
     try:
         with open(logfile, "w") as stdout:
             subprocess.check_call(cmd, stdout=stdout, stderr=subprocess.STDOUT)
     except subprocess.CalledProcessError as e:
         logdata = open(logfile, "r").read()
         raise ValueError(
             "dpkg-source exited with code %s: %s" % (e.returncode, logdata)
         ) from None
 
     return destdir
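

 # Illustrative sketch, not part of the loader: how download_package output is
 # fed to extract_package, and where the extracted source tree ends up. The
 # function name and its arguments are hypothetical.
 def _example_extract_package(p_info: DebianPackageInfo, tmpdir: str) -> str:
     all_hashes = download_package(p_info, tmpdir)
     dl_artifacts = [(tmpdir, hashes) for hashes in all_hashes.values()]
     # the returned path is <tmpdir>/extracted, the root of the extracted package
     return extract_package(dl_artifacts, dest=tmpdir)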
 
 
 def get_intrinsic_package_metadata(
     p_info: DebianPackageInfo, dsc_path: str, extracted_path: str
 ) -> IntrinsicPackageMetadata:
     """Get the package metadata from the source package at dsc_path,
     extracted in extracted_path.
 
     Args:
         p_info: the package information
         dsc_path: path to the package's dsc file
         extracted_path: the path where the package got extracted
 
     Returns:
         an IntrinsicPackageMetadata holding the package name, version,
         maintainers and changelog; the changelog history is a list of
         (package_name, package_version) tuples parsed from the package
         changelog
 
     """
     with open(dsc_path, "rb") as dsc:
         parsed_dsc = Dsc(dsc)
 
     # Parse the changelog to retrieve the rest of the package information
     changelog_path = path.join(extracted_path, "debian/changelog")
     with open(changelog_path, "rb") as changelog_file:
         try:
             parsed_changelog = Changelog(changelog_file)
         except UnicodeDecodeError:
             logger.warning(
                 "Unknown encoding for changelog %s,"
                 " falling back to iso" % changelog_path,
                 extra={
                     "swh_type": "deb_changelog_encoding",
                     "swh_name": p_info.name,
                     "swh_version": str(p_info.version),
                     "swh_changelog": changelog_path,
                 },
             )
 
             # need to reset as Changelog scrolls to the end of the file
             changelog_file.seek(0)
             parsed_changelog = Changelog(changelog_file, encoding="iso-8859-15")
 
     history: List[Tuple[str, str]] = []
 
     for block in parsed_changelog:
         assert block.package is not None
         history.append((block.package, str(block.version)))
 
     changelog = DebianPackageChangelog(
         person=uid_to_person(parsed_changelog.author),
         date=parse_date(parsed_changelog.date).isoformat(),
         history=history[1:],
     )
 
     maintainers = [
         uid_to_person(parsed_dsc["Maintainer"]),
     ]
     maintainers.extend(
         uid_to_person(person)
         for person in UPLOADERS_SPLIT.split(parsed_dsc.get("Uploaders", ""))
     )
 
     return IntrinsicPackageMetadata(
         name=p_info.name,
         version=str(p_info.version),
         changelog=changelog,
         maintainers=maintainers,
     )
diff --git a/swh/loader/package/debian/tests/test_debian.py b/swh/loader/package/debian/tests/test_debian.py
index e17b9fa..5205e8b 100644
--- a/swh/loader/package/debian/tests/test_debian.py
+++ b/swh/loader/package/debian/tests/test_debian.py
@@ -1,562 +1,412 @@
 # Copyright (C) 2019-2021  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import logging
 from os import path
-import random
 
 import pytest
 
 from swh.loader.package.debian.loader import (
     DebianLoader,
     DebianPackageChangelog,
     DebianPackageInfo,
     IntrinsicPackageMetadata,
     download_package,
     dsc_information,
     extract_package,
     get_intrinsic_package_metadata,
     prepare_person,
     uid_to_person,
 )
 from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
 from swh.model.hashutil import hash_to_bytes
 from swh.model.model import Person, Snapshot, SnapshotBranch, TargetType
 
 logger = logging.getLogger(__name__)
 
 
 URL = "deb://Debian/packages/cicero"
 
 PACKAGE_FILES = {
     "name": "cicero",
     "version": "0.7.2-3",
     "files": {
         "cicero_0.7.2-3.diff.gz": {
             "md5sum": "a93661b6a48db48d59ba7d26796fc9ce",
             "name": "cicero_0.7.2-3.diff.gz",
             "sha256": "f039c9642fe15c75bed5254315e2a29f9f2700da0e29d9b0729b3ffc46c8971c",  # noqa
             "size": 3964,
             "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.diff.gz",  # noqa
         },
         "cicero_0.7.2-3.dsc": {
             "md5sum": "d5dac83eb9cfc9bb52a15eb618b4670a",
             "name": "cicero_0.7.2-3.dsc",
             "sha256": "35b7f1048010c67adfd8d70e4961aefd8800eb9a83a4d1cc68088da0009d9a03",  # noqa
             "size": 1864,
             "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.dsc",  # noqa
         },  # noqa
         "cicero_0.7.2.orig.tar.gz": {
             "md5sum": "4353dede07c5728319ba7f5595a7230a",
             "name": "cicero_0.7.2.orig.tar.gz",
             "sha256": "63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786",  # noqa
             "size": 96527,
             "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2.orig.tar.gz",  # noqa
         },
     },
 }
 
 PACKAGE_FILES2 = {
     "name": "cicero",
     "version": "0.7.2-4",
     "files": {
         "cicero_0.7.2-4.diff.gz": {
             "md5sum": "1e7e6fc4a59d57c98082a3af78145734",
             "name": "cicero_0.7.2-4.diff.gz",
             "sha256": "2e6fa296ee7005473ff58d0971f4fd325617b445671480e9f2cfb738d5dbcd01",  # noqa
             "size": 4038,
             "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-4.diff.gz",  # noqa
         },
         "cicero_0.7.2-4.dsc": {
             "md5sum": "1a6c8855a73b4282bb31d15518f18cde",
             "name": "cicero_0.7.2-4.dsc",
             "sha256": "913ee52f7093913420de5cbe95d63cfa817f1a1daf997961149501894e754f8b",  # noqa
             "size": 1881,
             "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-4.dsc",  # noqa
         },  # noqa
         "cicero_0.7.2.orig.tar.gz": {
             "md5sum": "4353dede07c5728319ba7f5595a7230a",
             "name": "cicero_0.7.2.orig.tar.gz",
             "sha256": "63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786",  # noqa
             "size": 96527,
             "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2.orig.tar.gz",  # noqa
         },
     },
 }
 
 
 PACKAGE_PER_VERSION = {
     "stretch/contrib/0.7.2-3": PACKAGE_FILES,
 }
 
 
 PACKAGES_PER_VERSION = {
     "stretch/contrib/0.7.2-3": PACKAGE_FILES,
     "buster/contrib/0.7.2-4": PACKAGE_FILES2,
 }
 
 
 def test_debian_first_visit(swh_storage, requests_mock_datadir):
     """With no prior visit, load a gnu project ends up with 1 snapshot
 
     """
     loader = DebianLoader(
         swh_storage,
         URL,
         date="2019-10-12T05:58:09.165557+00:00",
         packages=PACKAGE_PER_VERSION,
     )
 
     actual_load_status = loader.load()
     expected_snapshot_id = "3b6b66e6ee4e7d903a379a882684a2a50480c0b4"
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id,
     }
 
     assert_last_visit_matches(swh_storage, URL, status="full", type="deb")
 
     stats = get_stats(swh_storage)
     assert {
         "content": 42,
         "directory": 2,
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": 1,  # all artifacts under 1 revision
         "skipped_content": 0,
         "snapshot": 1,
     } == stats
 
     expected_snapshot = Snapshot(
         id=hash_to_bytes(expected_snapshot_id),
         branches={
             b"releases/stretch/contrib/0.7.2-3": SnapshotBranch(
                 target_type=TargetType.REVISION,
                 target=hash_to_bytes("2807f5b3f84368b4889a9ae827fe85854ffecf07"),
             )
         },
     )  # different than the previous loader as no release is done
 
     check_snapshot(expected_snapshot, swh_storage)
 
 
 def test_debian_first_visit_then_another_visit(swh_storage, requests_mock_datadir):
     """With no prior visit, load a debian project ends up with 1 snapshot
 
     """
     loader = DebianLoader(
         swh_storage,
         URL,
         date="2019-10-12T05:58:09.165557+00:00",
         packages=PACKAGE_PER_VERSION,
     )
 
     actual_load_status = loader.load()
 
     expected_snapshot_id = "3b6b66e6ee4e7d903a379a882684a2a50480c0b4"
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id,
     }
 
     assert_last_visit_matches(swh_storage, URL, status="full", type="deb")
 
     stats = get_stats(swh_storage)
     assert {
         "content": 42,
         "directory": 2,
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": 1,  # all artifacts under 1 revision
         "skipped_content": 0,
         "snapshot": 1,
     } == stats
 
     expected_snapshot = Snapshot(
         id=hash_to_bytes(expected_snapshot_id),
         branches={
             b"releases/stretch/contrib/0.7.2-3": SnapshotBranch(
                 target_type=TargetType.REVISION,
                 target=hash_to_bytes("2807f5b3f84368b4889a9ae827fe85854ffecf07"),
             )
         },
     )  # different than the previous loader as no release is done
 
     check_snapshot(expected_snapshot, swh_storage)
 
     # No change in between load
     actual_load_status2 = loader.load()
     assert actual_load_status2["status"] == "uneventful"
     assert_last_visit_matches(swh_storage, URL, status="full", type="deb")
 
     stats2 = get_stats(swh_storage)
     assert {
         "content": 42 + 0,
         "directory": 2 + 0,
         "origin": 1,
         "origin_visit": 1 + 1,  # a new visit occurred
         "release": 0,
         "revision": 1,
         "skipped_content": 0,
         "snapshot": 1,  # same snapshot across 2 visits
     } == stats2
 
     urls = [
         m.url
         for m in requests_mock_datadir.request_history
         if m.url.startswith("http://deb.debian.org")
     ]
     # each package artifact was fetched only once across the 2 visits
     assert len(urls) == len(set(urls))
 
 
 def test_debian_uid_to_person():
     uid = "Someone Name <someone@orga.org>"
     actual_person = uid_to_person(uid)
 
     assert actual_person == {
         "name": "Someone Name",
         "email": "someone@orga.org",
         "fullname": uid,
     }
 
 
 def test_debian_prepare_person():
     actual_author = prepare_person(
         {
             "name": "Someone Name",
             "email": "someone@orga.org",
             "fullname": "Someone Name <someone@orga.org>",
         }
     )
 
     assert actual_author == Person(
         name=b"Someone Name",
         email=b"someone@orga.org",
         fullname=b"Someone Name <someone@orga.org>",
     )
 
 
 def test_debian_download_package(datadir, tmpdir, requests_mock_datadir):
     tmpdir = str(tmpdir)  # py3.5 workaround (LocalPath issue)
     p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL)
     all_hashes = download_package(p_info, tmpdir)
     assert all_hashes == {
         "cicero_0.7.2-3.diff.gz": {
             "checksums": {
                 "sha1": "0815282053f21601b0ec4adf7a8fe47eace3c0bc",
                 "sha256": "f039c9642fe15c75bed5254315e2a29f9f2700da0e29d9b0729b3ffc46c8971c",  # noqa
             },
             "filename": "cicero_0.7.2-3.diff.gz",
             "length": 3964,
             "url": (
                 "http://deb.debian.org/debian/pool/contrib/c/cicero/"
                 "cicero_0.7.2-3.diff.gz"
             ),
         },
         "cicero_0.7.2-3.dsc": {
             "checksums": {
                 "sha1": "abbec4e8efbbc80278236e1dd136831eac08accd",
                 "sha256": "35b7f1048010c67adfd8d70e4961aefd8800eb9a83a4d1cc68088da0009d9a03",  # noqa
             },
             "filename": "cicero_0.7.2-3.dsc",
             "length": 1864,
             "url": (
                 "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.dsc"
             ),
         },
         "cicero_0.7.2.orig.tar.gz": {
             "checksums": {
                 "sha1": "a286efd63fe2c9c9f7bb30255c3d6fcdcf390b43",
                 "sha256": "63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786",  # noqa
             },
             "filename": "cicero_0.7.2.orig.tar.gz",
             "length": 96527,
             "url": (
                 "http://deb.debian.org/debian/pool/contrib/c/cicero/"
                 "cicero_0.7.2.orig.tar.gz"
             ),
         },
     }
 
 
 def test_debian_dsc_information_ok():
     fname = "cicero_0.7.2-3.dsc"
     p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL)
     dsc_url, dsc_name = dsc_information(p_info)
 
     assert dsc_url == PACKAGE_FILES["files"][fname]["uri"]
     assert dsc_name == PACKAGE_FILES["files"][fname]["name"]
 
 
 def test_debian_dsc_information_not_found():
     fname = "cicero_0.7.2-3.dsc"
     p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL)
     p_info.files.pop(fname)
 
     dsc_url, dsc_name = dsc_information(p_info)
 
     assert dsc_url is None
     assert dsc_name is None
 
 
 def test_debian_dsc_information_too_many_dsc_entries():
     # craft an extra dsc file
     fname = "cicero_0.7.2-3.dsc"
     p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL)
     data = p_info.files[fname]
     fname2 = fname.replace("cicero", "ciceroo")
     p_info.files[fname2] = data
 
     with pytest.raises(
         ValueError,
         match="Package %s_%s references several dsc"
         % (PACKAGE_FILES["name"], PACKAGE_FILES["version"]),
     ):
         dsc_information(p_info)
 
 
 def test_debian_get_intrinsic_package_metadata(
     requests_mock_datadir, datadir, tmp_path
 ):
     tmp_path = str(tmp_path)  # py3.5 compat.
     p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL)
 
     logger.debug("p_info: %s", p_info)
 
     # download the packages
     all_hashes = download_package(p_info, tmp_path)
 
     # Retrieve information from package
     _, dsc_name = dsc_information(p_info)
 
     dl_artifacts = [(tmp_path, hashes) for hashes in all_hashes.values()]
 
     # Extract information from package
     extracted_path = extract_package(dl_artifacts, tmp_path)
 
     # Retrieve information on package
     dsc_path = path.join(path.dirname(extracted_path), dsc_name)
     actual_package_info = get_intrinsic_package_metadata(
         p_info, dsc_path, extracted_path
     )
 
     logger.debug("actual_package_info: %s", actual_package_info)
 
     assert actual_package_info == IntrinsicPackageMetadata(
         changelog=DebianPackageChangelog(
             date="2014-10-19T16:52:35+02:00",
             history=[
                 ("cicero", "0.7.2-2"),
                 ("cicero", "0.7.2-1"),
                 ("cicero", "0.7-1"),
             ],
             person={
                 "email": "sthibault@debian.org",
                 "fullname": "Samuel Thibault <sthibault@debian.org>",
                 "name": "Samuel Thibault",
             },
         ),
         maintainers=[
             {
                 "email": "debian-accessibility@lists.debian.org",
                 "fullname": "Debian Accessibility Team "
                 "<debian-accessibility@lists.debian.org>",
                 "name": "Debian Accessibility Team",
             },
             {
                 "email": "sthibault@debian.org",
                 "fullname": "Samuel Thibault <sthibault@debian.org>",
                 "name": "Samuel Thibault",
             },
         ],
         name="cicero",
         version="0.7.2-3",
     )
 
 
 def test_debian_multiple_packages(swh_storage, requests_mock_datadir):
     loader = DebianLoader(
         swh_storage,
         URL,
         date="2019-10-12T05:58:09.165557+00:00",
         packages=PACKAGES_PER_VERSION,
     )
 
     actual_load_status = loader.load()
     expected_snapshot_id = "defc19021187f3727293121fcf6c5c82cb923604"
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id,
     }
 
     assert_last_visit_matches(swh_storage, URL, status="full", type="deb")
 
     expected_snapshot = Snapshot(
         id=hash_to_bytes(expected_snapshot_id),
         branches={
             b"releases/stretch/contrib/0.7.2-3": SnapshotBranch(
                 target_type=TargetType.REVISION,
                 target=hash_to_bytes("2807f5b3f84368b4889a9ae827fe85854ffecf07"),
             ),
             b"releases/buster/contrib/0.7.2-4": SnapshotBranch(
                 target_type=TargetType.REVISION,
                 target=hash_to_bytes("8224139c274c984147ef4b09aa0e462c55a10bd3"),
             ),
         },
     )
 
     check_snapshot(expected_snapshot, swh_storage)
-
-
-def test_debian_resolve_revision_from_artifacts_edge_cases():
-    """Solving revision with empty data will result in unknown revision
-
-    """
-    loader = DebianLoader(None, None, None, None)
-    empty_artifact = {
-        "name": PACKAGE_FILES["name"],
-        "version": PACKAGE_FILES["version"],
-    }
-    for package_artifacts in [empty_artifact, PACKAGE_FILES]:
-        p_info = DebianPackageInfo.from_metadata(package_artifacts, url=URL)
-        actual_revision = loader.resolve_revision_from_artifacts({}, p_info)
-        assert actual_revision is None
-
-    for known_artifacts in [{}, PACKAGE_FILES]:
-        actual_revision = loader.resolve_revision_from_artifacts(
-            known_artifacts, DebianPackageInfo.from_metadata(empty_artifact, url=URL)
-        )
-        assert actual_revision is None
-
-    known_package_artifacts = {
-        b"(\x07\xf5\xb3\xf8Ch\xb4\x88\x9a\x9a\xe8'\xfe\x85\x85O\xfe\xcf\x07": {
-            "extrinsic": {
-                # empty
-            },
-            # ... removed the unnecessary intermediary data
-        }
-    }
-    assert not loader.resolve_revision_from_artifacts(
-        known_package_artifacts, DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL)
-    )
-
-
-def test_debian_resolve_revision_from_artifacts_edge_cases_hit_and_miss():
-    """Solving revision with inconsistent data will result in unknown revision
-
-    """
-    loader = DebianLoader(None, None, None, None)
-    artifact_metadata = PACKAGE_FILES2
-    p_info = DebianPackageInfo.from_metadata(artifact_metadata, url=URL)
-    expected_revision_id = (
-        b"(\x08\xf5\xb3\xf8Ch\xb4\x88\x9a\x9a\xe8'\xff\x85\x85O\xfe\xcf\x07"  # noqa
-    )
-    known_package_artifacts = {
-        expected_revision_id: {
-            "extrinsic": {"raw": PACKAGE_FILES,},
-            # ... removed the unnecessary intermediary data
-        }
-    }
-
-    actual_revision = loader.resolve_revision_from_artifacts(
-        known_package_artifacts, p_info
-    )
-
-    assert actual_revision is None
-
-
-def test_debian_resolve_revision_from_artifacts():
-    """Solving revision with consistent data will solve the revision
-
-    """
-    loader = DebianLoader(None, None, None, None)
-    artifact_metadata = PACKAGE_FILES
-    p_info = DebianPackageInfo.from_metadata(artifact_metadata, url=URL)
-    expected_revision_id = (
-        b"(\x07\xf5\xb3\xf8Ch\xb4\x88\x9a\x9a\xe8'\xfe\x85\x85O\xfe\xcf\x07"  # noqa
-    )
-
-    files = artifact_metadata["files"]
-    # shuffling dict's keys
-    keys = list(files.keys())
-    random.shuffle(keys)
-    package_files = {
-        "name": PACKAGE_FILES["name"],
-        "version": PACKAGE_FILES["version"],
-        "files": {k: files[k] for k in keys},
-    }
-
-    known_package_artifacts = {
-        expected_revision_id: {
-            "extrinsic": {"raw": package_files,},
-            # ... removed the unnecessary intermediary data
-        }
-    }
-
-    actual_revision = loader.resolve_revision_from_artifacts(
-        known_package_artifacts, p_info
-    )
-
-    assert actual_revision == expected_revision_id
-
-
-def test_debian_resolve_revision_from_artifacts_corrupt_known_artifact():
-    """To many or not enough .dsc files in the known_artifacts dict"""
-    loader = DebianLoader(None, None, None, None)
-    artifact_metadata = PACKAGE_FILES
-    p_info = DebianPackageInfo.from_metadata(artifact_metadata, url=URL)
-    expected_revision_id = (
-        b"(\x07\xf5\xb3\xf8Ch\xb4\x88\x9a\x9a\xe8'\xfe\x85\x85O\xfe\xcf\x07"
-    )
-
-    files = dict(artifact_metadata["files"])
-    package_files = {
-        "name": PACKAGE_FILES["name"],
-        "version": PACKAGE_FILES["version"],
-        "files": files,
-    }
-
-    known_package_artifacts = {
-        expected_revision_id: {
-            "extrinsic": {"raw": package_files,},
-            # ... removed the unnecessary intermediary data
-        }
-    }
-
-    # Too many .dsc
-    files["another.dsc"] = files["cicero_0.7.2-3.dsc"]
-    assert (
-        loader.resolve_revision_from_artifacts(known_package_artifacts, p_info) is None
-    )
-
-    # Not enough .dsc
-    del files["another.dsc"]
-    del files["cicero_0.7.2-3.dsc"]
-    assert (
-        loader.resolve_revision_from_artifacts(known_package_artifacts, p_info) is None
-    )
-
-
-def test_debian_resolve_revision_from_artifacts_corrupt_new_artifact():
-    loader = DebianLoader(None, None, None, None)
-    artifact_metadata = PACKAGE_FILES
-
-    files = PACKAGE_FILES["files"]
-    files = {**files, "another.dsc": files["cicero_0.7.2-3.dsc"]}
-    artifact_metadata = {**PACKAGE_FILES, "files": files}
-
-    # Too many .dsc
-    files["another.dsc"] = files["cicero_0.7.2-3.dsc"]
-    p_info = DebianPackageInfo.from_metadata(artifact_metadata, url=URL)
-    assert loader.resolve_revision_from_artifacts(PACKAGE_FILES, p_info) is None
-
-    # Not enough .dsc
-    del files["another.dsc"]
-    del files["cicero_0.7.2-3.dsc"]
-    p_info = DebianPackageInfo.from_metadata(artifact_metadata, url=URL)
-    assert loader.resolve_revision_from_artifacts(PACKAGE_FILES, p_info) is None
diff --git a/swh/loader/package/deposit/loader.py b/swh/loader/package/deposit/loader.py
index 7e5c6f4..002f78d 100644
--- a/swh/loader/package/deposit/loader.py
+++ b/swh/loader/package/deposit/loader.py
@@ -1,388 +1,381 @@
 # Copyright (C) 2019-2021  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import datetime
 from datetime import timezone
 import json
 import logging
 from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple, Union
 
 import attr
 import requests
 
 from swh.core.config import load_from_envvar
 from swh.loader.core.loader import DEFAULT_CONFIG
 from swh.loader.package.loader import (
     BasePackageInfo,
     PackageLoader,
     RawExtrinsicMetadataCore,
 )
 from swh.loader.package.utils import cached_method, download
 from swh.model.hashutil import hash_to_bytes, hash_to_hex
 from swh.model.model import (
     MetadataAuthority,
     MetadataAuthorityType,
     MetadataFetcher,
     Person,
     Revision,
     RevisionType,
     Sha1Git,
     TimestampWithTimezone,
 )
 from swh.storage.algos.snapshot import snapshot_get_all_branches
 from swh.storage.interface import StorageInterface
 
 logger = logging.getLogger(__name__)
 
 
 def now() -> datetime.datetime:
     return datetime.datetime.now(tz=timezone.utc)
 
 
 @attr.s
 class DepositPackageInfo(BasePackageInfo):
     filename = attr.ib(type=str)  # instead of Optional[str]
     raw_info = attr.ib(type=Dict[str, Any])
 
     author_date = attr.ib(type=datetime.datetime)
     """codemeta:dateCreated if any, deposit completed_date otherwise"""
     commit_date = attr.ib(type=datetime.datetime)
     """codemeta:datePublished if any, deposit completed_date otherwise"""
     client = attr.ib(type=str)
     id = attr.ib(type=int)
     """Internal ID of the deposit in the deposit DB"""
     collection = attr.ib(type=str)
     """The collection in the deposit; see SWORD specification."""
     author = attr.ib(type=Person)
     committer = attr.ib(type=Person)
     revision_parents = attr.ib(type=Tuple[Sha1Git, ...])
     """Revisions created from previous deposits, that will be used as parents of the
     revision created for this deposit."""
 
     @classmethod
     def from_metadata(
         cls, metadata: Dict[str, Any], url: str, filename: str
     ) -> "DepositPackageInfo":
         # Note:
         # `date` and `committer_date` are always transmitted by the deposit read api,
         # which computes the values itself. The loader needs to use those to create
         # the revision.
 
         all_metadata_raw: List[str] = metadata["metadata_raw"]
         raw_info = {
             "origin": metadata["origin"],
             "origin_metadata": {
                 "metadata": metadata["metadata_dict"],
                 "provider": metadata["provider"],
                 "tool": metadata["tool"],
             },
         }
         depo = metadata["deposit"]
         return cls(
             url=url,
             filename=filename,
             author_date=depo["author_date"],
             commit_date=depo["committer_date"],
             client=depo["client"],
             id=depo["id"],
             collection=depo["collection"],
             author=parse_author(depo["author"]),
             committer=parse_author(depo["committer"]),
             revision_parents=tuple(hash_to_bytes(p) for p in depo["revision_parents"]),
             raw_info=raw_info,
             directory_extrinsic_metadata=[
                 RawExtrinsicMetadataCore(
                     discovery_date=now(),
                     metadata=raw_metadata.encode(),
                     format="sword-v2-atom-codemeta-v2",
                 )
                 for raw_metadata in all_metadata_raw
             ],
         )
 
     def extid(self) -> None:
         # For now, we don't try to deduplicate deposits. There is little point anyway,
         # as it only happens when the exact same tarball was deposited twice.
         return None
 
 
 class DepositLoader(PackageLoader[DepositPackageInfo]):
     """Load a deposited artifact into swh archive.
 
     """
 
     visit_type = "deposit"
 
     def __init__(
         self,
         storage: StorageInterface,
         url: str,
         deposit_id: str,
         deposit_client: "ApiClient",
         max_content_size: Optional[int] = None,
         default_filename: str = "archive.tar",
     ):
         """Constructor
 
         Args:
             url: Origin url to associate the artifacts/metadata to
             deposit_id: Deposit identity
             deposit_client: Deposit api client
 
         """
         super().__init__(storage=storage, url=url, max_content_size=max_content_size)
 
         self.deposit_id = deposit_id
         self.client = deposit_client
         self.default_filename = default_filename
 
     @classmethod
     def from_configfile(cls, **kwargs: Any):
         """Instantiate a loader from the configuration loaded from the
         SWH_CONFIG_FILENAME envvar, with potential extra keyword arguments if their
         value is not None.
 
         Args:
             kwargs: kwargs passed to the loader instantiation
 
         """
         config = dict(load_from_envvar(DEFAULT_CONFIG))
         config.update({k: v for k, v in kwargs.items() if v is not None})
         deposit_client = ApiClient(**config.pop("deposit"))
         return cls.from_config(deposit_client=deposit_client, **config)
 
     def get_versions(self) -> Sequence[str]:
         # only 1 branch 'HEAD' with no alias since we only have 1 snapshot
         # branch
         return ["HEAD"]
 
     def get_metadata_authority(self) -> MetadataAuthority:
         provider = self.metadata()["provider"]
         assert provider["provider_type"] == MetadataAuthorityType.DEPOSIT_CLIENT.value
         return MetadataAuthority(
             type=MetadataAuthorityType.DEPOSIT_CLIENT,
             url=provider["provider_url"],
             metadata={
                 "name": provider["provider_name"],
                 **(provider["metadata"] or {}),
             },
         )
 
     def get_metadata_fetcher(self) -> MetadataFetcher:
         tool = self.metadata()["tool"]
         return MetadataFetcher(
             name=tool["name"], version=tool["version"], metadata=tool["configuration"],
         )
 
     def get_package_info(
         self, version: str
     ) -> Iterator[Tuple[str, DepositPackageInfo]]:
         p_info = DepositPackageInfo.from_metadata(
             self.metadata(), url=self.url, filename=self.default_filename,
         )
         yield "HEAD", p_info
 
     def download_package(
         self, p_info: DepositPackageInfo, tmpdir: str
     ) -> List[Tuple[str, Mapping]]:
         """Override to allow use of the dedicated deposit client
 
         """
         return [self.client.archive_get(self.deposit_id, tmpdir, p_info.filename)]
 
     def build_revision(
         self, p_info: DepositPackageInfo, uncompressed_path: str, directory: Sha1Git
     ) -> Optional[Revision]:
         message = (
             f"{p_info.client}: Deposit {p_info.id} in collection {p_info.collection}"
         ).encode("utf-8")
 
         return Revision(
             type=RevisionType.TAR,
             message=message,
             author=p_info.author,
             date=TimestampWithTimezone.from_dict(p_info.author_date),
             committer=p_info.committer,
             committer_date=TimestampWithTimezone.from_dict(p_info.commit_date),
             parents=p_info.revision_parents,
             directory=directory,
             synthetic=True,
-            metadata={
-                "extrinsic": {
-                    "provider": self.client.metadata_url(self.deposit_id),
-                    "when": self.visit_date.isoformat(),
-                    "raw": p_info.raw_info,
-                },
-            },
         )
 
     def get_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadataCore]:
         metadata = self.metadata()
         all_metadata_raw: List[str] = metadata["metadata_raw"]
         origin_metadata = json.dumps(
             {
                 "metadata": all_metadata_raw,
                 "provider": metadata["provider"],
                 "tool": metadata["tool"],
             }
         ).encode()
         return [
             RawExtrinsicMetadataCore(
                 discovery_date=now(),
                 metadata=raw_meta.encode(),
                 format="sword-v2-atom-codemeta-v2",
             )
             for raw_meta in all_metadata_raw
         ] + [
             RawExtrinsicMetadataCore(
                 discovery_date=now(),
                 metadata=origin_metadata,
                 format="original-artifacts-json",
             )
         ]
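
A minimal sketch (editorial aside, not part of the patch) of how the metadata
emitted by get_extrinsic_origin_metadata() can be read back from storage once
loaded; `loader` stands for a loaded DepositLoader instance, and Origin is not
imported by this module, hence the explicit import:

from swh.model.model import Origin

authority = loader.get_metadata_authority()
page = loader.storage.raw_extrinsic_metadata_get(Origin(loader.url).swhid(), authority)
for rem in page.results:
    # each result carries the format set above, e.g. "sword-v2-atom-codemeta-v2"
    # or "original-artifacts-json"
    print(rem.format, rem.discovery_date)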
 
     @cached_method
     def metadata(self):
         """Returns metadata from the deposit server"""
         return self.client.metadata_get(self.deposit_id)
 
     def load(self) -> Dict:
         # First, make sure the deposit is known on the deposit's RPC server
         # before triggering a load
         try:
             self.metadata()
         except ValueError:
             logger.error(f"Unknown deposit {self.deposit_id}, ignoring")
             return {"status": "failed"}
 
         # Then usual loading
         return super().load()
 
     def finalize_visit(self, status_visit: str, **kwargs) -> Dict[str, Any]:
         r = super().finalize_visit(status_visit=status_visit, **kwargs)
         success = status_visit == "full"
 
         # Update deposit status
         try:
             if not success:
                 self.client.status_update(self.deposit_id, status="failed")
                 return r
 
             snapshot_id = hash_to_bytes(r["snapshot_id"])
             snapshot = snapshot_get_all_branches(self.storage, snapshot_id)
             if not snapshot:
                 return r
             branches = snapshot.branches
             logger.debug("branches: %s", branches)
             if not branches:
                 return r
             rev_id = branches[b"HEAD"].target
 
             revision = self.storage.revision_get([rev_id])[0]
             if not revision:
                 return r
 
             # update the deposit's status to success with its
             # revision-id and directory-id
             self.client.status_update(
                 self.deposit_id,
                 status="done",
                 revision_id=hash_to_hex(rev_id),
                 directory_id=hash_to_hex(revision.directory),
                 snapshot_id=r["snapshot_id"],
                 origin_url=self.url,
             )
         except Exception:
             logger.exception("Problem when trying to update the deposit's status")
             return {"status": "failed"}
         return r
 
 
 def parse_author(author) -> Person:
     """See prior fixme
 
     """
     return Person(
         fullname=author["fullname"].encode("utf-8"),
         name=author["name"].encode("utf-8"),
         email=author["email"].encode("utf-8"),
     )
 
 
 class ApiClient:
     """Private Deposit Api client
 
     """
 
     def __init__(self, url, auth: Optional[Mapping[str, str]]):
         self.base_url = url.rstrip("/")
         self.auth = None if not auth else (auth["username"], auth["password"])
 
     def do(self, method: str, url: str, *args, **kwargs):
         """Internal method to deal with requests, possibly with basic http
            authentication.
 
         Args:
             method (str): HTTP method to use, one of get/post/put
 
         Returns:
             The request's execution output
 
         """
         method_fn = getattr(requests, method)
         if self.auth:
             kwargs["auth"] = self.auth
         return method_fn(url, *args, **kwargs)
 
     def archive_get(
         self, deposit_id: Union[int, str], tmpdir: str, filename: str
     ) -> Tuple[str, Dict]:
         """Retrieve deposit's archive artifact locally
 
         """
         url = f"{self.base_url}/{deposit_id}/raw/"
         return download(url, dest=tmpdir, filename=filename, auth=self.auth)
 
     def metadata_url(self, deposit_id: Union[int, str]) -> str:
         return f"{self.base_url}/{deposit_id}/meta/"
 
     def metadata_get(self, deposit_id: Union[int, str]) -> Dict[str, Any]:
         """Retrieve deposit's metadata artifact as json
 
         """
         url = self.metadata_url(deposit_id)
         r = self.do("get", url)
         if r.ok:
             return r.json()
 
         msg = f"Problem when retrieving deposit metadata at {url}"
         logger.error(msg)
         raise ValueError(msg)
 
     def status_update(
         self,
         deposit_id: Union[int, str],
         status: str,
         revision_id: Optional[str] = None,
         directory_id: Optional[str] = None,
         snapshot_id: Optional[str] = None,
         origin_url: Optional[str] = None,
     ):
         """Update deposit's information including status, and persistent
            identifiers result of the loading.
 
         """
         url = f"{self.base_url}/{deposit_id}/update/"
         payload = {"status": status}
         if revision_id:
             payload["revision_id"] = revision_id
         if directory_id:
             payload["directory_id"] = directory_id
         if snapshot_id:
             payload["snapshot_id"] = snapshot_id
         if origin_url:
             payload["origin_url"] = origin_url
 
         self.do("put", url, json=payload)
diff --git a/swh/loader/package/deposit/tests/test_deposit.py b/swh/loader/package/deposit/tests/test_deposit.py
index d5a7063..9a210ec 100644
--- a/swh/loader/package/deposit/tests/test_deposit.py
+++ b/swh/loader/package/deposit/tests/test_deposit.py
@@ -1,516 +1,453 @@
 # Copyright (C) 2019-2021  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import json
 import re
 from typing import List
 
 import pytest
 
 from swh.core.pytest_plugin import requests_mock_datadir_factory
 from swh.loader.package.deposit.loader import ApiClient, DepositLoader
 from swh.loader.package.loader import now
-from swh.loader.package.tests.common import check_metadata_paths
 from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
 from swh.model.hashutil import hash_to_bytes, hash_to_hex
 from swh.model.identifiers import (
     CoreSWHID,
     ExtendedObjectType,
     ExtendedSWHID,
     ObjectType,
 )
 from swh.model.model import (
     MetadataAuthority,
     MetadataAuthorityType,
     MetadataFetcher,
     Origin,
     RawExtrinsicMetadata,
     Snapshot,
     SnapshotBranch,
     TargetType,
 )
 
 DEPOSIT_URL = "https://deposit.softwareheritage.org/1/private"
 
 
 @pytest.fixture
 def requests_mock_datadir(requests_mock_datadir):
     """Enhance default mock data to mock put requests as the loader does some
        internal update queries there.
 
     """
     requests_mock_datadir.put(re.compile("https"))
     return requests_mock_datadir
 
 
 def test_deposit_init_ok(swh_storage, deposit_client, swh_loader_config):
     url = "some-url"
     deposit_id = 999
     loader = DepositLoader(
         swh_storage, url, deposit_id, deposit_client, default_filename="archive.zip"
     )  # Something that does not exist
 
     assert loader.url == url
     assert loader.client is not None
     assert loader.client.base_url == swh_loader_config["deposit"]["url"]
 
 
 def test_deposit_from_configfile(swh_config):
     """Ensure the deposit instantiation is ok
 
     """
     loader = DepositLoader.from_configfile(
         url="some-url", deposit_id="666", default_filename="archive.zip"
     )
 
     assert isinstance(loader.client, ApiClient)
 
 
 def test_deposit_loading_unknown_deposit(
     swh_storage, deposit_client, requests_mock_datadir
 ):
     """Loading an unknown deposit should fail
 
     no origin, no visit, no snapshot
     """
     # private api url form: 'https://deposit.s.o/1/private/hal/666/raw/'
     url = "some-url"
     unknown_deposit_id = 667
     loader = DepositLoader(
         swh_storage,
         url,
         unknown_deposit_id,
         deposit_client,
         default_filename="archive.zip",
     )  # does not exist
 
     actual_load_status = loader.load()
     assert actual_load_status == {"status": "failed"}
 
     stats = get_stats(loader.storage)
 
     assert {
         "content": 0,
         "directory": 0,
         "origin": 0,
         "origin_visit": 0,
         "release": 0,
         "revision": 0,
         "skipped_content": 0,
         "snapshot": 0,
     } == stats
 
 
 requests_mock_datadir_missing_one = requests_mock_datadir_factory(
     ignore_urls=[f"{DEPOSIT_URL}/666/raw/",]
 )
 
 
 def test_deposit_loading_failure_to_retrieve_1_artifact(
     swh_storage, deposit_client, requests_mock_datadir_missing_one
 ):
     """Deposit with missing artifact ends up with an uneventful/partial visit
 
     """
     # private api url form: 'https://deposit.s.o/1/private/hal/666/raw/'
     url = "some-url-2"
     deposit_id = 666
     requests_mock_datadir_missing_one.put(re.compile("https"))
     loader = DepositLoader(
         swh_storage, url, deposit_id, deposit_client, default_filename="archive.zip"
     )
 
     actual_load_status = loader.load()
     assert actual_load_status["status"] == "uneventful"
     assert actual_load_status["snapshot_id"] is not None
 
     assert_last_visit_matches(loader.storage, url, status="partial", type="deposit")
 
     stats = get_stats(loader.storage)
     assert {
         "content": 0,
         "directory": 0,
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": 0,
         "skipped_content": 0,
         "snapshot": 1,
     } == stats
 
     # Retrieve the information for deposit status update query to the deposit
     urls = [
         m
         for m in requests_mock_datadir_missing_one.request_history
         if m.url == f"{DEPOSIT_URL}/{deposit_id}/update/"
     ]
 
     assert len(urls) == 1
     update_query = urls[0]
 
     body = update_query.json()
     expected_body = {
         "status": "failed",
     }
 
     assert body == expected_body
 
 
-def test_deposit_revision_metadata_structure(
-    swh_storage, deposit_client, requests_mock_datadir
-):
-    url = "https://hal-test.archives-ouvertes.fr/some-external-id"
-    deposit_id = 666
-    loader = DepositLoader(
-        swh_storage, url, deposit_id, deposit_client, default_filename="archive.zip"
-    )
-
-    actual_load_status = loader.load()
-    assert actual_load_status["status"] == "eventful"
-    assert actual_load_status["snapshot_id"] is not None
-    expected_revision_id = hash_to_bytes("637318680351f5d78856d13264faebbd91efe9bb")
-    revision = loader.storage.revision_get([expected_revision_id])[0]
-    assert revision is not None
-
-    check_metadata_paths(
-        revision.metadata,
-        paths=[
-            ("extrinsic.provider", str),
-            ("extrinsic.when", str),
-            ("extrinsic.raw", dict),
-            ("original_artifact", list),
-        ],
-    )
-
-    # Only 2 top-level keys now
-    assert set(revision.metadata.keys()) == {"extrinsic", "original_artifact"}
-
-    for original_artifact in revision.metadata["original_artifact"]:
-        check_metadata_paths(
-            original_artifact,
-            paths=[("filename", str), ("length", int), ("checksums", dict),],
-        )
-
-
 def test_deposit_loading_ok(swh_storage, deposit_client, requests_mock_datadir):
     url = "https://hal-test.archives-ouvertes.fr/some-external-id"
     deposit_id = 666
     loader = DepositLoader(
         swh_storage, url, deposit_id, deposit_client, default_filename="archive.zip"
     )
 
     actual_load_status = loader.load()
     expected_snapshot_id = "b2b327b33dc85818bd23c3ccda8b7e675a66ecbd"
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id,
     }
 
     assert_last_visit_matches(loader.storage, url, status="full", type="deposit")
 
     stats = get_stats(loader.storage)
     assert {
         "content": 303,
         "directory": 12,
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": 1,
         "skipped_content": 0,
         "snapshot": 1,
     } == stats
 
     revision_id_hex = "637318680351f5d78856d13264faebbd91efe9bb"
     revision_id = hash_to_bytes(revision_id_hex)
 
     expected_snapshot = Snapshot(
         id=hash_to_bytes(expected_snapshot_id),
         branches={
             b"HEAD": SnapshotBranch(
                 target=revision_id, target_type=TargetType.REVISION,
             ),
         },
     )
     check_snapshot(expected_snapshot, storage=loader.storage)
 
     revision = loader.storage.revision_get([revision_id])[0]
     assert revision is not None
 
     # check metadata
 
     fetcher = MetadataFetcher(name="swh-deposit", version="0.0.1",)
 
     authority = MetadataAuthority(
         type=MetadataAuthorityType.DEPOSIT_CLIENT,
         url="https://hal-test.archives-ouvertes.fr/",
     )
 
     # Check origin metadata
     orig_meta = loader.storage.raw_extrinsic_metadata_get(
         Origin(url).swhid(), authority
     )
     assert orig_meta.next_page_token is None
     raw_meta = loader.client.metadata_get(deposit_id)
     all_metadata_raw: List[str] = raw_meta["metadata_raw"]
     # 2 raw metadata xml + 1 json dict
     assert len(orig_meta.results) == len(all_metadata_raw) + 1
     orig_meta0 = orig_meta.results[0]
     assert orig_meta0.authority == authority
     assert orig_meta0.fetcher == fetcher
 
     # Check directory metadata
     directory_swhid = CoreSWHID(
         object_type=ObjectType.DIRECTORY, object_id=revision.directory
     )
     actual_dir_meta = loader.storage.raw_extrinsic_metadata_get(
         directory_swhid, authority
     )
     assert actual_dir_meta.next_page_token is None
     assert len(actual_dir_meta.results) == len(all_metadata_raw)
     for dir_meta in actual_dir_meta.results:
         assert dir_meta.authority == authority
         assert dir_meta.fetcher == fetcher
         assert dir_meta.metadata.decode() in all_metadata_raw
 
     # Retrieve the information for deposit status update query to the deposit
     urls = [
         m
         for m in requests_mock_datadir.request_history
         if m.url == f"{DEPOSIT_URL}/{deposit_id}/update/"
     ]
 
     assert len(urls) == 1
     update_query = urls[0]
 
     body = update_query.json()
     expected_body = {
         "status": "done",
         "revision_id": revision_id_hex,
         "directory_id": hash_to_hex(revision.directory),
         "snapshot_id": expected_snapshot_id,
         "origin_url": url,
     }
 
     assert body == expected_body
 
 
 def test_deposit_loading_ok_2(swh_storage, deposit_client, requests_mock_datadir):
     """Field dates should be se appropriately
 
     """
     external_id = "some-external-id"
     url = f"https://hal-test.archives-ouvertes.fr/{external_id}"
     deposit_id = 777
     loader = DepositLoader(
         swh_storage, url, deposit_id, deposit_client, default_filename="archive.zip"
     )
 
     actual_load_status = loader.load()
     expected_snapshot_id = "3e68440fdd7c81d283f8f3aebb6f0c8657864192"
 
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id,
     }
     assert_last_visit_matches(loader.storage, url, status="full", type="deposit")
 
     revision_id = "564d18943d71be80d0d73b43a77cfb205bcde96c"
     expected_snapshot = Snapshot(
         id=hash_to_bytes(expected_snapshot_id),
         branches={
             b"HEAD": SnapshotBranch(
                 target=hash_to_bytes(revision_id), target_type=TargetType.REVISION
             )
         },
     )
 
     check_snapshot(expected_snapshot, storage=loader.storage)
 
     raw_meta = loader.client.metadata_get(deposit_id)
     # Ensure the date fields are set appropriately in the revision
 
     # Retrieve the revision
     revision = loader.storage.revision_get([hash_to_bytes(revision_id)])[0]
     assert revision
     assert revision.date.to_dict() == raw_meta["deposit"]["author_date"]
     assert revision.committer_date.to_dict() == raw_meta["deposit"]["committer_date"]
-
-    read_api = f"{DEPOSIT_URL}/{deposit_id}/meta/"
+    assert not revision.metadata
 
     provider = {
         "provider_name": "hal",
         "provider_type": "deposit_client",
         "provider_url": "https://hal-test.archives-ouvertes.fr/",
         "metadata": None,
     }
     tool = {
         "name": "swh-deposit",
         "version": "0.0.1",
         "configuration": {"sword_version": "2"},
     }
-    assert revision.metadata == {
-        "extrinsic": {
-            "provider": read_api,
-            "raw": {
-                "origin": {"type": "deposit", "url": url,},
-                "origin_metadata": {
-                    "metadata": raw_meta["metadata_dict"],
-                    "provider": provider,
-                    "tool": tool,
-                },
-            },
-            "when": revision.metadata["extrinsic"]["when"],  # dynamic
-        },
-        "original_artifact": [
-            {
-                "checksums": {
-                    "sha1": "f8c63d7c890a7453498e6cf9fef215d85ec6801d",
-                    "sha256": "474bf646aeeff6d945eb752b1a9f8a40f3d81a88909ee7bd2d08cc822aa361e6",  # noqa
-                },
-                "filename": "archive.zip",
-                "length": 956830,
-                "url": "https://deposit.softwareheritage.org/1/private/777/raw/",
-            }
-        ],
-    }
 
     fetcher = MetadataFetcher(name="swh-deposit", version="0.0.1",)
 
     authority = MetadataAuthority(
         type=MetadataAuthorityType.DEPOSIT_CLIENT,
         url="https://hal-test.archives-ouvertes.fr/",
     )
 
     # Check the origin metadata swh side
     origin_extrinsic_metadata = loader.storage.raw_extrinsic_metadata_get(
         Origin(url).swhid(), authority
     )
     assert origin_extrinsic_metadata.next_page_token is None
     all_metadata_raw: List[str] = raw_meta["metadata_raw"]
     # 1 raw metadata xml + 1 json dict
     assert len(origin_extrinsic_metadata.results) == len(all_metadata_raw) + 1
 
     origin_swhid = Origin(url).swhid()
 
     expected_metadata = []
     for idx, raw_meta in enumerate(all_metadata_raw):
         origin_meta = origin_extrinsic_metadata.results[idx]
         expected_metadata.append(
             RawExtrinsicMetadata(
                 target=origin_swhid,
                 discovery_date=origin_meta.discovery_date,
                 metadata=raw_meta.encode(),
                 format="sword-v2-atom-codemeta-v2",
                 authority=authority,
                 fetcher=fetcher,
             )
         )
 
     origin_metadata = {
         "metadata": all_metadata_raw,
         "provider": provider,
         "tool": tool,
     }
     expected_metadata.append(
         RawExtrinsicMetadata(
             target=origin_swhid,
             discovery_date=origin_extrinsic_metadata.results[-1].discovery_date,
             metadata=json.dumps(origin_metadata).encode(),
             format="original-artifacts-json",
             authority=authority,
             fetcher=fetcher,
         )
     )
 
     assert sorted(origin_extrinsic_metadata.results) == sorted(expected_metadata)
 
     # Check the revision metadata swh side
     directory_swhid = ExtendedSWHID(
         object_type=ExtendedObjectType.DIRECTORY, object_id=revision.directory
     )
     actual_directory_metadata = loader.storage.raw_extrinsic_metadata_get(
         directory_swhid, authority
     )
 
     assert actual_directory_metadata.next_page_token is None
     assert len(actual_directory_metadata.results) == len(all_metadata_raw)
 
     revision_swhid = CoreSWHID(
         object_type=ObjectType.REVISION, object_id=hash_to_bytes(revision_id)
     )
     dir_metadata_template = RawExtrinsicMetadata(
         target=directory_swhid,
         format="sword-v2-atom-codemeta-v2",
         authority=authority,
         fetcher=fetcher,
         origin=url,
         revision=revision_swhid,
         # to satisfy the constructor
         discovery_date=now(),
         metadata=b"",
     )
 
     expected_directory_metadata = []
     for idx, raw_meta in enumerate(all_metadata_raw):
         dir_metadata = actual_directory_metadata.results[idx]
         expected_directory_metadata.append(
             RawExtrinsicMetadata.from_dict(
                 {
                     **{
                         k: v
                         for (k, v) in dir_metadata_template.to_dict().items()
                         if k != "id"
                     },
                     "discovery_date": dir_metadata.discovery_date,
                     "metadata": raw_meta.encode(),
                 }
             )
         )
 
     assert sorted(actual_directory_metadata.results) == sorted(
         expected_directory_metadata
     )
 
     # Retrieve the information for deposit status update query to the deposit
     urls = [
         m
         for m in requests_mock_datadir.request_history
         if m.url == f"{DEPOSIT_URL}/{deposit_id}/update/"
     ]
 
     assert len(urls) == 1
     update_query = urls[0]
 
     body = update_query.json()
     expected_body = {
         "status": "done",
         "revision_id": revision_id,
         "directory_id": hash_to_hex(revision.directory),
         "snapshot_id": expected_snapshot_id,
         "origin_url": url,
     }
 
     assert body == expected_body
 
 
 def test_deposit_loading_ok_3(swh_storage, deposit_client, requests_mock_datadir):
     """Deposit loading can happen on tarball artifacts as well
 
     The latest deposit changes introduce the internal change.
 
     """
     external_id = "hal-123456"
     url = f"https://hal-test.archives-ouvertes.fr/{external_id}"
     deposit_id = 888
     loader = DepositLoader(swh_storage, url, deposit_id, deposit_client)
 
     actual_load_status = loader.load()
     expected_snapshot_id = "0ac7b54c042a026389f2087dc16f1d5c644ed0e4"
 
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id,
     }
     assert_last_visit_matches(loader.storage, url, status="full", type="deposit")
diff --git a/swh/loader/package/loader.py b/swh/loader/package/loader.py
index 820a75f..5656e3c 100644
--- a/swh/loader/package/loader.py
+++ b/swh/loader/package/loader.py
@@ -1,1036 +1,977 @@
 # Copyright (C) 2019-2021  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import datetime
 import hashlib
 from itertools import islice
 import json
 import logging
 import os
 import string
 import sys
 import tempfile
 from typing import (
     Any,
     Dict,
     Generic,
     Iterable,
     Iterator,
     List,
     Mapping,
     Optional,
     Sequence,
     Set,
     Tuple,
     TypeVar,
 )
 
 import attr
 import sentry_sdk
 
 from swh.core.tarball import uncompress
 from swh.loader.core.loader import BaseLoader
 from swh.loader.exception import NotFound
 from swh.loader.package.utils import download
 from swh.model import from_disk
 from swh.model.collections import ImmutableDict
 from swh.model.hashutil import hash_to_hex
 from swh.model.identifiers import (
     CoreSWHID,
     ExtendedObjectType,
     ExtendedSWHID,
     ObjectType,
 )
 from swh.model.model import (
     ExtID,
     MetadataAuthority,
     MetadataAuthorityType,
     MetadataFetcher,
     Origin,
     OriginVisit,
     OriginVisitStatus,
     RawExtrinsicMetadata,
     Revision,
     Sha1Git,
     Snapshot,
     TargetType,
 )
 from swh.storage.algos.snapshot import snapshot_get_latest
 from swh.storage.interface import StorageInterface
 from swh.storage.utils import now
 
 logger = logging.getLogger(__name__)
 
 
 SWH_METADATA_AUTHORITY = MetadataAuthority(
     type=MetadataAuthorityType.REGISTRY,
     url="https://softwareheritage.org/",
     metadata={},
 )
 """Metadata authority for extrinsic metadata generated by Software Heritage.
 Used for metadata on "original artifacts", ie. length, filename, and checksums
 of downloaded archive files."""
 
 
 PartialExtID = Tuple[str, bytes]
 """The ``extid_type`` and ``extid`` fields of an :ref:py:`ExtID` object."""
 
 
 @attr.s
 class RawExtrinsicMetadataCore:
     """Contains the core of the metadata extracted by a loader, that will be
     used to build a full RawExtrinsicMetadata object by adding object identifier,
     context, and provenance information."""
 
     format = attr.ib(type=str)
     metadata = attr.ib(type=bytes)
     discovery_date = attr.ib(type=Optional[datetime.datetime], default=None)
     """Defaults to the visit date."""
 
 
 @attr.s
 class BasePackageInfo:
     """Compute the primary key for a dict using the id_keys as primary key
        composite.
 
     Args:
         d: A dict entry to compute the primary key on
         id_keys: Sequence of keys to use as primary key
 
     Returns:
         The identity for that dict entry
 
     """
 
     url = attr.ib(type=str)
     filename = attr.ib(type=Optional[str])
 
     MANIFEST_FORMAT: Optional[string.Template] = None
     """If not None, used by the default extid() implementation to format a manifest,
     before hashing it to produce an ExtID."""
 
     EXTID_TYPE: str = "package-manifest-sha256"
 
     # The following attribute has kw_only=True in order to allow subclasses
     # to add attributes. Without kw_only, attributes without default values cannot
     # go after attributes with default values.
     # See <https://github.com/python-attrs/attrs/issues/38>
 
     directory_extrinsic_metadata = attr.ib(
         type=List[RawExtrinsicMetadataCore], default=[], kw_only=True,
     )
     """:term:`extrinsic metadata` collected by the loader, that will be attached to the
     loaded directory and added to the Metadata storage."""
 
     # TODO: add support for metadata for revisions and contents
 
     def extid(self) -> Optional[PartialExtID]:
         """Returns a unique intrinsic identifier of this package info,
         or None if this package info is not 'deduplicatable' (meaning that
         we will always load it, instead of checking the ExtID storage
         to see if we already did)"""
         if self.MANIFEST_FORMAT is None:
             return None
         else:
             manifest = self.MANIFEST_FORMAT.substitute(
                 {k: str(v) for (k, v) in attr.asdict(self).items()}
             )
             return (self.EXTID_TYPE, hashlib.sha256(manifest.encode()).digest())
 
 
 TPackageInfo = TypeVar("TPackageInfo", bound=BasePackageInfo)
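
An illustrative sketch (editorial aside, not part of the patch) of how a subclass
opts into ExtID-based deduplication via MANIFEST_FORMAT; the class and field names
are hypothetical, and attr/string are already imported by this module:

@attr.s
class ExamplePackageInfo(BasePackageInfo):
    version = attr.ib(type=str)

    # substituted with this object's attributes, then sha256-hashed by extid()
    MANIFEST_FORMAT = string.Template("$url $version")

p_info = ExamplePackageInfo(
    url="https://example.org/pkg-1.0.tar.gz", filename=None, version="1.0"
)
extid_type, extid = p_info.extid()  # ("package-manifest-sha256", <sha256 digest>)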
 
 
 class PackageLoader(BaseLoader, Generic[TPackageInfo]):
     # Origin visit type (str) set by the loader
     visit_type = ""
     visit_date: datetime.datetime
 
     def __init__(
         self,
         storage: StorageInterface,
         url: str,
         max_content_size: Optional[int] = None,
     ):
         """Loader's constructor. This raises exception if the minimal required
            configuration is missing (cf. fn:`check` method).
 
         Args:
             storage: Storage instance
             url: Origin url to load data from
 
         """
         super().__init__(storage=storage, max_content_size=max_content_size)
         self.url = url
         self.visit_date = datetime.datetime.now(tz=datetime.timezone.utc)
 
     def get_versions(self) -> Sequence[str]:
         """Return the list of all published package versions.
 
         Raises:
            :class:`swh.loader.exception.NotFound` error when failing to read the
             published package versions.
 
         Returns:
             Sequence of published versions
 
         """
         return []
 
     def get_package_info(self, version: str) -> Iterator[Tuple[str, TPackageInfo]]:
         """Given a release version of a package, retrieve the associated
            package information for that version.
 
         Args:
             version: Package version
 
         Returns:
             (branch name, package metadata)
 
         """
         yield from {}
 
     def build_revision(
         self, p_info: TPackageInfo, uncompressed_path: str, directory: Sha1Git
     ) -> Optional[Revision]:
         """Build the revision from the archive metadata (extrinsic
         artifact metadata) and the intrinsic metadata.
 
         Args:
             p_info: Package information
             uncompressed_path: Artifact uncompressed path on disk
 
         Returns:
             Revision object
 
         """
         raise NotImplementedError("build_revision")
 
     def get_default_version(self) -> str:
         """Retrieve the latest release version if any.
 
         Returns:
             Latest version
 
         """
         return ""
 
     def last_snapshot(self) -> Optional[Snapshot]:
         """Retrieve the last snapshot out of the last visit.
 
         """
         return snapshot_get_latest(self.storage, self.url)
 
     def known_artifacts(
         self, snapshot: Optional[Snapshot]
     ) -> Dict[Sha1Git, Optional[ImmutableDict[str, object]]]:
         """Retrieve the known releases/artifact for the origin.
 
         Args
             snapshot: snapshot for the visit
 
         Returns:
             Dict of keys revision id (bytes), values a metadata Dict.
 
         """
         if not snapshot:
             return {}
 
         # retrieve only revisions (we do not want the aliases here)
         revs = [
             rev.target
             for rev in snapshot.branches.values()
             if rev and rev.target_type == TargetType.REVISION
         ]
         known_revisions = self.storage.revision_get(revs)
         return {
             revision.id: revision.metadata for revision in known_revisions if revision
         }
 
     def new_packageinfo_to_extid(self, p_info: TPackageInfo) -> Optional[PartialExtID]:
         return p_info.extid()
 
-    def known_artifact_to_extid(self, known_artifact: Dict) -> Optional[PartialExtID]:
-        """Returns a unique intrinsic identifier of a downloaded artifact,
-        used to check if a new artifact is the same."""
-        return None
-
-    def resolve_revision_from_artifacts(
-        self, known_artifacts: Dict[Sha1Git, Any], p_info: TPackageInfo,
-    ) -> Optional[Sha1Git]:
-        """Resolve the revision from known artifact metadata and a package info object.
-
-        If the artifact has already been downloaded, this will return the
-        existing revision targeting that uncompressed artifact directory.
-        Otherwise, this returns None.
-
-        Args:
-            known_artifacts: dict from revision ids to revision metadata
-            p_info: Package information
-
-        Returns:
-            None or revision identifier
-
-        """
-        if not known_artifacts:
-            # No known artifact, no need to compute the artifact's extid
-            return None
-
-        new_extid = self.new_packageinfo_to_extid(p_info)
-        if new_extid is None:
-            # This loader does not support deduplication, at least not for this
-            # artifact.
-            return None
-
-        for rev_id, known_artifact in known_artifacts.items():
-            known_extid = self.known_artifact_to_extid(known_artifact)
-            if new_extid == known_extid:
-                return rev_id
-
-        return None
-
     def _get_known_extids(
         self, packages_info: List[TPackageInfo]
     ) -> Dict[PartialExtID, List[CoreSWHID]]:
         """Compute the ExtIDs from new PackageInfo objects, searches which are already
         loaded in the archive, and returns them if any."""
 
         # Compute the ExtIDs of all the new packages, grouped by extid type
         new_extids: Dict[str, List[bytes]] = {}
         for p_info in packages_info:
             res = p_info.extid()
             if res is not None:
                 (extid_type, extid_extid) = res
                 new_extids.setdefault(extid_type, []).append(extid_extid)
 
         # For each extid type, call extid_get_from_extid() with all the extids of
         # that type, and store them in the '(type, extid) -> target' map.
         known_extids: Dict[PartialExtID, List[CoreSWHID]] = {}
         for (extid_type, extids) in new_extids.items():
             for extid in self.storage.extid_get_from_extid(extid_type, extids):
                 if extid is not None:
                     key = (extid.extid_type, extid.extid)
                     known_extids.setdefault(key, []).append(extid.target)
 
         return known_extids
 
     def resolve_revision_from_extids(
         self,
         known_extids: Dict[PartialExtID, List[CoreSWHID]],
         p_info: TPackageInfo,
         revision_whitelist: Set[Sha1Git],
     ) -> Optional[Sha1Git]:
         """Resolve the revision from known ExtIDs and a package info object.
 
         If the artifact has already been downloaded, this will return the
         existing revision targeting that uncompressed artifact directory.
         Otherwise, this returns None.
 
         Args:
             known_extids: Dict built from a list of ExtID, with the target as value
             p_info: Package information
             revision_whitelist: Any ExtID with target not in this set is filtered out
 
         Returns:
             None or revision identifier
 
         """
         new_extid = p_info.extid()
         if new_extid is None:
             return None
 
         for extid_target in known_extids.get(new_extid, []):
             if extid_target.object_id not in revision_whitelist:
                 # There is a known ExtID for this package, but its target is not
                 # in the snapshot.
                 # This can happen for three reasons:
                 #
                 # 1. a loader crashed after writing the ExtID, but before writing
                 #    the snapshot
                 # 2. some other loader loaded the same artifact, but produced
                 #    a different revision, causing an additional ExtID object
                 #    to be written. We will probably find this loader's ExtID
                 #    in a future iteration of this loop.
                 #    Note that for now, this is impossible, as each loader has a
                 #    completely different extid_type, but this is an implementation
                 #    detail of each loader.
                 # 3. we took a snapshot, then the package disappeared,
                 #    then we took another snapshot, and the package reappeared
                 #
                 # In case of 1, we must actually load the package now,
                 # so let's do it.
                 # TODO: detect when we are in case 3 using revision_missing instead
                 # of the snapshot.
                 continue
             elif extid_target.object_type != ObjectType.REVISION:
                 # We only support revisions for now.
                 # Note that this case should never be reached unless there is a
                 # collision between a revision hash and some non-revision object's
                 # hash, but better safe than sorry.
                 logger.warning(
                     "%s is in the revision whitelist, but is not a revision.",
                     hash_to_hex(extid_target.object_id),
                 )
                 continue
             return extid_target.object_id
 
         return None
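
An illustrative sketch (editorial aside, not part of the patch) of the resolution
above with hand-built inputs; `loader` and `p_info` stand for any concrete loader
and its package info, the hash is made up, and CoreSWHID/ObjectType are the
module's own imports:

rev_id = bytes.fromhex("2807f5b3f84368b4889a9ae827fe85854ffecf07")
known_extids = {
    ("package-manifest-sha256", b"\x00" * 32): [
        CoreSWHID(object_type=ObjectType.REVISION, object_id=rev_id)
    ],
}
# rev_id is returned only if p_info.extid() matches the key above *and* rev_id
# was a target of the previous snapshot (the whitelist); otherwise None.
resolved = loader.resolve_revision_from_extids(known_extids, p_info, {rev_id})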
 
     def download_package(
         self, p_info: TPackageInfo, tmpdir: str
     ) -> List[Tuple[str, Mapping]]:
         """Download artifacts for a specific package. All downloads happen in
         in the tmpdir folder.
 
         Default implementation expects the artifacts package info to be
         about one artifact per package.
 
         Note that most implementations have 1 artifact per package. But some
         implementations have multiple artifacts per package (debian), and some
         have none, where the package itself is the artifact (gnu).
 
         Args:
             p_info: Information on the package artifacts to
                 download (url, filename, etc.)
             tmpdir: Location to retrieve such artifacts
 
         Returns:
             List of (path, computed hashes)
 
         """
         return [download(p_info.url, dest=tmpdir, filename=p_info.filename)]
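
A hedged sketch (editorial aside, not part of the patch) of the method body a
subclass with several artifacts per package might define, as mentioned for debian
above; the `artifact_urls` attribute on the package info is hypothetical:

def download_package(self, p_info: TPackageInfo, tmpdir: str) -> List[Tuple[str, Mapping]]:
    # one download() call per (filename, url) pair carried by the package info
    return [
        download(url, dest=tmpdir, filename=filename)
        for (filename, url) in p_info.artifact_urls
    ]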
 
     def uncompress(
         self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], dest: str
     ) -> str:
         """Uncompress the artifact(s) in the destination folder dest.
 
         Some loaders may additionally need information from the p_info dict
         (debian).
 
         """
         uncompressed_path = os.path.join(dest, "src")
         for a_path, _ in dl_artifacts:
             uncompress(a_path, dest=uncompressed_path)
         return uncompressed_path
 
     def extra_branches(self) -> Dict[bytes, Mapping[str, Any]]:
         """Return an extra dict of branches that are used to update the set of
         branches.
 
         """
         return {}
 
     def finalize_visit(
         self,
         *,
         snapshot: Optional[Snapshot],
         visit: OriginVisit,
         status_visit: str,
         status_load: str,
         failed_branches: List[str],
     ) -> Dict[str, Any]:
         """Finalize the visit:
 
         - flush any unflushed data to storage
         - update origin visit's status
         - return the task's status
 
         """
         self.storage.flush()
 
         snapshot_id: Optional[bytes] = None
         if snapshot and snapshot.id:  # guard against an empty snapshot.id (b"")
             snapshot_id = snapshot.id
         assert visit.visit
         visit_status = OriginVisitStatus(
             origin=self.url,
             visit=visit.visit,
             type=self.visit_type,
             date=now(),
             status=status_visit,
             snapshot=snapshot_id,
         )
         self.storage.origin_visit_status_add([visit_status])
         result: Dict[str, Any] = {
             "status": status_load,
         }
         if snapshot_id:
             result["snapshot_id"] = hash_to_hex(snapshot_id)
         if failed_branches:
             logger.warning("%d failed branches", len(failed_branches))
             for i, urls in enumerate(islice(failed_branches, 50)):
                 prefix_url = "Failed branches: " if i == 0 else ""
                 logger.warning("%s%s", prefix_url, urls)
 
         return result
 
     def load(self) -> Dict:
         """Load for a specific origin the associated contents.
 
         1. Get the list of versions in an origin.
 
         2. Get the snapshot from the previous run of the loader,
            and filter out versions that were already loaded, if their
            :term:`extids <extid>` match
 
         Then, for each remaining version in the origin
 
         3. Fetch the files for one package version By default, this can be
            implemented as a simple HTTP request. Loaders with more specific
            requirements can override this, e.g.: the PyPI loader checks the
            integrity of the downloaded files; the Debian loader has to download
            and check several files for one package version.
 
         4. Extract the downloaded files. By default, this would be a universal
            archive/tarball extraction.
 
            Loaders for specific formats can override this method (for instance,
            the Debian loader uses dpkg-source -x).
 
         5. Convert the extracted directory to a set of Software Heritage
            objects using swh.model.from_disk.
 
         6. Extract the metadata from the unpacked directories. This would only
            be applicable for "smart" loaders like npm (parsing the
            package.json), PyPI (parsing the PKG-INFO file) or Debian (parsing
            debian/changelog and debian/control).
 
            On "minimal-metadata" sources such as the GNU archive, the lister
            should provide the minimal set of metadata needed to populate the
            revision/release objects (authors, dates) as an argument to the
            task.
 
         7. Generate the revision/release objects for the given version, from
            the data generated at steps 3 and 4.
 
         end for each
 
         8. Generate and load the snapshot for the visit
 
         Using the revisions/releases collected at step 7 and the branch
         information from step 2, generate a snapshot and load it into the
         Software Heritage archive
 
         """
         status_load = "uneventful"  # either: eventful, uneventful, failed
         status_visit = "full"  # see swh.model.model.OriginVisitStatus
         snapshot = None
         failed_branches: List[str] = []
 
         # Prepare origin and origin_visit
         origin = Origin(url=self.url)
         try:
             self.storage.origin_add([origin])
             visit = list(
                 self.storage.origin_visit_add(
                     [
                         OriginVisit(
                             origin=self.url, date=self.visit_date, type=self.visit_type,
                         )
                     ]
                 )
             )[0]
         except Exception as e:
             logger.exception("Failed to initialize origin_visit for %s", self.url)
             sentry_sdk.capture_exception(e)
             return {"status": "failed"}
 
         # Get the previous snapshot for this origin. It is then used to see which
         # of the package's versions are already loaded in the archive.
         try:
             last_snapshot = self.last_snapshot()
             logger.debug("last snapshot: %s", last_snapshot)
             known_artifacts = self.known_artifacts(last_snapshot)
             logger.debug("known artifacts: %s", known_artifacts)
         except Exception as e:
             logger.exception("Failed to get previous state for %s", self.url)
             sentry_sdk.capture_exception(e)
             return self.finalize_visit(
                 snapshot=snapshot,
                 visit=visit,
                 failed_branches=failed_branches,
                 status_visit="failed",
                 status_load="failed",
             )
 
         load_exceptions: List[Exception] = []
 
         # Get the list of all version names
         try:
             versions = self.get_versions()
         except NotFound:
             return self.finalize_visit(
                 snapshot=snapshot,
                 visit=visit,
                 failed_branches=failed_branches,
                 status_visit="not_found",
                 status_load="failed",
             )
         except Exception:
             return self.finalize_visit(
                 snapshot=snapshot,
                 visit=visit,
                 failed_branches=failed_branches,
                 status_visit="failed",
                 status_load="failed",
             )
 
         # Get the metadata of each version's package
         packages_info: List[Tuple[str, str, TPackageInfo]] = [
             (version, branch_name, p_info)
             for version in versions
             for (branch_name, p_info) in self.get_package_info(version)
         ]
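         # Each entry is a (version, branch name, package info) triple; a single
         # version may yield several branches, hence several entries.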
 
         # Compute the ExtID of each of these packages
         known_extids = self._get_known_extids(
             [p_info for (_, _, p_info) in packages_info]
         )
 
         if last_snapshot is None:
             last_snapshot_targets: Set[Sha1Git] = set()
         else:
             last_snapshot_targets = {
                 branch.target for branch in last_snapshot.branches.values()
             }
 
         new_extids: Set[ExtID] = set()
         tmp_revisions: Dict[str, List[Tuple[str, Sha1Git]]] = {
             version: [] for version in versions
         }
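         # tmp_revisions maps each version to its (branch name, revision id) pairs,
         # e.g. (illustrative) {"1.0": [("releases/1.0", <some sha1_git>)]}.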
         for (version, branch_name, p_info) in packages_info:
             logger.debug("package_info: %s", p_info)
 
             # Check if the package was already loaded, using its ExtID
             revision_id = self.resolve_revision_from_extids(
                 known_extids, p_info, last_snapshot_targets
             )
 
-            if revision_id is None:
-                # No existing revision found from an acceptable ExtID,
-                # search in the artifact data instead.
-                # TODO: remove this after we finished migrating to ExtIDs.
-                revision_id = self.resolve_revision_from_artifacts(
-                    known_artifacts, p_info
-                )
-
             if revision_id is None:
                 # No matching revision found in the last snapshot, load it.
                 try:
                     res = self._load_revision(p_info, origin)
                     if res:
                         (revision_id, directory_id) = res
                         assert revision_id
                         assert directory_id
                         self._load_extrinsic_directory_metadata(
                             p_info, revision_id, directory_id
                         )
                     self.storage.flush()
                     status_load = "eventful"
                 except Exception as e:
                     self.storage.clear_buffers()
                     load_exceptions.append(e)
                     sentry_sdk.capture_exception(e)
                     logger.exception(
                         "Failed loading branch %s for %s", branch_name, self.url
                     )
                     failed_branches.append(branch_name)
                     continue
 
                 if revision_id is None:
                     continue
 
                 partial_extid = p_info.extid()
                 if partial_extid is not None:
                     (extid_type, extid) = partial_extid
                     revision_swhid = CoreSWHID(
                         object_type=ObjectType.REVISION, object_id=revision_id
                     )
                     new_extids.add(
                         ExtID(extid_type=extid_type, extid=extid, target=revision_swhid)
                     )
 
             tmp_revisions[version].append((branch_name, revision_id))
 
         if load_exceptions:
             status_visit = "partial"
 
         if not tmp_revisions:
             # We could not load any revisions; fail completely
             return self.finalize_visit(
                 snapshot=snapshot,
                 visit=visit,
                 failed_branches=failed_branches,
                 status_visit="failed",
                 status_load="failed",
             )
 
         try:
             # Retrieve the default release version (the "latest" one)
             default_version = self.get_default_version()
             logger.debug("default version: %s", default_version)
             # Retrieve extra branches
             extra_branches = self.extra_branches()
             logger.debug("extra branches: %s", extra_branches)
 
             snapshot = self._load_snapshot(
                 default_version, tmp_revisions, extra_branches
             )
             self.storage.flush()
         except Exception as e:
             logger.exception("Failed to build snapshot for origin %s", self.url)
             sentry_sdk.capture_exception(e)
             status_visit = "failed"
             status_load = "failed"
 
         if snapshot:
             try:
                 metadata_objects = self.build_extrinsic_snapshot_metadata(snapshot.id)
                 self._load_metadata_objects(metadata_objects)
             except Exception as e:
                 logger.exception(
                     "Failed to load extrinsic snapshot metadata for %s", self.url
                 )
                 sentry_sdk.capture_exception(e)
                 status_visit = "partial"
                 status_load = "failed"
 
         try:
             metadata_objects = self.build_extrinsic_origin_metadata()
             self._load_metadata_objects(metadata_objects)
         except Exception as e:
             logger.exception(
                 "Failed to load extrinsic origin metadata for %s", self.url
             )
             sentry_sdk.capture_exception(e)
             status_visit = "partial"
             status_load = "failed"
 
         self._load_extids(new_extids)
 
         return self.finalize_visit(
             snapshot=snapshot,
             visit=visit,
             failed_branches=failed_branches,
             status_visit=status_visit,
             status_load=status_load,
         )
 
     def _load_directory(
         self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], tmpdir: str
     ) -> Tuple[str, from_disk.Directory]:
         uncompressed_path = self.uncompress(dl_artifacts, dest=tmpdir)
         logger.debug("uncompressed_path: %s", uncompressed_path)
 
         directory = from_disk.Directory.from_disk(
             path=uncompressed_path.encode("utf-8"),
             max_content_length=self.max_content_size,
         )
 
         contents, skipped_contents, directories = from_disk.iter_directory(directory)
 
         logger.debug("Number of skipped contents: %s", len(skipped_contents))
         self.storage.skipped_content_add(skipped_contents)
         logger.debug("Number of contents: %s", len(contents))
         self.storage.content_add(contents)
 
         logger.debug("Number of directories: %s", len(directories))
         self.storage.directory_add(directories)
 
         return (uncompressed_path, directory)
 
     def _load_revision(
         self, p_info: TPackageInfo, origin
     ) -> Optional[Tuple[Sha1Git, Sha1Git]]:
         """Does all the loading of a revision itself:
 
         * downloads a package and uncompresses it
         * loads it from disk
         * adds contents, directories, and revision to self.storage
         * returns (revision_id, directory_id)
 
         Raises:
             an exception when artifacts cannot be downloaded or uncompressed
 
         """
         with tempfile.TemporaryDirectory() as tmpdir:
             dl_artifacts = self.download_package(p_info, tmpdir)
 
             (uncompressed_path, directory) = self._load_directory(dl_artifacts, tmpdir)
 
             # FIXME: This should be release. cf. D409
             revision = self.build_revision(
                 p_info, uncompressed_path, directory=directory.hash
             )
             if not revision:
                 # Some artifacts are missing intrinsic metadata
                 # skipping those
                 return None
 
         metadata = [metadata for (filepath, metadata) in dl_artifacts]
-        extra_metadata: Tuple[str, Any] = (
-            "original_artifact",
-            metadata,
-        )
-
-        if revision.metadata is not None:
-            full_metadata = list(revision.metadata.items()) + [extra_metadata]
-        else:
-            full_metadata = [extra_metadata]
-
-        # TODO: don't add these extrinsic metadata to the revision.
-        revision = attr.evolve(revision, metadata=ImmutableDict(full_metadata))
 
         original_artifact_metadata = RawExtrinsicMetadata(
             target=ExtendedSWHID(
                 object_type=ExtendedObjectType.DIRECTORY, object_id=revision.directory
             ),
             discovery_date=self.visit_date,
             authority=SWH_METADATA_AUTHORITY,
             fetcher=self.get_metadata_fetcher(),
             format="original-artifacts-json",
             metadata=json.dumps(metadata).encode(),
             origin=self.url,
             revision=CoreSWHID(object_type=ObjectType.REVISION, object_id=revision.id),
         )
         self._load_metadata_objects([original_artifact_metadata])
 
         logger.debug("Revision: %s", revision)
 
         self.storage.revision_add([revision])
         assert directory.hash
         return (revision.id, directory.hash)
 
     def _load_snapshot(
         self,
         default_version: str,
         revisions: Dict[str, List[Tuple[str, bytes]]],
         extra_branches: Dict[bytes, Mapping[str, Any]],
     ) -> Optional[Snapshot]:
         """Build snapshot out of the current revisions stored and extra branches.
            Then load it in the storage.
 
         """
         logger.debug("revisions: %s", revisions)
         # Build and load the snapshot
         branches = {}  # type: Dict[bytes, Mapping[str, Any]]
         for version, branch_name_revisions in revisions.items():
             if version == default_version and len(branch_name_revisions) == 1:
                 # only 1 branch (no ambiguity), we can create an alias
                 # branch 'HEAD'
                 branch_name, _ = branch_name_revisions[0]
                 # except for some corner case (deposit)
                 if branch_name != "HEAD":
                     branches[b"HEAD"] = {
                         "target_type": "alias",
                         "target": branch_name.encode("utf-8"),
                     }
 
             for branch_name, target in branch_name_revisions:
                 branches[branch_name.encode("utf-8")] = {
                     "target_type": "revision",
                     "target": target,
                 }
 
         # Deal with extra-branches
         for name, branch_target in extra_branches.items():
             if name in branches:
                 logger.error("Extra branch '%s' has been ignored", name)
             else:
                 branches[name] = branch_target
 
         snapshot_data = {"branches": branches}
         logger.debug("snapshot: %s", snapshot_data)
         snapshot = Snapshot.from_dict(snapshot_data)
         logger.debug("snapshot: %s", snapshot)
         self.storage.snapshot_add([snapshot])
 
         return snapshot
 
     def get_loader_name(self) -> str:
         """Returns a fully qualified name of this loader."""
         return f"{self.__class__.__module__}.{self.__class__.__name__}"
 
     def get_loader_version(self) -> str:
         """Returns the version of the current loader."""
         module_name = self.__class__.__module__ or ""
         module_name_parts = module_name.split(".")
 
         # Iterate rootward through the package hierarchy until we find a parent of this
         # loader's module with a __version__ attribute.
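         # For example (illustrative), for "swh.loader.package.npm.loader" we try
         # "swh.loader.package.npm.loader", then "swh.loader.package.npm", then
         # "swh.loader.package", etc., until a module exposing __version__ is found.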
         for prefix_size in range(len(module_name_parts), 0, -1):
             package_name = ".".join(module_name_parts[0:prefix_size])
             module = sys.modules[package_name]
             if hasattr(module, "__version__"):
                 return module.__version__  # type: ignore
 
         # If this loader's class has no parent package with a __version__,
         # it should implement it itself.
         raise NotImplementedError(
             f"Could not dynamically find the version of {self.get_loader_name()}."
         )
 
     def get_metadata_fetcher(self) -> MetadataFetcher:
         """Returns a MetadataFetcher instance representing this package loader;
         which is used to for adding provenance information to extracted
         extrinsic metadata, if any."""
         return MetadataFetcher(
             name=self.get_loader_name(), version=self.get_loader_version(), metadata={},
         )
 
     def get_metadata_authority(self) -> MetadataAuthority:
         """For package loaders that get extrinsic metadata, returns the authority
         the metadata are coming from.
         """
         raise NotImplementedError("get_metadata_authority")
 
     def get_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadataCore]:
         """Returns metadata items, used by build_extrinsic_origin_metadata."""
         return []
 
     def build_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadata]:
         """Builds a list of full RawExtrinsicMetadata objects, using
         metadata returned by get_extrinsic_origin_metadata."""
         metadata_items = self.get_extrinsic_origin_metadata()
         if not metadata_items:
             # If this package loader doesn't write metadata, no need to require
             # an implementation for get_metadata_authority.
             return []
 
         authority = self.get_metadata_authority()
         fetcher = self.get_metadata_fetcher()
 
         metadata_objects = []
 
         for item in metadata_items:
             metadata_objects.append(
                 RawExtrinsicMetadata(
                     target=Origin(self.url).swhid(),
                     discovery_date=item.discovery_date or self.visit_date,
                     authority=authority,
                     fetcher=fetcher,
                     format=item.format,
                     metadata=item.metadata,
                 )
             )
 
         return metadata_objects
 
     def get_extrinsic_snapshot_metadata(self) -> List[RawExtrinsicMetadataCore]:
         """Returns metadata items, used by build_extrinsic_snapshot_metadata."""
         return []
 
     def build_extrinsic_snapshot_metadata(
         self, snapshot_id: Sha1Git
     ) -> List[RawExtrinsicMetadata]:
         """Builds a list of full RawExtrinsicMetadata objects, using
         metadata returned by get_extrinsic_snapshot_metadata."""
         metadata_items = self.get_extrinsic_snapshot_metadata()
         if not metadata_items:
             # If this package loader doesn't write metadata, no need to require
             # an implementation for get_metadata_authority.
             return []
 
         authority = self.get_metadata_authority()
         fetcher = self.get_metadata_fetcher()
 
         metadata_objects = []
 
         for item in metadata_items:
             metadata_objects.append(
                 RawExtrinsicMetadata(
                     target=ExtendedSWHID(
                         object_type=ExtendedObjectType.SNAPSHOT, object_id=snapshot_id
                     ),
                     discovery_date=item.discovery_date or self.visit_date,
                     authority=authority,
                     fetcher=fetcher,
                     format=item.format,
                     metadata=item.metadata,
                     origin=self.url,
                 )
             )
 
         return metadata_objects
 
     def build_extrinsic_directory_metadata(
         self, p_info: TPackageInfo, revision_id: Sha1Git, directory_id: Sha1Git,
     ) -> List[RawExtrinsicMetadata]:
         if not p_info.directory_extrinsic_metadata:
             # If this package loader doesn't write metadata, no need to require
             # an implementation for get_metadata_authority.
             return []
 
         authority = self.get_metadata_authority()
         fetcher = self.get_metadata_fetcher()
 
         metadata_objects = []
 
         for item in p_info.directory_extrinsic_metadata:
             metadata_objects.append(
                 RawExtrinsicMetadata(
                     target=ExtendedSWHID(
                         object_type=ExtendedObjectType.DIRECTORY, object_id=directory_id
                     ),
                     discovery_date=item.discovery_date or self.visit_date,
                     authority=authority,
                     fetcher=fetcher,
                     format=item.format,
                     metadata=item.metadata,
                     origin=self.url,
                     revision=CoreSWHID(
                         object_type=ObjectType.REVISION, object_id=revision_id
                     ),
                 )
             )
 
         return metadata_objects
 
     def _load_extrinsic_directory_metadata(
         self, p_info: TPackageInfo, revision_id: Sha1Git, directory_id: Sha1Git,
     ) -> None:
         metadata_objects = self.build_extrinsic_directory_metadata(
             p_info, revision_id, directory_id
         )
         self._load_metadata_objects(metadata_objects)
 
     def _load_metadata_objects(
         self, metadata_objects: List[RawExtrinsicMetadata]
     ) -> None:
         if not metadata_objects:
             # If this package loader doesn't write metadata, no need to require
             # an implementation for get_metadata_authority.
             return
 
         self._create_authorities(mo.authority for mo in metadata_objects)
         self._create_fetchers(mo.fetcher for mo in metadata_objects)
 
         self.storage.raw_extrinsic_metadata_add(metadata_objects)
 
     def _create_authorities(self, authorities: Iterable[MetadataAuthority]) -> None:
         deduplicated_authorities = {
             (authority.type, authority.url): authority for authority in authorities
         }
         if authorities:
             self.storage.metadata_authority_add(list(deduplicated_authorities.values()))
 
     def _create_fetchers(self, fetchers: Iterable[MetadataFetcher]) -> None:
         deduplicated_fetchers = {
             (fetcher.name, fetcher.version): fetcher for fetcher in fetchers
         }
         if fetchers:
             self.storage.metadata_fetcher_add(list(deduplicated_fetchers.values()))
 
     def _load_extids(self, extids: Set[ExtID]) -> None:
         if not extids:
             return
         try:
             self.storage.extid_add(list(extids))
         except Exception as e:
             logger.exception("Failed to load new ExtIDs for %s", self.url)
             sentry_sdk.capture_exception(e)
             # No big deal, it just means the next visit will load the same versions
             # again.
diff --git a/swh/loader/package/nixguix/loader.py b/swh/loader/package/nixguix/loader.py
index 961b44a..72ec878 100644
--- a/swh/loader/package/nixguix/loader.py
+++ b/swh/loader/package/nixguix/loader.py
@@ -1,333 +1,311 @@
 # Copyright (C) 2020-2021  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import copy
 import json
 import logging
 import re
 from typing import Any, Dict, Iterator, List, Mapping, Optional, Tuple
 
 import attr
 
 from swh.loader.package.loader import (
     BasePackageInfo,
     PackageLoader,
     PartialExtID,
     RawExtrinsicMetadataCore,
 )
 from swh.loader.package.utils import EMPTY_AUTHOR, api_info, cached_method
 from swh.model import hashutil
 from swh.model.collections import ImmutableDict
 from swh.model.model import (
     MetadataAuthority,
     MetadataAuthorityType,
     Revision,
     RevisionType,
     Sha1Git,
     Snapshot,
     TargetType,
 )
 from swh.storage.interface import StorageInterface
 
 logger = logging.getLogger(__name__)
 
 EXTID_TYPE = "subresource-integrity"
 """The ExtID is an ASCII string, as defined by
 https://w3c.github.io/webappsec-subresource-integrity/"""
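 # An SRI value looks like, for instance (illustrative),
 # "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=": a hash algorithm prefix
 # followed by the base64-encoded digest.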
 
 
 @attr.s
 class NixGuixPackageInfo(BasePackageInfo):
     raw_info = attr.ib(type=Dict[str, Any])
 
     integrity = attr.ib(type=str)
     """Hash of the archive, formatted as in the Subresource Integrity
     specification."""
 
     @classmethod
     def from_metadata(cls, metadata: Dict[str, Any]) -> "NixGuixPackageInfo":
         return cls(
             url=metadata["url"],
             filename=None,
             integrity=metadata["integrity"],
             raw_info=metadata,
         )
 
     def extid(self) -> PartialExtID:
         return (EXTID_TYPE, self.integrity.encode("ascii"))
 
 
 class NixGuixLoader(PackageLoader[NixGuixPackageInfo]):
     """Load sources from a sources.json file. This loader is used to load
     sources used by functional package manager (eg. Nix and Guix).
 
     """
 
     visit_type = "nixguix"
 
     def __init__(
         self,
         storage: StorageInterface,
         url: str,
         unsupported_file_extensions: List[str] = [],
         max_content_size: Optional[int] = None,
     ):
         super().__init__(storage=storage, url=url, max_content_size=max_content_size)
         self.provider_url = url
         self.unsupported_file_extensions = unsupported_file_extensions
 
     # Note: this could be renamed get_artifacts in the PackageLoader
     # base class.
     @cached_method
     def raw_sources(self):
         return retrieve_sources(self.url)
 
     @cached_method
     def supported_sources(self):
         raw_sources = self.raw_sources()
         return clean_sources(
             parse_sources(raw_sources), self.unsupported_file_extensions
         )
 
     @cached_method
     def integrity_by_url(self) -> Dict[str, str]:
         sources = self.supported_sources()
         return {s["urls"][0]: s["integrity"] for s in sources["sources"]}
 
     def get_versions(self) -> List[str]:
         """The first mirror of the mirror list is used as branch name in the
         snapshot.
 
         """
         return list(self.integrity_by_url().keys())
 
     def get_metadata_authority(self):
         return MetadataAuthority(
             type=MetadataAuthorityType.FORGE, url=self.url, metadata={},
         )
 
     def get_extrinsic_snapshot_metadata(self):
         return [
             RawExtrinsicMetadataCore(
                 format="nixguix-sources-json", metadata=self.raw_sources(),
             ),
         ]
 
     # Note: this could be renamed get_artifact_info in the PackageLoader
     # base class.
     def get_package_info(self, url) -> Iterator[Tuple[str, NixGuixPackageInfo]]:
         # TODO: try all mirrors and not only the first one. A source
         # can be fetched from several urls, called mirrors. We
         # currently only use the first one, but if the first one
         # fails, we should try the second one and so on.
         integrity = self.integrity_by_url()[url]
         p_info = NixGuixPackageInfo.from_metadata({"url": url, "integrity": integrity})
         yield url, p_info
 
     def known_artifacts(
         self, snapshot: Optional[Snapshot]
     ) -> Dict[Sha1Git, Optional[ImmutableDict[str, object]]]:
         """Almost same implementation as the default one except it filters out the extra
         "evaluation" branch which does not have the right metadata structure.
 
         """
         if not snapshot:
             return {}
 
         # Skip evaluation revision which has no metadata
         revs = [
             rev.target
             for branch_name, rev in snapshot.branches.items()
             if (
                 rev
                 and rev.target_type == TargetType.REVISION
                 and branch_name != b"evaluation"
             )
         ]
         known_revisions = self.storage.revision_get(revs)
 
         ret = {}
         for revision in known_revisions:
             if not revision:  # revision_get can return None
                 continue
             ret[revision.id] = revision.metadata
         return ret
 
-    @staticmethod
-    def known_artifact_to_extid(known_artifact: Dict) -> Optional[PartialExtID]:
-        try:
-            value = known_artifact["extrinsic"]["raw"]["integrity"].encode("ascii")
-        except KeyError as e:
-            logger.exception(
-                "Unexpected metadata revision structure detected: %(context)s",
-                {"context": {"reason": str(e), "known_artifact": known_artifact,}},
-            )
-            # metadata field for the revision is not as expected by the loader
-            # nixguix. We consider this not the right revision and continue checking
-            # the other revisions
-            return None
-        return (EXTID_TYPE, value)
-
     def extra_branches(self) -> Dict[bytes, Mapping[str, Any]]:
         """We add a branch to the snapshot called 'evaluation' pointing to the
         revision used to generate the sources.json file. This revision
         is specified in the sources.json file itself. For the nixpkgs
         origin, this revision is coming from the
         github.com/nixos/nixpkgs repository.
 
         Note that this repository is not loaded explicitly, so this
         pointer can target a nonexistent revision for a while. However,
         the github and gnu loaders are supposed to load this revision
         and should eventually create the revision pointed to by this branch.
 
         This branch can be used to identify the snapshot associated to
         a Nix/Guix evaluation.
 
         """
         # The revision used to create the sources.json file. For Nix,
         # this revision belongs to the github.com/nixos/nixpkgs
         # repository
         revision = self.supported_sources()["revision"]
         return {
             b"evaluation": {
                 "target_type": "revision",
                 "target": hashutil.hash_to_bytes(revision),
             }
         }
 
     def build_revision(
         self, p_info: NixGuixPackageInfo, uncompressed_path: str, directory: Sha1Git
     ) -> Optional[Revision]:
         return Revision(
             type=RevisionType.TAR,
             message=b"",
             author=EMPTY_AUTHOR,
             date=None,
             committer=EMPTY_AUTHOR,
             committer_date=None,
             parents=(),
             directory=directory,
             synthetic=True,
-            metadata={
-                "extrinsic": {
-                    "provider": self.provider_url,
-                    "when": self.visit_date.isoformat(),
-                    "raw": p_info.raw_info,
-                },
-            },
         )
 
 
 def retrieve_sources(url: str) -> bytes:
     """Retrieve sources. Potentially raise NotFound error."""
     return api_info(url, allow_redirects=True)
 
 
 def parse_sources(raw_sources: bytes) -> Dict[str, Any]:
     return json.loads(raw_sources.decode("utf-8"))
 
 
 def make_pattern_unsupported_file_extension(unsupported_file_extensions: List[str],):
     """Make a regexp pattern for unsupported file extension out of a list
     of unsupported archive extension list.
 
     """
     return re.compile(
         rf".*\.({'|'.join(map(re.escape, unsupported_file_extensions))})$", re.DOTALL
     )
 
 
 def clean_sources(
     sources: Dict[str, Any], unsupported_file_extensions=[]
 ) -> Dict[str, Any]:
     """Validate and clean the sources structure. First, ensure all top level keys are
     present. Then, walk the sources list and remove sources that do not contain required
     keys.
 
     Filter out source entries whose:
     - required keys are missing
     - source type is not supported
     - urls attribute type is not a list
     - extension is known not to be supported by the loader
 
     Raises:
         ValueError if:
         - a required top level key is missing
         - top-level version is not 1
 
     Returns:
         the cleaned up sources Dict
 
     """
     pattern_unsupported_file = make_pattern_unsupported_file_extension(
         unsupported_file_extensions
     )
     # Required top level keys
     required_keys = ["version", "revision", "sources"]
     missing_keys = []
     for required_key in required_keys:
         if required_key not in sources:
             missing_keys.append(required_key)
 
     if missing_keys != []:
         raise ValueError(
             f"sources structure invalid, missing: {','.join(missing_keys)}"
         )
 
     # Only the version 1 is currently supported
     version = int(sources["version"])
     if version != 1:
         raise ValueError(
             f"The sources structure version '{sources['version']}' is not supported"
         )
 
     # If a source doesn't contain required attributes, this source is
     # skipped but others could still be archived.
     verified_sources = []
     for source in sources["sources"]:
         valid = True
         required_keys = ["urls", "integrity", "type"]
         for required_key in required_keys:
             if required_key not in source:
                 logger.info(
                     f"Skip source '{source}' because key '{required_key}' is missing",
                 )
                 valid = False
 
         if valid and source["type"] != "url":
             logger.info(
                 f"Skip source '{source}' because the type {source['type']} "
                 "is not supported",
             )
             valid = False
 
         if valid and not isinstance(source["urls"], list):
             logger.info(
                 f"Skip source {source} because the urls attribute is not a list"
             )
             valid = False
 
         if valid and len(source["urls"]) > 0:  # Filter out unsupported archives
             supported_sources: List[str] = []
             for source_url in source["urls"]:
                 if pattern_unsupported_file.match(source_url):
                     logger.info(f"Skip unsupported artifact url {source_url}")
                     continue
                 supported_sources.append(source_url)
 
             if len(supported_sources) == 0:
                 logger.info(
                     f"Skip source {source} because urls only reference "
                     "unsupported artifacts. Unsupported "
                     f"artifacts so far: {pattern_unsupported_file}"
                 )
                 continue
 
             new_source = copy.deepcopy(source)
             new_source["urls"] = supported_sources
             verified_sources.append(new_source)
 
     sources["sources"] = verified_sources
     return sources
diff --git a/swh/loader/package/nixguix/tests/test_nixguix.py b/swh/loader/package/nixguix/tests/test_nixguix.py
index d30197e..5e82d5c 100644
--- a/swh/loader/package/nixguix/tests/test_nixguix.py
+++ b/swh/loader/package/nixguix/tests/test_nixguix.py
@@ -1,706 +1,606 @@
 # Copyright (C) 2020-2021 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import json
 import logging
 import os
-from typing import Dict, List, Optional, Tuple
-from unittest.mock import patch
+from typing import Dict, Optional, Tuple
 
-import attr
 import pytest
 
 from swh.loader.package import __version__
 from swh.loader.package.archive.loader import ArchiveLoader
 from swh.loader.package.nixguix.loader import (
     NixGuixLoader,
-    NixGuixPackageInfo,
     clean_sources,
     make_pattern_unsupported_file_extension,
     parse_sources,
     retrieve_sources,
 )
 from swh.loader.package.utils import download
 from swh.loader.tests import assert_last_visit_matches
 from swh.loader.tests import check_snapshot as check_snapshot_full
 from swh.loader.tests import get_stats
 from swh.model.hashutil import hash_to_bytes
 from swh.model.identifiers import ExtendedObjectType, ExtendedSWHID
 from swh.model.model import (
     MetadataAuthority,
     MetadataAuthorityType,
     MetadataFetcher,
     RawExtrinsicMetadata,
     Snapshot,
     SnapshotBranch,
     TargetType,
 )
 from swh.storage.algos.origin import origin_get_latest_visit_status
 from swh.storage.algos.snapshot import snapshot_get_all_branches
 from swh.storage.exc import HashCollision
 from swh.storage.interface import PagedResult, StorageInterface
 
 sources_url = "https://nix-community.github.io/nixpkgs-swh/sources.json"
 
 
 @pytest.fixture
 def raw_sources(datadir) -> bytes:
     with open(
         os.path.join(
             datadir, "https_nix-community.github.io", "nixpkgs-swh_sources.json"
         ),
         "rb",
     ) as f:
         return f.read()
 
 
 SNAPSHOT1 = Snapshot(
     id=hash_to_bytes("0c5881c74283793ebe9a09a105a9381e41380383"),
     branches={
         b"evaluation": SnapshotBranch(
             target=hash_to_bytes("cc4e04c26672dd74e5fd0fecb78b435fb55368f7"),
             target_type=TargetType.REVISION,
         ),
         b"https://github.com/owner-1/repository-1/revision-1.tgz": SnapshotBranch(
             target=hash_to_bytes("488ad4e7b8e2511258725063cf43a2b897c503b4"),
             target_type=TargetType.REVISION,
         ),
     },
 )
 
 
 def check_snapshot(snapshot: Snapshot, storage: StorageInterface):
     # The `evaluation` branch is allowed to be unresolvable: at the time of the
     # current nixguix visit, it may not have been visited yet (the git loader is
     # in charge of its visit for now). For more details, check the
     # swh.loader.package.nixguix.NixGuixLoader.extra_branches docstring.
     check_snapshot_full(
         snapshot, storage, allowed_empty=[(TargetType.REVISION, b"evaluation")]
     )
 
     assert isinstance(snapshot, Snapshot)
     # then ensure the snapshot revisions are structurally as expected
     revision_ids = []
     for name, branch in snapshot.branches.items():
         if name == b"evaluation":
             continue  # skipping that particular branch (cf. previous comment)
         if branch.target_type == TargetType.REVISION:
             revision_ids.append(branch.target)
 
     revisions = storage.revision_get(revision_ids)
     for rev in revisions:
         assert rev is not None
         metadata = rev.metadata
-        assert metadata is not None
-        raw = metadata["extrinsic"]["raw"]
-        assert "url" in raw
-        assert "integrity" in raw
+        assert not metadata
 
 
 def test_retrieve_sources(swh_storage, requests_mock_datadir):
     j = parse_sources(retrieve_sources(sources_url))
     assert "sources" in j.keys()
     assert len(j["sources"]) == 2
 
 
 def test_nixguix_url_not_found(swh_storage, requests_mock_datadir):
     """When failing to read from the url, the visit is marked as not_found.
 
     Here the sources url does not exist, so requests_mock_datadir returns a 404,
     resulting in a NotFound being raised within the package loader's main loop.
 
     This results in the task having status "failed" and a visit_status with status
     "not_found".
 
     """
     unknown_url = "https://non-existing-url/"
     loader = NixGuixLoader(swh_storage, unknown_url)
     # during the retrieval step
     load_status = loader.load()
 
     assert load_status == {"status": "failed"}
 
     assert_last_visit_matches(
         swh_storage, unknown_url, status="not_found", type="nixguix", snapshot=None
     )
 
     assert len(requests_mock_datadir.request_history) == 1
     assert requests_mock_datadir.request_history[0].url == unknown_url
 
 
 def test_nixguix_url_with_decoding_error(swh_storage, requests_mock_datadir):
     """Other errors during communication with the url, the visit is marked as failed
 
     requests_mock_datadir will intercept the requests to sources_url. Since the file
     exists, returns a 200 with the requested content of the query. As file.txt is no
     json, fails do decode and raises a JSONDecodeError. In effect failing the visit.
 
     """
     sources_url = "https://example.com/file.txt"
     loader = NixGuixLoader(swh_storage, sources_url)
     load_status = loader.load()
 
     assert load_status == {"status": "failed"}
 
     assert_last_visit_matches(
         swh_storage, sources_url, status="failed", type="nixguix", snapshot=None
     )
 
     assert len(requests_mock_datadir.request_history) == 1
     assert requests_mock_datadir.request_history[0].url == sources_url
 
 
 def test_clean_sources_invalid_schema(swh_storage, requests_mock_datadir):
     sources = {}
     with pytest.raises(ValueError, match="sources structure invalid, missing: .*"):
         clean_sources(sources)
 
 
 def test_clean_sources_invalid_version(swh_storage, requests_mock_datadir):
     for version_ok in [1, "1"]:  # Check those versions are fine
         clean_sources({"version": version_ok, "sources": [], "revision": "my-revision"})
 
     for version_ko in [0, "0", 2, "2"]:  # Check version != 1 raise an error
         with pytest.raises(
             ValueError, match="sources structure version .* is not supported"
         ):
             clean_sources(
                 {"version": version_ko, "sources": [], "revision": "my-revision"}
             )
 
 
 def test_clean_sources_invalid_sources(swh_storage, requests_mock_datadir):
     valid_sources = [
         # 1 valid source
         {"type": "url", "urls": ["my-url.tar.gz"], "integrity": "my-integrity"},
     ]
     sources = {
         "version": 1,
         "sources": valid_sources
         + [
             # integrity is missing
             {"type": "url", "urls": ["my-url.tgz"],},
             # urls is not a list
             {"type": "url", "urls": "my-url.zip", "integrity": "my-integrity"},
             # type is not url
             {"type": "git", "urls": ["my-url.zip"], "integrity": "my-integrity"},
             # missing fields which got double-checked nonetheless...
             {"integrity": "my-integrity"},
         ],
         "revision": "my-revision",
     }
     clean = clean_sources(sources)
 
     assert len(clean["sources"]) == len(valid_sources)
 
 
 def test_make_pattern_unsupported_file_extension():
     unsupported_extensions = ["el", "c", "txt"]
     supported_extensions = ["Z", "7z"]  # for test
 
     actual_unsupported_pattern = make_pattern_unsupported_file_extension(
         unsupported_extensions
     )
 
     for supported_ext in supported_extensions:
         assert supported_ext not in unsupported_extensions
 
         supported_filepath = f"anything.{supported_ext}"
         actual_match = actual_unsupported_pattern.match(supported_filepath)
         assert not actual_match
 
     for unsupported_ext in unsupported_extensions:
         unsupported_filepath = f"something.{unsupported_ext}"
         actual_match = actual_unsupported_pattern.match(unsupported_filepath)
         assert actual_match
 
 
 def test_clean_sources_unsupported_artifacts(swh_storage, requests_mock_datadir):
     unsupported_file_extensions = [
         "iso",
         "whl",
         "gem",
         "pom",
         "msi",
         "pod",
         "png",
         "rock",
         "ttf",
         "jar",
         "c",
         "el",
         "rpm",
         "diff",
         "patch",
     ]
     supported_sources = [
         {
             "type": "url",
             "urls": [f"https://server.org/my-url.{ext}"],
             "integrity": "my-integrity",
         }
         for ext in [
             "known-unknown-but-ok",  # this is fine as well with the current approach
             "zip",
             "tar.gz",
             "tgz",
             "tar.bz2",
             "tbz",
             "tbz2",
             "tar.xz",
             "tar",
             "zip",
             "7z",
             "Z",
         ]
     ]
 
     unsupported_sources = [
         {
             "type": "url",
             "urls": [f"https://server.org/my-url.{ext}"],
             "integrity": "my-integrity",
         }
         for ext in unsupported_file_extensions
     ]
 
     sources = {
         "version": 1,
         "sources": supported_sources + unsupported_sources,
         "revision": "my-revision",
     }
 
     clean = clean_sources(sources, unsupported_file_extensions)
 
     assert len(clean["sources"]) == len(supported_sources)
 
 
 def test_loader_one_visit(swh_storage, requests_mock_datadir, raw_sources):
     loader = NixGuixLoader(swh_storage, sources_url)
     res = loader.load()
     assert res["status"] == "eventful"
 
     stats = get_stats(swh_storage)
     assert {
         "content": 1,
         "directory": 3,
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": 1,
         "skipped_content": 0,
         "snapshot": 1,
     } == stats
 
     # The visit is partial because urls pointing to non-tarball files
     # are not handled yet
     assert_last_visit_matches(
         swh_storage, sources_url, status="partial", type="nixguix"
     )
 
     visit_status = origin_get_latest_visit_status(swh_storage, sources_url)
     snapshot_swhid = ExtendedSWHID(
         object_type=ExtendedObjectType.SNAPSHOT, object_id=visit_status.snapshot
     )
     metadata_authority = MetadataAuthority(
         type=MetadataAuthorityType.FORGE, url=sources_url,
     )
     expected_metadata = [
         RawExtrinsicMetadata(
             target=snapshot_swhid,
             authority=metadata_authority,
             fetcher=MetadataFetcher(
                 name="swh.loader.package.nixguix.loader.NixGuixLoader",
                 version=__version__,
             ),
             discovery_date=loader.visit_date,
             format="nixguix-sources-json",
             metadata=raw_sources,
             origin=sources_url,
         )
     ]
     assert swh_storage.raw_extrinsic_metadata_get(
         snapshot_swhid, metadata_authority,
     ) == PagedResult(next_page_token=None, results=expected_metadata,)
 
 
 def test_uncompress_failure(swh_storage, requests_mock_datadir):
     """Non tarball files are currently not supported and the uncompress
     function fails on such kind of files.
 
     However, even in this case of failure (because of the url
     https://example.com/file.txt), a snapshot and a visit has to be
     created (with a status partial since all files are not archived).
 
     """
     loader = NixGuixLoader(swh_storage, sources_url)
     loader_status = loader.load()
 
     sources = loader.supported_sources()["sources"]
     urls = [s["urls"][0] for s in sources]
     assert "https://example.com/file.txt" in urls
     assert loader_status["status"] == "eventful"
 
     # The visit is partial because urls pointing to non tarball files
     # are not handled yet
     assert_last_visit_matches(
         swh_storage, sources_url, status="partial", type="nixguix"
     )
 
 
 def test_loader_incremental(swh_storage, requests_mock_datadir):
     """Ensure a second visit do not download artifact already
     downloaded by the previous visit.
 
     """
     loader = NixGuixLoader(swh_storage, sources_url)
     load_status = loader.load()
 
     loader.load()
     assert load_status == {"status": "eventful", "snapshot_id": SNAPSHOT1.id.hex()}
 
     assert_last_visit_matches(
         swh_storage,
         sources_url,
         status="partial",
         type="nixguix",
         snapshot=SNAPSHOT1.id,
     )
 
     check_snapshot(SNAPSHOT1, storage=swh_storage)
 
     urls = [
         m.url
         for m in requests_mock_datadir.request_history
         if m.url == ("https://github.com/owner-1/repository-1/revision-1.tgz")
     ]
     # The artifact
     # 'https://github.com/owner-1/repository-1/revision-1.tgz' is only
     # visited one time
     assert len(urls) == 1
 
 
 def test_loader_two_visits(swh_storage, requests_mock_datadir_visits):
     """To ensure there is only one origin, but two visits, two revisions
     and two snapshots are created.
 
     The first visit creates a snapshot containing one tarball. The
     second visit creates a snapshot containing the same tarball and
     another tarball.
 
     """
     loader = NixGuixLoader(swh_storage, sources_url)
     load_status = loader.load()
     assert load_status == {"status": "eventful", "snapshot_id": SNAPSHOT1.id.hex()}
 
     assert_last_visit_matches(
         swh_storage,
         sources_url,
         status="partial",
         type="nixguix",
         snapshot=SNAPSHOT1.id,
     )
 
     check_snapshot(SNAPSHOT1, storage=swh_storage)
 
     stats = get_stats(swh_storage)
     assert {
         "content": 1,
         "directory": 3,
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": 1,
         "skipped_content": 0,
         "snapshot": 1,
     } == stats
 
     loader = NixGuixLoader(swh_storage, sources_url)
     load_status = loader.load()
     expected_snapshot_id_hex = "b0bfa75cbd0cc90aac3b9e95fb0f59c731176d97"
     expected_snapshot_id = hash_to_bytes(expected_snapshot_id_hex)
     assert load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id_hex,
     }
 
     assert_last_visit_matches(
         swh_storage,
         sources_url,
         status="partial",
         type="nixguix",
         snapshot=expected_snapshot_id,
     )
 
     # This ensures visits are incremental. Indeed, if we request an url
     # a second time, because of the requests_mock_datadir_visits
     # fixture, the file has to end with `_visit1`.
     expected_snapshot = Snapshot(
         id=expected_snapshot_id,
         branches={
             b"evaluation": SnapshotBranch(
                 target=hash_to_bytes("602140776b2ce6c9159bcf52ada73a297c063d5e"),
                 target_type=TargetType.REVISION,
             ),
             b"https://github.com/owner-1/repository-1/revision-1.tgz": SnapshotBranch(
                 target=hash_to_bytes("488ad4e7b8e2511258725063cf43a2b897c503b4"),
                 target_type=TargetType.REVISION,
             ),
             b"https://github.com/owner-2/repository-1/revision-1.tgz": SnapshotBranch(
                 target=hash_to_bytes("85e0bad74e33e390aaeb74f139853ae3863ee544"),
                 target_type=TargetType.REVISION,
             ),
         },
     )
     check_snapshot(expected_snapshot, storage=swh_storage)
 
     stats = get_stats(swh_storage)
     assert {
         "content": 2,
         "directory": 5,
         "origin": 1,
         "origin_visit": 2,
         "release": 0,
         "revision": 2,
         "skipped_content": 0,
         "snapshot": 2,
     } == stats
 
 
-def test_resolve_revision_from_artifacts(swh_storage, requests_mock_datadir, datadir):
-    loader = NixGuixLoader(swh_storage, sources_url)
-
-    known_artifacts = {
-        "id1": {"extrinsic": {"raw": {"url": "url1", "integrity": "integrity1"}}},
-        "id2": {"extrinsic": {"raw": {"url": "url2", "integrity": "integrity2"}}},
-    }
-
-    p_info = NixGuixPackageInfo.from_metadata(
-        {"url": "url1", "integrity": "integrity1"}
-    )
-    assert loader.resolve_revision_from_artifacts(known_artifacts, p_info) == "id1"
-    p_info = NixGuixPackageInfo.from_metadata(
-        {"url": "url3", "integrity": "integrity3"}
-    )
-    assert loader.resolve_revision_from_artifacts(known_artifacts, p_info) is None
-
-
 def test_evaluation_branch(swh_storage, requests_mock_datadir):
     loader = NixGuixLoader(swh_storage, sources_url)
     res = loader.load()
     assert res["status"] == "eventful"
 
     assert_last_visit_matches(
         swh_storage,
         sources_url,
         status="partial",
         type="nixguix",
         snapshot=SNAPSHOT1.id,
     )
 
     check_snapshot(SNAPSHOT1, storage=swh_storage)
 
 
 def test_eoferror(swh_storage, requests_mock_datadir):
     """Load a truncated archive which is invalid to make the uncompress
     function raising the exception EOFError. We then check if a
     snapshot is created, meaning this error is well managed.
 
     """
     sources = (
         "https://nix-community.github.io/nixpkgs-swh/sources-EOFError.json"  # noqa
     )
     loader = NixGuixLoader(swh_storage, sources)
     loader.load()
 
     expected_snapshot = Snapshot(
         id=hash_to_bytes("4257fa2350168c6bfec726a06452ea27a2c0cb33"),
         branches={
             b"evaluation": SnapshotBranch(
                 target=hash_to_bytes("cc4e04c26672dd74e5fd0fecb78b435fb55368f7"),
                 target_type=TargetType.REVISION,
             ),
         },
     )
 
     check_snapshot(expected_snapshot, storage=swh_storage)
 
 
 def fake_download(
     url: str,
     dest: str,
     hashes: Dict = {},
     filename: Optional[str] = None,
     auth: Optional[Tuple[str, str]] = None,
 ) -> Tuple[str, Dict]:
     """Fake download which raises HashCollision (for the sake of test simpliciy,
     let's accept that makes sense)
 
     For tests purpose only.
 
     """
     if url == "https://example.com/file.txt":
         # instead of failing because it's a file not dealt with by the nix guix
         # loader, make it raise a hash collision
         raise HashCollision("sha1", "f92d74e3874587aaf443d1db961d4e26dde13e9c", [])
     return download(url, dest, hashes, filename, auth)
 
 
 def test_raise_exception(swh_storage, requests_mock_datadir, mocker):
     mock_download = mocker.patch("swh.loader.package.loader.download")
     mock_download.side_effect = fake_download
 
     loader = NixGuixLoader(swh_storage, sources_url)
     res = loader.load()
 
     assert res == {
         "status": "eventful",
         "snapshot_id": SNAPSHOT1.id.hex(),
     }
 
     check_snapshot(SNAPSHOT1, storage=swh_storage)
 
     assert len(mock_download.mock_calls) == 2
 
     # The visit is partial because some artifact downloads failed
     assert_last_visit_matches(
         swh_storage, sources_url, status="partial", type="nixguix"
     )
 
 
 def test_load_nixguix_one_common_artifact_from_other_loader(
     swh_storage, datadir, requests_mock_datadir_visits, caplog
 ):
     """Misformatted revision should be caught and logged, then loading continues
 
     """
     caplog.set_level(logging.ERROR, "swh.loader.package.nixguix.loader")
 
     # 1. first ingest with for example the archive loader
     gnu_url = "https://ftp.gnu.org/gnu/8sync/"
     release = "0.1.0"
     artifact_url = f"https://ftp.gnu.org/gnu/8sync/8sync-{release}.tar.gz"
     gnu_artifacts = [
         {
             "time": 944729610,
             "url": artifact_url,
             "length": 221837,
             "filename": f"8sync-{release}.tar.gz",
             "version": release,
         }
     ]
     archive_loader = ArchiveLoader(swh_storage, url=gnu_url, artifacts=gnu_artifacts)
     actual_load_status = archive_loader.load()
     expected_snapshot_id = "c419397fd912039825ebdbea378bc6283f006bf5"
     assert actual_load_status["status"] == "eventful"
     assert actual_load_status["snapshot_id"] == expected_snapshot_id  # noqa
 
     assert_last_visit_matches(
         archive_loader.storage, gnu_url, status="full", type="tar"
     )
 
-    gnu_snapshot: Snapshot = snapshot_get_all_branches(
-        archive_loader.storage, hash_to_bytes(expected_snapshot_id)
-    )
-
-    first_revision = gnu_snapshot.branches[f"releases/{release}".encode("utf-8")]
-
     # 2. Then ingest with the nixguix loader which lists the same artifact within its
     # sources.json
 
     # ensure test setup is ok
     data_sources = os.path.join(
         datadir, "https_nix-community.github.io", "nixpkgs-swh_sources_special.json"
     )
     all_sources = json.loads(open(data_sources).read())
     found = False
     for source in all_sources["sources"]:
         if source["urls"][0] == artifact_url:
             found = True
             break
     # Check outside the loop so the assertion can actually fail when the
     # artifact url is missing from the sources file.
     assert found, f"test setup error: {artifact_url} must be in {data_sources}"
 
     # first visit with a snapshot, ok
     sources_url = "https://nix-community.github.io/nixpkgs-swh/sources_special.json"
     loader = NixGuixLoader(swh_storage, sources_url)
     actual_load_status2 = loader.load()
     assert actual_load_status2["status"] == "eventful"
 
     assert_last_visit_matches(swh_storage, sources_url, status="full", type="nixguix")
 
     snapshot_id = actual_load_status2["snapshot_id"]
     snapshot = snapshot_get_all_branches(swh_storage, hash_to_bytes(snapshot_id))
     assert snapshot
-
-    # 3. Then ingest again with the nixguix loader, with a different snapshot
-    #    and different source
-
-    # simulate a snapshot already seen with a revision with the wrong metadata structure
-    # This revision should be skipped, thus making the artifact being ingested again.
-    with patch(
-        "swh.loader.package.loader.PackageLoader.last_snapshot"
-    ) as last_snapshot:
-        # mutate the snapshot to target a revision with the wrong metadata structure
-        # snapshot["branches"][artifact_url.encode("utf-8")] = first_revision
-        old_revision = swh_storage.revision_get([first_revision.target])[0]
-        # assert that revision is not in the right format
-        assert old_revision.metadata["extrinsic"]["raw"].get("integrity", {}) == {}
-
-        # mutate snapshot to create a clash
-        snapshot = attr.evolve(
-            snapshot,
-            branches={
-                **snapshot.branches,
-                artifact_url.encode("utf-8"): SnapshotBranch(
-                    target_type=TargetType.REVISION,
-                    target=hash_to_bytes(old_revision.id),
-                ),
-            },
-        )
-
-        # modify snapshot to actually change revision metadata structure so we simulate
-        # a revision written by somebody else (structure different)
-        last_snapshot.return_value = snapshot
-
-        loader = NixGuixLoader(swh_storage, sources_url)
-        actual_load_status3 = loader.load()
-        assert last_snapshot.called
-        assert actual_load_status3["status"] == "eventful"
-
-        assert_last_visit_matches(
-            swh_storage, sources_url, status="full", type="nixguix"
-        )
-
-        new_snapshot_id = "32ff641e510aceefc3a6d0dcbf208b2854d2e965"
-        assert actual_load_status3["snapshot_id"] == new_snapshot_id
-
-        last_snapshot = snapshot_get_all_branches(
-            swh_storage, hash_to_bytes(new_snapshot_id)
-        )
-        new_revision_branch = last_snapshot.branches[artifact_url.encode("utf-8")]
-        assert new_revision_branch.target_type == TargetType.REVISION
-
-        new_revision = swh_storage.revision_get([new_revision_branch.target])[0]
-
-        # the new revision has the correct structure,  so it got ingested alright by the
-        # new run
-        assert new_revision.metadata["extrinsic"]["raw"]["integrity"] is not None
-
-        actual_detections: List[Dict] = []
-        for record in caplog.records:
-            logtext = record.getMessage()
-            if "Unexpected metadata revision structure detected:" in logtext:
-                actual_detections.append(record.args["context"])
-
-        expected_detections = [
-            {"reason": "'integrity'", "known_artifact": old_revision.metadata,},
-        ]
-
-        # less calls than there are sources listed in the sources.json;
-        # as some of them are skipped using the ExtID from a previous run
-        assert len(expected_detections) <= len(all_sources["sources"])
-
-        assert actual_detections == expected_detections
diff --git a/swh/loader/package/npm/loader.py b/swh/loader/package/npm/loader.py
index 7ddb819..65d37d1 100644
--- a/swh/loader/package/npm/loader.py
+++ b/swh/loader/package/npm/loader.py
@@ -1,351 +1,295 @@
 # Copyright (C) 2019-2021  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 from codecs import BOM_UTF8
 import json
 import logging
 import os
 from typing import Any, Dict, Iterator, List, Optional, Sequence, Tuple, Union
 from urllib.parse import quote
 
 import attr
 import chardet
 
 from swh.loader.package.loader import (
     BasePackageInfo,
     PackageLoader,
     PartialExtID,
     RawExtrinsicMetadataCore,
 )
 from swh.loader.package.utils import api_info, cached_method, release_name
 from swh.model.hashutil import hash_to_bytes
 from swh.model.model import (
     MetadataAuthority,
     MetadataAuthorityType,
     Person,
     Revision,
     RevisionType,
     Sha1Git,
     TimestampWithTimezone,
 )
 from swh.storage.interface import StorageInterface
 
 logger = logging.getLogger(__name__)
 
 
 EMPTY_PERSON = Person(fullname=b"", name=None, email=None)
 
 
 EXTID_TYPE = "npm-archive-sha1"
 
 
 @attr.s
 class NpmPackageInfo(BasePackageInfo):
     raw_info = attr.ib(type=Dict[str, Any])
 
     date = attr.ib(type=Optional[str])
     shasum = attr.ib(type=str)
     """sha1 checksum"""
     version = attr.ib(type=str)
 
     @classmethod
     def from_metadata(
         cls, project_metadata: Dict[str, Any], version: str
     ) -> "NpmPackageInfo":
         package_metadata = project_metadata["versions"][version]
         url = package_metadata["dist"]["tarball"]
 
         # No date available in intrinsic metadata: retrieve it from the API
         # metadata, using the version number that the API claims this package
         # has.
         extrinsic_version = package_metadata["version"]
 
         if "time" in project_metadata:
             date = project_metadata["time"][extrinsic_version]
         elif "mtime" in package_metadata:
             date = package_metadata["mtime"]
         else:
             date = None
 
         return cls(
             url=url,
             filename=os.path.basename(url),
             date=date,
             shasum=package_metadata["dist"]["shasum"],
             version=extrinsic_version,
             raw_info=package_metadata,
             directory_extrinsic_metadata=[
                 RawExtrinsicMetadataCore(
                     format="replicate-npm-package-json",
                     metadata=json.dumps(package_metadata).encode(),
                 )
             ],
         )
 
     def extid(self) -> PartialExtID:
         return (EXTID_TYPE, hash_to_bytes(self.shasum))
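 
     # Illustrative sketch (not part of the loader): how an NpmPackageInfo is
     # built from a minimal, hypothetical registry document and what its ExtID
     # looks like::
     #
     #     project_metadata = {
     #         "versions": {
     #             "0.0.2": {
     #                 "version": "0.0.2",
     #                 "dist": {
     #                     "tarball": "https://registry.npmjs.org/org/-/org-0.0.2.tgz",
     #                     "shasum": "05181c12cd8c22035dd31155656826b85745da37",
     #                 },
     #             }
     #         },
     #         "time": {"0.0.2": "2011-02-09T05:06:12.000Z"},
     #     }
     #     p_info = NpmPackageInfo.from_metadata(project_metadata, version="0.0.2")
     #     assert p_info.shasum == "05181c12cd8c22035dd31155656826b85745da37"
     #     assert p_info.extid() == (EXTID_TYPE, hash_to_bytes(p_info.shasum))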
 
 
 class NpmLoader(PackageLoader[NpmPackageInfo]):
     """Load npm origin's artifact releases into swh archive.
 
     """
 
     visit_type = "npm"
 
     def __init__(
         self,
         storage: StorageInterface,
         url: str,
         max_content_size: Optional[int] = None,
     ):
         """Constructor
 
         Args
             str: origin url (e.g. https://www.npmjs.com/package/<package-name>)
         """
         super().__init__(storage=storage, url=url, max_content_size=max_content_size)
         package_name = url.split("https://www.npmjs.com/package/")[1]
         safe_name = quote(package_name, safe="")
         self.provider_url = f"https://replicate.npmjs.com/{safe_name}/"
         self._info: Dict[str, Any] = {}
         self._versions = None
 
     @cached_method
     def _raw_info(self) -> bytes:
         return api_info(self.provider_url)
 
     @cached_method
     def info(self) -> Dict:
         """Return the project metadata information (fetched from npm registry)
 
         """
         return json.loads(self._raw_info())
 
     def get_versions(self) -> Sequence[str]:
         return sorted(list(self.info()["versions"].keys()))
 
     def get_default_version(self) -> str:
         return self.info()["dist-tags"].get("latest", "")
 
     def get_metadata_authority(self):
         return MetadataAuthority(
             type=MetadataAuthorityType.FORGE, url="https://npmjs.com/", metadata={},
         )
 
     def get_package_info(self, version: str) -> Iterator[Tuple[str, NpmPackageInfo]]:
         p_info = NpmPackageInfo.from_metadata(
             project_metadata=self.info(), version=version
         )
         yield release_name(version), p_info
 
-    @staticmethod
-    def known_artifact_to_extid(known_artifact: Dict) -> Optional[PartialExtID]:
-        extid_str = _artifact_to_sha1(known_artifact)
-        if extid_str is None:
-            return None
-        try:
-            return (EXTID_TYPE, hash_to_bytes(extid_str))
-        except ValueError:
-            return None
-
     def build_revision(
         self, p_info: NpmPackageInfo, uncompressed_path: str, directory: Sha1Git
     ) -> Optional[Revision]:
         i_metadata = extract_intrinsic_metadata(uncompressed_path)
         if not i_metadata:
             return None
         author = extract_npm_package_author(i_metadata)
         message = i_metadata["version"].encode("ascii")
 
         if p_info.date is None:
             url = p_info.url
             artifact_name = os.path.basename(url)
             raise ValueError(
                 "Origin %s: Cannot determine upload time for artifact %s."
                 % (p_info.url, artifact_name)
             )
 
         date = TimestampWithTimezone.from_iso8601(p_info.date)
 
         # FIXME: this is to remain bug-compatible with earlier versions:
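         # (earlier loader versions dropped microseconds when building revision
         # dates; e.g. a registry timestamp like "2014-01-01T15:40:33.360Z" was
         # stored with microseconds set to 0, and keeping that behaviour keeps
         # revision ids stable across loader versions)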
         date = attr.evolve(date, timestamp=attr.evolve(date.timestamp, microseconds=0))
 
         r = Revision(
             type=RevisionType.TAR,
             message=message,
             author=author,
             date=date,
             committer=author,
             committer_date=date,
             parents=(),
             directory=directory,
             synthetic=True,
-            metadata={
-                "intrinsic": {"tool": "package.json", "raw": i_metadata,},
-                "extrinsic": {
-                    "provider": self.provider_url,
-                    "when": self.visit_date.isoformat(),
-                    "raw": p_info.raw_info,
-                },
-            },
         )
         return r
 
 
-def _artifact_to_sha1(known_artifact: Dict) -> Optional[str]:
-    """Returns the sha1 from an NPM 'original_artifact' dict
-
-    The following code allows to deal with 2 metadata formats:
-
-    - old format sample::
-
-        {
-            'package_source': {
-                'sha1': '05181c12cd8c22035dd31155656826b85745da37',
-            }
-        }
-
-    - new format sample::
-
-        {
-            'original_artifact': [{
-                'checksums': {
-                    'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa
-                    ...
-                },
-            }],
-            ...
-        }
-
-    """
-    known_original_artifact = known_artifact.get("original_artifact")
-    if not known_original_artifact:
-        # previous loader-npm version kept original artifact elsewhere
-        known_original_artifact = known_artifact.get("package_source")
-        if not known_original_artifact:
-            return None
-        return known_original_artifact["sha1"]
-    else:
-        assert isinstance(known_original_artifact, list)
-        return known_original_artifact[0]["checksums"]["sha1"]
-
-
 def _author_str(author_data: Union[Dict, List, str]) -> str:
     """Parse author from package.json author fields
 
     """
     if isinstance(author_data, dict):
         author_str = ""
         name = author_data.get("name")
         if name is not None:
             if isinstance(name, str):
                 author_str += name
             elif isinstance(name, list):
                 author_str += _author_str(name[0]) if len(name) > 0 else ""
         email = author_data.get("email")
         if email is not None:
             author_str += f" <{email}>"
         result = author_str
     elif isinstance(author_data, list):
         result = _author_str(author_data[0]) if len(author_data) > 0 else ""
     else:
         result = author_data
     return result
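 
 # Illustrative behaviour of _author_str on hypothetical inputs (mirroring the
 # unit tests in test_npm.py):
 #
 #   _author_str("groot")                                     -> "groot"
 #   _author_str({"name": "groot", "email": "groot@x.org"})   -> "groot <groot@x.org>"
 #   _author_str([{"name": "Shawn"}, {"name": "Markham"}])    -> "Shawn"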
 
 
 def extract_npm_package_author(package_json: Dict[str, Any]) -> Person:
     """
     Extract package author from a ``package.json`` file content and
     return it in swh format.
 
     Args:
         package_json: Dict holding the content of parsed
             ``package.json`` file
 
     Returns:
         Person
 
     """
     for author_key in ("author", "authors"):
         if author_key in package_json:
             author_data = package_json[author_key]
             if author_data is None:
                 return EMPTY_PERSON
             author_str = _author_str(author_data)
             return Person.from_fullname(author_str.encode())
 
     return EMPTY_PERSON
 
 
 def _lstrip_bom(s, bom=BOM_UTF8):
     if s.startswith(bom):
         return s[len(bom) :]
     else:
         return s
 
 
 def load_json(json_bytes):
     """
     Try to load JSON from bytes and return a dictionary.
 
     First try to decode from utf-8. If the decoding failed,
     try to detect the encoding and decode again with replace
     error handling.
 
     If JSON is malformed, an empty dictionary will be returned.
 
     Args:
         json_bytes (bytes): binary content of a JSON file
 
     Returns:
         dict: JSON data loaded in a dictionary
     """
     json_data = {}
     try:
         json_str = _lstrip_bom(json_bytes).decode("utf-8")
     except UnicodeDecodeError:
         encoding = chardet.detect(json_bytes)["encoding"]
         if not encoding:
             # encoding detection failed; json_str would otherwise be unbound below
             return json_data
         json_str = json_bytes.decode(encoding, "replace")
     try:
         json_data = json.loads(json_str)
     except json.decoder.JSONDecodeError:
         pass
     return json_data
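 
 # Rough sketch of the expected behaviour (hypothetical inputs):
 #
 #   load_json(BOM_UTF8 + b'{"name": "org"}')  -> {"name": "org"}
 #   load_json(b"not json at all")             -> {}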
 
 
 def extract_intrinsic_metadata(dir_path: str) -> Dict:
     """Given an uncompressed path holding the pkginfo file, returns a
        pkginfo parsed structure as a dict.
 
        The release artifact contains at their root one folder. For example:
        $ tar tvf zprint-0.0.6.tar.gz
        drwxr-xr-x root/root         0 2018-08-22 11:01 zprint-0.0.6/
        ...
 
     Args:
 
         dir_path (str): Path to the uncompressed directory
                         representing a release artifact from npm.
 
     Returns:
         the pkginfo parsed structure as a dict if any or None if
         none was present.
 
     """
     # Retrieve the root folder of the archive
     if not os.path.exists(dir_path):
         return {}
     lst = os.listdir(dir_path)
     if len(lst) == 0:
         return {}
     project_dirname = lst[0]
     package_json_path = os.path.join(dir_path, project_dirname, "package.json")
     if not os.path.exists(package_json_path):
         return {}
     with open(package_json_path, "rb") as package_json_file:
         package_json_bytes = package_json_file.read()
         return load_json(package_json_bytes)
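 
 # Sketch of the on-disk layout this expects (hypothetical paths): the archive
 # uncompressed under <dir_path> contains a single top-level folder holding the
 # package.json, whose parsed content is returned; anything missing yields {}:
 #
 #   i_metadata = extract_intrinsic_metadata("/tmp/org-0.0.2")  # hypothetical path
 #   version = i_metadata.get("version")  # None when no package.json was found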
diff --git a/swh/loader/package/npm/tests/test_npm.py b/swh/loader/package/npm/tests/test_npm.py
index ce4d9f0..084a864 100644
--- a/swh/loader/package/npm/tests/test_npm.py
+++ b/swh/loader/package/npm/tests/test_npm.py
@@ -1,691 +1,624 @@
 # Copyright (C) 2019-2021  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import json
 import os
 
 import pytest
 
 from swh.loader.package import __version__
 from swh.loader.package.npm.loader import (
     NpmLoader,
     _author_str,
     extract_npm_package_author,
 )
-from swh.loader.package.tests.common import check_metadata_paths
 from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
 from swh.model.hashutil import hash_to_bytes
 from swh.model.identifiers import (
     CoreSWHID,
     ExtendedObjectType,
     ExtendedSWHID,
     ObjectType,
 )
 from swh.model.model import (
     MetadataAuthority,
     MetadataAuthorityType,
     MetadataFetcher,
     Person,
     RawExtrinsicMetadata,
     Snapshot,
     SnapshotBranch,
     TargetType,
 )
 from swh.storage.interface import PagedResult
 
 
 @pytest.fixture
 def org_api_info(datadir) -> bytes:
     with open(os.path.join(datadir, "https_replicate.npmjs.com", "org"), "rb",) as f:
         return f.read()
 
 
 def test_npm_author_str():
     for author, expected_author in [
         ("author", "author"),
         (
             ["Al from quantum leap", "hal from 2001 space odyssey"],
             "Al from quantum leap",
         ),
         ([], ""),
         ({"name": "groot", "email": "groot@galaxy.org",}, "groot <groot@galaxy.org>"),
         ({"name": "somebody",}, "somebody"),
         ({"email": "no@one.org"}, " <no@one.org>"),  # note first elt is an extra blank
         ({"name": "no one", "email": None,}, "no one"),
         ({"email": None,}, ""),
         ({"name": None}, ""),
         ({"name": None, "email": None,}, ""),
         ({}, ""),
         (None, None),
         ({"name": []}, "",),
         (
             {"name": ["Susan McSween", "William H. Bonney", "Doc Scurlock",]},
             "Susan McSween",
         ),
         (None, None),
     ]:
         assert _author_str(author) == expected_author
 
 
 def test_npm_extract_npm_package_author(datadir):
     package_metadata_filepath = os.path.join(
         datadir, "https_replicate.npmjs.com", "org_visit1"
     )
 
     with open(package_metadata_filepath) as json_file:
         package_metadata = json.load(json_file)
 
     assert extract_npm_package_author(package_metadata["versions"]["0.0.2"]) == Person(
         fullname=b"mooz <stillpedant@gmail.com>",
         name=b"mooz",
         email=b"stillpedant@gmail.com",
     )
 
     assert extract_npm_package_author(package_metadata["versions"]["0.0.3"]) == Person(
         fullname=b"Masafumi Oyamada <stillpedant@gmail.com>",
         name=b"Masafumi Oyamada",
         email=b"stillpedant@gmail.com",
     )
 
     package_json = json.loads(
         """
     {
         "name": "highlightjs-line-numbers.js",
         "version": "2.7.0",
         "description": "Highlight.js line numbers plugin.",
         "main": "src/highlightjs-line-numbers.js",
         "dependencies": {},
         "devDependencies": {
             "gulp": "^4.0.0",
             "gulp-rename": "^1.4.0",
             "gulp-replace": "^0.6.1",
             "gulp-uglify": "^1.2.0"
         },
         "repository": {
             "type": "git",
             "url": "https://github.com/wcoder/highlightjs-line-numbers.js.git"
         },
         "author": "Yauheni Pakala <evgeniy.pakalo@gmail.com>",
         "license": "MIT",
         "bugs": {
             "url": "https://github.com/wcoder/highlightjs-line-numbers.js/issues"
         },
         "homepage": "http://wcoder.github.io/highlightjs-line-numbers.js/"
     }"""
     )
 
     assert extract_npm_package_author(package_json) == Person(
         fullname=b"Yauheni Pakala <evgeniy.pakalo@gmail.com>",
         name=b"Yauheni Pakala",
         email=b"evgeniy.pakalo@gmail.com",
     )
 
     package_json = json.loads(
         """
     {
         "name": "3-way-diff",
         "version": "0.0.1",
         "description": "3-way diffing of JavaScript objects",
         "main": "index.js",
         "authors": [
             {
                 "name": "Shawn Walsh",
                 "url": "https://github.com/shawnpwalsh"
             },
             {
                 "name": "Markham F Rollins IV",
                 "url": "https://github.com/mrollinsiv"
             }
         ],
         "keywords": [
             "3-way diff",
             "3 way diff",
             "three-way diff",
             "three way diff"
         ],
         "devDependencies": {
             "babel-core": "^6.20.0",
             "babel-preset-es2015": "^6.18.0",
             "mocha": "^3.0.2"
         },
         "dependencies": {
             "lodash": "^4.15.0"
         }
     }"""
     )
 
     assert extract_npm_package_author(package_json) == Person(
         fullname=b"Shawn Walsh", name=b"Shawn Walsh", email=None
     )
 
     package_json = json.loads(
         """
     {
         "name": "yfe-ynpm",
         "version": "1.0.0",
         "homepage": "http://gitlab.ywwl.com/yfe/yfe-ynpm",
         "repository": {
             "type": "git",
             "url": "git@gitlab.ywwl.com:yfe/yfe-ynpm.git"
         },
         "author": [
             "fengmk2 <fengmk2@gmail.com> (https://fengmk2.com)",
             "xufuzi <xufuzi@ywwl.com> (https://7993.org)"
         ],
         "license": "MIT"
     }"""
     )
 
     assert extract_npm_package_author(package_json) == Person(
         fullname=b"fengmk2 <fengmk2@gmail.com> (https://fengmk2.com)",
         name=b"fengmk2",
         email=b"fengmk2@gmail.com",
     )
 
     package_json = json.loads(
         """
     {
         "name": "umi-plugin-whale",
         "version": "0.0.8",
         "description": "Internal contract component",
         "authors": {
             "name": "xiaohuoni",
             "email": "448627663@qq.com"
         },
         "repository": "alitajs/whale",
         "devDependencies": {
             "np": "^3.0.4",
             "umi-tools": "*"
         },
         "license": "MIT"
     }"""
     )
 
     assert extract_npm_package_author(package_json) == Person(
         fullname=b"xiaohuoni <448627663@qq.com>",
         name=b"xiaohuoni",
         email=b"448627663@qq.com",
     )
 
     package_json_no_authors = json.loads(
         """{
         "authors": null,
         "license": "MIT"
     }"""
     )
 
     assert extract_npm_package_author(package_json_no_authors) == Person(
         fullname=b"", name=None, email=None
     )
 
 
 def normalize_hashes(hashes):
     if isinstance(hashes, str):
         return hash_to_bytes(hashes)
     if isinstance(hashes, list):
         return [hash_to_bytes(x) for x in hashes]
     return {hash_to_bytes(k): hash_to_bytes(v) for k, v in hashes.items()}
 
 
 _expected_new_contents_first_visit = normalize_hashes(
     [
         "4ce3058e16ab3d7e077f65aabf855c34895bf17c",
         "858c3ceee84c8311adc808f8cdb30d233ddc9d18",
         "0fa33b4f5a4e0496da6843a38ff1af8b61541996",
         "85a410f8ef8eb8920f2c384a9555566ad4a2e21b",
         "9163ac8025923d5a45aaac482262893955c9b37b",
         "692cf623b8dd2c5df2c2998fd95ae4ec99882fb4",
         "18c03aac6d3e910efb20039c15d70ab5e0297101",
         "41265c42446aac17ca769e67d1704f99e5a1394d",
         "783ff33f5882813dca9239452c4a7cadd4dba778",
         "b029cfb85107aee4590c2434a3329bfcf36f8fa1",
         "112d1900b4c2e3e9351050d1b542c9744f9793f3",
         "5439bbc4bd9a996f1a38244e6892b71850bc98fd",
         "d83097a2f994b503185adf4e719d154123150159",
         "d0939b4898e83090ee55fd9d8a60e312cfadfbaf",
         "b3523a26f7147e4af40d9d462adaae6d49eda13e",
         "cd065fb435d6fb204a8871bcd623d0d0e673088c",
         "2854a40855ad839a54f4b08f5cff0cf52fca4399",
         "b8a53bbaac34ebb8c6169d11a4b9f13b05c583fe",
         "0f73d56e1cf480bded8a1ecf20ec6fc53c574713",
         "0d9882b2dfafdce31f4e77fe307d41a44a74cefe",
         "585fc5caab9ead178a327d3660d35851db713df1",
         "e8cd41a48d79101977e3036a87aeb1aac730686f",
         "5414efaef33cceb9f3c9eb5c4cc1682cd62d14f7",
         "9c3cc2763bf9e9e37067d3607302c4776502df98",
         "3649a68410e354c83cd4a38b66bd314de4c8f5c9",
         "e96ed0c091de1ebdf587104eaf63400d1974a1fe",
         "078ca03d2f99e4e6eab16f7b75fbb7afb699c86c",
         "38de737da99514de6559ff163c988198bc91367a",
     ]
 )
 
 _expected_new_directories_first_visit = normalize_hashes(
     [
         "3370d20d6f96dc1c9e50f083e2134881db110f4f",
         "42753c0c2ab00c4501b552ac4671c68f3cf5aece",
         "d7895533ef5edbcffdea3f057d9fef3a1ef845ce",
         "80579be563e2ef3e385226fe7a3f079b377f142c",
         "3b0ddc6a9e58b4b53c222da4e27b280b6cda591c",
         "bcad03ce58ac136f26f000990fc9064e559fe1c0",
         "5fc7e82a1bc72e074665c6078c6d3fad2f13d7ca",
         "e3cd26beba9b1e02f6762ef54bd9ac80cc5f25fd",
         "584b5b4b6cf7f038095e820b99386a9c232de931",
         "184c8d6d0d242f2b1792ef9d3bf396a5434b7f7a",
         "bb5f4ee143c970367eb409f2e4c1104898048b9d",
         "1b95491047add1103db0dfdfa84a9735dcb11e88",
         "a00c6de13471a2d66e64aca140ddb21ef5521e62",
         "5ce6c1cd5cda2d546db513aaad8c72a44c7771e2",
         "c337091e349b6ac10d38a49cdf8c2401ef9bb0f2",
         "202fafcd7c0f8230e89d5496ad7f44ab12b807bf",
         "775cc516543be86c15c1dc172f49c0d4e6e78235",
         "ff3d1ead85a14f891e8b3fa3a89de39db1b8de2e",
     ]
 )
 
 _expected_new_revisions_first_visit = normalize_hashes(
     {
         "d8a1c7474d2956ac598a19f0f27d52f7015f117e": (
             "42753c0c2ab00c4501b552ac4671c68f3cf5aece"
         ),
         "5f9eb78af37ffd12949f235e86fac04898f9f72a": (
             "3370d20d6f96dc1c9e50f083e2134881db110f4f"
         ),
         "ba019b192bdb94bd0b5bd68b3a5f92b5acc2239a": (
             "d7895533ef5edbcffdea3f057d9fef3a1ef845ce"
         ),
     }
 )
 
 
 def package_url(package):
     return "https://www.npmjs.com/package/%s" % package
 
 
 def package_metadata_url(package):
     return "https://replicate.npmjs.com/%s/" % package
 
 
-def test_npm_revision_metadata_structure(swh_storage, requests_mock_datadir):
-    package = "org"
-    loader = NpmLoader(swh_storage, package_url(package))
-
-    actual_load_status = loader.load()
-    assert actual_load_status["status"] == "eventful"
-    assert actual_load_status["snapshot_id"] is not None
-
-    expected_revision_id = hash_to_bytes("d8a1c7474d2956ac598a19f0f27d52f7015f117e")
-    revision = swh_storage.revision_get([expected_revision_id])[0]
-    assert revision is not None
-
-    check_metadata_paths(
-        revision.metadata,
-        paths=[
-            ("intrinsic.tool", str),
-            ("intrinsic.raw", dict),
-            ("extrinsic.provider", str),
-            ("extrinsic.when", str),
-            ("extrinsic.raw", dict),
-            ("original_artifact", list),
-        ],
-    )
-
-    for original_artifact in revision.metadata["original_artifact"]:
-        check_metadata_paths(
-            original_artifact,
-            paths=[("filename", str), ("length", int), ("checksums", dict),],
-        )
-
-
 def test_npm_loader_first_visit(swh_storage, requests_mock_datadir, org_api_info):
     package = "org"
     url = package_url(package)
     loader = NpmLoader(swh_storage, url)
 
     actual_load_status = loader.load()
     expected_snapshot_id = hash_to_bytes("d0587e1195aed5a8800411a008f2f2d627f18e2d")
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id.hex(),
     }
 
     assert_last_visit_matches(
         swh_storage, url, status="full", type="npm", snapshot=expected_snapshot_id
     )
 
     stats = get_stats(swh_storage)
 
     assert {
         "content": len(_expected_new_contents_first_visit),
         "directory": len(_expected_new_directories_first_visit),
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": len(_expected_new_revisions_first_visit),
         "skipped_content": 0,
         "snapshot": 1,
     } == stats
 
     contents = swh_storage.content_get(_expected_new_contents_first_visit)
     count = sum(0 if content is None else 1 for content in contents)
     assert count == len(_expected_new_contents_first_visit)
 
     assert (
         list(swh_storage.directory_missing(_expected_new_directories_first_visit)) == []
     )
 
     assert list(swh_storage.revision_missing(_expected_new_revisions_first_visit)) == []
 
     versions = [
         ("0.0.2", "d8a1c7474d2956ac598a19f0f27d52f7015f117e"),
         ("0.0.3", "5f9eb78af37ffd12949f235e86fac04898f9f72a"),
         ("0.0.4", "ba019b192bdb94bd0b5bd68b3a5f92b5acc2239a"),
     ]
 
     expected_snapshot = Snapshot(
         id=expected_snapshot_id,
         branches={
             b"HEAD": SnapshotBranch(
                 target=b"releases/0.0.4", target_type=TargetType.ALIAS
             ),
             **{
                 b"releases/"
                 + version_name.encode(): SnapshotBranch(
                     target=hash_to_bytes(version_id), target_type=TargetType.REVISION,
                 )
                 for (version_name, version_id) in versions
             },
         },
     )
     check_snapshot(expected_snapshot, swh_storage)
 
     metadata_authority = MetadataAuthority(
         type=MetadataAuthorityType.FORGE, url="https://npmjs.com/",
     )
 
     for (version_name, revision_id) in versions:
         revision = swh_storage.revision_get([hash_to_bytes(revision_id)])[0]
         directory_id = revision.directory
         directory_swhid = ExtendedSWHID(
             object_type=ExtendedObjectType.DIRECTORY, object_id=directory_id,
         )
         revision_swhid = CoreSWHID(
             object_type=ObjectType.REVISION, object_id=hash_to_bytes(revision_id),
         )
         expected_metadata = [
             RawExtrinsicMetadata(
                 target=directory_swhid,
                 authority=metadata_authority,
                 fetcher=MetadataFetcher(
                     name="swh.loader.package.npm.loader.NpmLoader", version=__version__,
                 ),
                 discovery_date=loader.visit_date,
                 format="replicate-npm-package-json",
                 metadata=json.dumps(
                     json.loads(org_api_info)["versions"][version_name]
                 ).encode(),
                 origin="https://www.npmjs.com/package/org",
                 revision=revision_swhid,
             )
         ]
         assert swh_storage.raw_extrinsic_metadata_get(
             directory_swhid, metadata_authority,
         ) == PagedResult(next_page_token=None, results=expected_metadata,)
 
 
 def test_npm_loader_incremental_visit(swh_storage, requests_mock_datadir_visits):
     package = "org"
     url = package_url(package)
     loader = NpmLoader(swh_storage, url)
 
     expected_snapshot_id = hash_to_bytes("d0587e1195aed5a8800411a008f2f2d627f18e2d")
     actual_load_status = loader.load()
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id.hex(),
     }
     assert_last_visit_matches(
         swh_storage, url, status="full", type="npm", snapshot=expected_snapshot_id
     )
 
     stats = get_stats(swh_storage)
 
     assert {
         "content": len(_expected_new_contents_first_visit),
         "directory": len(_expected_new_directories_first_visit),
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": len(_expected_new_revisions_first_visit),
         "skipped_content": 0,
         "snapshot": 1,
     } == stats
 
     # reset loader internal state
     del loader._cached_info
     del loader._cached__raw_info
 
     actual_load_status2 = loader.load()
     assert actual_load_status2["status"] == "eventful"
     snap_id2 = actual_load_status2["snapshot_id"]
     assert snap_id2 is not None
     assert snap_id2 != actual_load_status["snapshot_id"]
 
     assert_last_visit_matches(swh_storage, url, status="full", type="npm")
 
     stats = get_stats(swh_storage)
 
     assert {  # 3 new release artifacts
         "content": len(_expected_new_contents_first_visit) + 14,
         "directory": len(_expected_new_directories_first_visit) + 15,
         "origin": 1,
         "origin_visit": 2,
         "release": 0,
         "revision": len(_expected_new_revisions_first_visit) + 3,
         "skipped_content": 0,
         "snapshot": 2,
     } == stats
 
     urls = [
         m.url
         for m in requests_mock_datadir_visits.request_history
         if m.url.startswith("https://registry.npmjs.org")
     ]
     assert len(urls) == len(set(urls))  # we visited each artifact only once across both visits
 
 
 @pytest.mark.usefixtures("requests_mock_datadir")
 def test_npm_loader_version_divergence(swh_storage):
     package = "@aller_shared"
     url = package_url(package)
     loader = NpmLoader(swh_storage, url)
 
     actual_load_status = loader.load()
     expected_snapshot_id = hash_to_bytes("b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92")
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id.hex(),
     }
     assert_last_visit_matches(
         swh_storage, url, status="full", type="npm", snapshot=expected_snapshot_id
     )
 
     stats = get_stats(swh_storage)
 
     assert {  # 1 new release artifact
         "content": 534,
         "directory": 153,
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": 2,
         "skipped_content": 0,
         "snapshot": 1,
     } == stats
 
     expected_snapshot = Snapshot(
         id=expected_snapshot_id,
         branches={
             b"HEAD": SnapshotBranch(
                 target_type=TargetType.ALIAS, target=b"releases/0.1.0"
             ),
             b"releases/0.1.0": SnapshotBranch(
                 target_type=TargetType.REVISION,
                 target=hash_to_bytes("845673bfe8cbd31b1eaf757745a964137e6f9116"),
             ),
             b"releases/0.1.1-alpha.14": SnapshotBranch(
                 target_type=TargetType.REVISION,
                 target=hash_to_bytes("05181c12cd8c22035dd31155656826b85745da37"),
             ),
         },
     )
     check_snapshot(expected_snapshot, swh_storage)
 
 
-def test_npm__known_artifact_to_extid__old_loader_version():
-    """Current loader version should parse old metadata scheme
-
-    """
-    assert (
-        NpmLoader.known_artifact_to_extid(
-            {"package_source": {"sha1": "something-wrong"}}
-        )
-        is None
-    )
-
-    sha1 = "05181c12cd8c22035dd31155656826b85745da37"
-    assert NpmLoader.known_artifact_to_extid({"package_source": {"sha1": sha1,}}) == (
-        "npm-archive-sha1",
-        hash_to_bytes(sha1),
-    )
-
-
-def test_npm__known_artifact_to_extid__current_loader_version():
-    """Current loader version should be able to parse current metadata scheme
-
-    """
-    sha1 = "05181c12cd8c22035dd31155656826b85745da37"
-    assert NpmLoader.known_artifact_to_extid(
-        {"original_artifact": [{"checksums": {"sha1": sha1},}],}
-    ) == ("npm-archive-sha1", hash_to_bytes(sha1))
-
-    assert (
-        NpmLoader.known_artifact_to_extid(
-            {"original_artifact": [{"checksums": {"sha1": "something-wrong"},}],},
-        )
-        is None
-    )
-
-
 def test_npm_artifact_with_no_intrinsic_metadata(swh_storage, requests_mock_datadir):
     """Skip artifact with no intrinsic metadata during ingestion
 
     """
     package = "nativescript-telerik-analytics"
     url = package_url(package)
     loader = NpmLoader(swh_storage, url)
 
     actual_load_status = loader.load()
     # no branch as one artifact without any intrinsic metadata
     expected_snapshot = Snapshot(
         id=hash_to_bytes("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"), branches={},
     )
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot.id.hex(),
     }
 
     check_snapshot(expected_snapshot, swh_storage)
 
     assert_last_visit_matches(
         swh_storage, url, status="full", type="npm", snapshot=expected_snapshot.id
     )
 
 
 def test_npm_artifact_with_no_upload_time(swh_storage, requests_mock_datadir):
     """With no time upload, artifact is skipped
 
     """
     package = "jammit-no-time"
     url = package_url(package)
     loader = NpmLoader(swh_storage, url)
 
     actual_load_status = loader.load()
     # no branch as the only artifact has no upload time
     expected_snapshot = Snapshot(
         id=hash_to_bytes("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"), branches={},
     )
     assert actual_load_status == {
         "status": "uneventful",
         "snapshot_id": expected_snapshot.id.hex(),
     }
 
     check_snapshot(expected_snapshot, swh_storage)
 
     assert_last_visit_matches(
         swh_storage, url, status="partial", type="npm", snapshot=expected_snapshot.id
     )
 
 
 def test_npm_artifact_use_mtime_if_no_time(swh_storage, requests_mock_datadir):
     """With no time upload, artifact is skipped
 
     """
     package = "jammit-express"
     url = package_url(package)
     loader = NpmLoader(swh_storage, url)
 
     actual_load_status = loader.load()
     expected_snapshot_id = hash_to_bytes("d6e08e19159f77983242877c373c75222d5ae9dd")
 
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id.hex(),
     }
 
     # artifact is used
     expected_snapshot = Snapshot(
         id=expected_snapshot_id,
         branches={
             b"HEAD": SnapshotBranch(
                 target_type=TargetType.ALIAS, target=b"releases/0.0.1"
             ),
             b"releases/0.0.1": SnapshotBranch(
                 target_type=TargetType.REVISION,
                 target=hash_to_bytes("9e4dd2b40d1b46b70917c0949aa2195c823a648e"),
             ),
         },
     )
     check_snapshot(expected_snapshot, swh_storage)
 
     assert_last_visit_matches(
         swh_storage, url, status="full", type="npm", snapshot=expected_snapshot.id
     )
 
 
 def test_npm_no_artifact(swh_storage, requests_mock_datadir):
     """If no artifacts at all is found for origin, the visit fails completely
 
     """
     package = "catify"
     url = package_url(package)
     loader = NpmLoader(swh_storage, url)
     actual_load_status = loader.load()
     assert actual_load_status == {
         "status": "failed",
     }
 
     assert_last_visit_matches(swh_storage, url, status="failed", type="npm")
 
 
 def test_npm_origin_not_found(swh_storage, requests_mock_datadir):
     url = package_url("non-existent-url")
     loader = NpmLoader(swh_storage, url)
 
     assert loader.load() == {"status": "failed"}
 
     assert_last_visit_matches(
         swh_storage, url, status="not_found", type="npm", snapshot=None
     )
diff --git a/swh/loader/package/pypi/loader.py b/swh/loader/package/pypi/loader.py
index e63ee7a..adba58f 100644
--- a/swh/loader/package/pypi/loader.py
+++ b/swh/loader/package/pypi/loader.py
@@ -1,301 +1,236 @@
 # Copyright (C) 2019-2021  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import json
 import logging
 import os
 from typing import Any, Dict, Iterator, Optional, Sequence, Tuple
 from urllib.parse import urlparse
 
 import attr
 from pkginfo import UnpackedSDist
 
 from swh.loader.package.loader import (
     BasePackageInfo,
     PackageLoader,
     PartialExtID,
     RawExtrinsicMetadataCore,
 )
 from swh.loader.package.utils import EMPTY_AUTHOR, api_info, cached_method, release_name
 from swh.model.hashutil import hash_to_bytes
 from swh.model.model import (
     MetadataAuthority,
     MetadataAuthorityType,
     Person,
     Revision,
     RevisionType,
     Sha1Git,
     TimestampWithTimezone,
 )
 from swh.storage.interface import StorageInterface
 
 logger = logging.getLogger(__name__)
 
 
 EXTID_TYPE = "pypi-archive-sha256"
 
 
 @attr.s
 class PyPIPackageInfo(BasePackageInfo):
     raw_info = attr.ib(type=Dict[str, Any])
 
     comment_text = attr.ib(type=Optional[str])
     sha256 = attr.ib(type=str)
     upload_time = attr.ib(type=str)
 
     @classmethod
     def from_metadata(cls, metadata: Dict[str, Any]) -> "PyPIPackageInfo":
         return cls(
             url=metadata["url"],
             filename=metadata["filename"],
             raw_info=metadata,
             comment_text=metadata.get("comment_text"),
             sha256=metadata["digests"]["sha256"],
             upload_time=metadata["upload_time"],
             directory_extrinsic_metadata=[
                 RawExtrinsicMetadataCore(
                     format="pypi-project-json", metadata=json.dumps(metadata).encode(),
                 )
             ],
         )
 
     def extid(self) -> PartialExtID:
         return (EXTID_TYPE, hash_to_bytes(self.sha256))
 
 
 class PyPILoader(PackageLoader[PyPIPackageInfo]):
     """Load pypi origin's artifact releases into swh archive.
 
     """
 
     visit_type = "pypi"
 
     def __init__(
         self,
         storage: StorageInterface,
         url: str,
         max_content_size: Optional[int] = None,
     ):
         super().__init__(storage=storage, url=url, max_content_size=max_content_size)
         self.provider_url = pypi_api_url(self.url)
 
     @cached_method
     def _raw_info(self) -> bytes:
         return api_info(self.provider_url)
 
     @cached_method
     def info(self) -> Dict:
         """Return the project metadata information (fetched from pypi registry)
 
         """
         return json.loads(self._raw_info())
 
     def get_versions(self) -> Sequence[str]:
         return self.info()["releases"].keys()
 
     def get_default_version(self) -> str:
         return self.info()["info"]["version"]
 
     def get_metadata_authority(self):
         p_url = urlparse(self.url)
         return MetadataAuthority(
             type=MetadataAuthorityType.FORGE,
             url=f"{p_url.scheme}://{p_url.netloc}/",
             metadata={},
         )
 
     def get_package_info(self, version: str) -> Iterator[Tuple[str, PyPIPackageInfo]]:
         res = []
         for meta in self.info()["releases"][version]:
             if meta["packagetype"] != "sdist":
                 continue
             p_info = PyPIPackageInfo.from_metadata(meta)
             res.append((version, p_info))
 
         if len(res) == 1:
             version, p_info = res[0]
             yield release_name(version), p_info
         else:
             for version, p_info in res:
                 yield release_name(version, p_info.filename), p_info
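 
     # Illustrative note: a version with a single sdist ends up under a
     # "releases/<version>" branch name, while a version shipping several
     # sdists presumably gets one branch per artifact, with the filename
     # passed to release_name() to disambiguate them.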
 
-    @staticmethod
-    def known_artifact_to_extid(known_artifact: Dict) -> Optional[PartialExtID]:
-        extid_str = _artifact_to_sha256(known_artifact)
-        if extid_str is None:
-            return None
-        try:
-            return (EXTID_TYPE, hash_to_bytes(extid_str)) if extid_str else None
-        except ValueError:
-            return None
-
     def build_revision(
         self, p_info: PyPIPackageInfo, uncompressed_path: str, directory: Sha1Git
     ) -> Optional[Revision]:
         i_metadata = extract_intrinsic_metadata(uncompressed_path)
         if not i_metadata:
             return None
 
         # from intrinsic metadata
         name = i_metadata["version"]
         _author = author(i_metadata)
 
         # from extrinsic metadata
         message = p_info.comment_text or ""
         message = "%s: %s" % (name, message) if message else name
         date = TimestampWithTimezone.from_iso8601(p_info.upload_time)
 
         return Revision(
             type=RevisionType.TAR,
             message=message.encode("utf-8"),
             author=_author,
             date=date,
             committer=_author,
             committer_date=date,
             parents=(),
             directory=directory,
             synthetic=True,
-            metadata={
-                "intrinsic": {"tool": "PKG-INFO", "raw": i_metadata,},
-                "extrinsic": {
-                    "provider": self.provider_url,
-                    "when": self.visit_date.isoformat(),
-                    "raw": p_info.raw_info,
-                },
-            },
-        )
-
-
-def _artifact_to_sha256(known_artifact: Dict) -> Optional[str]:
-    """Returns the sha256 from a PyPI 'original_artifact' dict
-
-    The following code allows to deal with 2 metadata formats (column metadata
-    in 'revision')
-
-    - old format sample::
-
-        {
-            'original_artifact': {
-                'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec',  # noqa
-                ...
-            },
-            ...
-        }
-
-    - new format sample::
-
-        {
-            'original_artifact': [{
-                'checksums': {
-                    'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec',  # noqa
-                    ...
-                },
-            }],
-            ...
-        }
-
-    """
-    original_artifact = known_artifact["original_artifact"]
-    if isinstance(original_artifact, dict):
-        # previous loader-pypi version stored metadata as dict
-        return original_artifact["sha256"]
-    # new pypi loader actually store metadata dict differently...
-    assert isinstance(original_artifact, list)
-    # current loader-pypi stores metadata as list of dict
-    if len(known_artifact["original_artifact"]) == 0:
-        return None
-    elif len(known_artifact["original_artifact"]) == 1:
-        return original_artifact[0]["checksums"]["sha256"]
-    else:
-        raise ValueError(
-            f"Expected exactly one PyPI original_artifact, got "
-            f"{len(known_artifact['original_artifact'])}"
         )
 
 
 def pypi_api_url(url: str) -> str:
     """Compute api url from a project url
 
     Args:
         url (str): PyPI instance's url (e.g: https://pypi.org/project/requests)
         This deals with correctly transforming the project's api url (e.g
         https://pypi.org/pypi/requests/json)
 
     Returns:
         api url
 
     """
     p_url = urlparse(url)
     project_name = p_url.path.rstrip("/").split("/")[-1]
     url = "%s://%s/pypi/%s/json" % (p_url.scheme, p_url.netloc, project_name)
     return url
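 
 # For instance, urlparse("https://pypi.org/project/requests/").path is
 # "/project/requests/"; stripping the trailing slash and taking the last
 # component gives "requests", hence "https://pypi.org/pypi/requests/json".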
 
 
 def extract_intrinsic_metadata(dir_path: str) -> Dict:
     """Given an uncompressed path holding the pkginfo file, returns a
        pkginfo parsed structure as a dict.
 
        The release artifact contains one folder at its root. For example:
        $ tar tvf zprint-0.0.6.tar.gz
        drwxr-xr-x root/root         0 2018-08-22 11:01 zprint-0.0.6/
        ...
 
     Args:
 
         dir_path (str): Path to the uncompressed directory
                         representing a release artifact from pypi.
 
     Returns:
         the pkginfo parsed structure as a dict, or an empty dict if no
         PKG-INFO file was present.
 
     """
     # Retrieve the root folder of the archive
     if not os.path.exists(dir_path):
         return {}
     lst = os.listdir(dir_path)
     if len(lst) != 1:
         return {}
     project_dirname = lst[0]
     pkginfo_path = os.path.join(dir_path, project_dirname, "PKG-INFO")
     if not os.path.exists(pkginfo_path):
         return {}
     pkginfo = UnpackedSDist(pkginfo_path)
     raw = pkginfo.__dict__
     raw.pop("filename")  # this gets added with the ondisk location
     return raw
 
 
 def author(data: Dict) -> Person:
     """Given a dict of project/release artifact information (coming from
        PyPI), returns an author subset.
 
     Args:
         data (dict): Representing either artifact information or
                      release information.
 
     Returns:
         swh-model dict representing a person.
 
     """
     name = data.get("author")
     email = data.get("author_email")
     fullname = None  # type: Optional[str]
 
     if email:
         fullname = "%s <%s>" % (name, email)
     else:
         fullname = name
 
     if not fullname:
         return EMPTY_AUTHOR
 
     if name is not None:
         name = name.encode("utf-8")
 
     if email is not None:
         email = email.encode("utf-8")
 
     return Person(fullname=fullname.encode("utf-8"), name=name, email=email)
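 
 # Rough sketch of the mapping performed by author() (hypothetical inputs):
 #
 #   author({"author": "i-am-groot", "author_email": "iam@groot.org"})
 #       -> Person(fullname=b"i-am-groot <iam@groot.org>",
 #                 name=b"i-am-groot", email=b"iam@groot.org")
 #   author({})  -> EMPTY_AUTHOR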
diff --git a/swh/loader/package/pypi/tests/test_pypi.py b/swh/loader/package/pypi/tests/test_pypi.py
index 9da895b..0ca7312 100644
--- a/swh/loader/package/pypi/tests/test_pypi.py
+++ b/swh/loader/package/pypi/tests/test_pypi.py
@@ -1,876 +1,811 @@
 # Copyright (C) 2019-2021 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import json
 import os
 from os import path
 from unittest.mock import patch
 
 import pytest
 
 from swh.core.pytest_plugin import requests_mock_datadir_factory
 from swh.core.tarball import uncompress
 from swh.loader.package import __version__
 from swh.loader.package.pypi.loader import (
     PyPILoader,
     author,
     extract_intrinsic_metadata,
     pypi_api_url,
 )
-from swh.loader.package.tests.common import check_metadata_paths
 from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
 from swh.model.hashutil import hash_to_bytes
 from swh.model.identifiers import (
     CoreSWHID,
     ExtendedObjectType,
     ExtendedSWHID,
     ObjectType,
 )
 from swh.model.model import (
     MetadataAuthority,
     MetadataAuthorityType,
     MetadataFetcher,
     Person,
     RawExtrinsicMetadata,
     Snapshot,
     SnapshotBranch,
     TargetType,
 )
 from swh.storage.interface import PagedResult
 
 
 @pytest.fixture
 def _0805nexter_api_info(datadir) -> bytes:
     with open(
         os.path.join(datadir, "https_pypi.org", "pypi_0805nexter_json"), "rb",
     ) as f:
         return f.read()
 
 
 def test_pypi_author_basic():
     data = {
         "author": "i-am-groot",
         "author_email": "iam@groot.org",
     }
     actual_author = author(data)
 
     expected_author = Person(
         fullname=b"i-am-groot <iam@groot.org>",
         name=b"i-am-groot",
         email=b"iam@groot.org",
     )
 
     assert actual_author == expected_author
 
 
 def test_pypi_author_empty_email():
     data = {
         "author": "i-am-groot",
         "author_email": "",
     }
     actual_author = author(data)
 
     expected_author = Person(fullname=b"i-am-groot", name=b"i-am-groot", email=b"",)
 
     assert actual_author == expected_author
 
 
 def test_pypi_author_empty_name():
     data = {
         "author": "",
         "author_email": "iam@groot.org",
     }
     actual_author = author(data)
 
     expected_author = Person(
         fullname=b" <iam@groot.org>", name=b"", email=b"iam@groot.org",
     )
 
     assert actual_author == expected_author
 
 
 def test_pypi_author_malformed():
     data = {
         "author": "['pierre', 'paul', 'jacques']",
         "author_email": None,
     }
 
     actual_author = author(data)
 
     expected_author = Person(
         fullname=b"['pierre', 'paul', 'jacques']",
         name=b"['pierre', 'paul', 'jacques']",
         email=None,
     )
 
     assert actual_author == expected_author
 
 
 def test_pypi_author_malformed_2():
     data = {
         "author": "[marie, jeanne]",
         "author_email": "[marie@some, jeanne@thing]",
     }
 
     actual_author = author(data)
 
     expected_author = Person(
         fullname=b"[marie, jeanne] <[marie@some, jeanne@thing]>",
         name=b"[marie, jeanne]",
         email=b"[marie@some, jeanne@thing]",
     )
 
     assert actual_author == expected_author
 
 
 def test_pypi_author_malformed_3():
     data = {
         "author": "[marie, jeanne, pierre]",
         "author_email": "[marie@somewhere.org, jeanne@somewhere.org]",
     }
 
     actual_author = author(data)
 
     expected_author = Person(
         fullname=(
             b"[marie, jeanne, pierre] " b"<[marie@somewhere.org, jeanne@somewhere.org]>"
         ),
         name=b"[marie, jeanne, pierre]",
         email=b"[marie@somewhere.org, jeanne@somewhere.org]",
     )
 
     assert actual_author == expected_author
 
 
 # configuration error #
 
 
 def test_pypi_api_url():
     """Compute pypi api url from the pypi project url should be ok"""
     url = pypi_api_url("https://pypi.org/project/requests")
     assert url == "https://pypi.org/pypi/requests/json"
 
 
 def test_pypi_api_url_with_slash():
     """Compute pypi api url from the pypi project url should be ok"""
     url = pypi_api_url("https://pypi.org/project/requests/")
     assert url == "https://pypi.org/pypi/requests/json"
 
 
 @pytest.mark.fs
 def test_pypi_extract_intrinsic_metadata(tmp_path, datadir):
     """Parsing existing archive's PKG-INFO should yield results"""
     uncompressed_archive_path = str(tmp_path)
     archive_path = path.join(
         datadir, "https_files.pythonhosted.org", "0805nexter-1.1.0.zip"
     )
     uncompress(archive_path, dest=uncompressed_archive_path)
 
     actual_metadata = extract_intrinsic_metadata(uncompressed_archive_path)
     expected_metadata = {
         "metadata_version": "1.0",
         "name": "0805nexter",
         "version": "1.1.0",
         "summary": "a simple printer of nested lest",
         "home_page": "http://www.hp.com",
         "author": "hgtkpython",
         "author_email": "2868989685@qq.com",
         "platforms": ["UNKNOWN"],
     }
 
     assert actual_metadata == expected_metadata
 
 
 @pytest.mark.fs
 def test_pypi_extract_intrinsic_metadata_failures(tmp_path):
     """Parsing inexistent path/archive/PKG-INFO yield None"""
     tmp_path = str(tmp_path)  # py3.5 work around (PosixPath issue)
     # inexistent first level path
     assert extract_intrinsic_metadata("/something-inexistent") == {}
     # inexistent second level path (as expected by pypi archives)
     assert extract_intrinsic_metadata(tmp_path) == {}
     # inexistent PKG-INFO within second level path
     existing_path_no_pkginfo = path.join(tmp_path, "something")
     os.mkdir(existing_path_no_pkginfo)
     assert extract_intrinsic_metadata(tmp_path) == {}
 
 
 # LOADER SCENARIO #
 
 # "edge" cases (for the same origin) #
 
 
 # no release artifact:
 # {visit full, status: uneventful, no contents, etc...}
 requests_mock_datadir_missing_all = requests_mock_datadir_factory(
     ignore_urls=[
         "https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip",  # noqa
         "https://files.pythonhosted.org/packages/c4/a0/4562cda161dc4ecbbe9e2a11eb365400c0461845c5be70d73869786809c4/0805nexter-1.2.0.zip",  # noqa
     ]
 )
 
 
 def test_pypi_no_release_artifact(swh_storage, requests_mock_datadir_missing_all):
     """Load a pypi project with all artifacts missing ends up with no snapshot
 
     """
     url = "https://pypi.org/project/0805nexter"
     loader = PyPILoader(swh_storage, url)
 
     actual_load_status = loader.load()
     assert actual_load_status["status"] == "uneventful"
     assert actual_load_status["snapshot_id"] is not None
 
     stats = get_stats(swh_storage)
     assert {
         "content": 0,
         "directory": 0,
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": 0,
         "skipped_content": 0,
         "snapshot": 1,
     } == stats
 
     assert_last_visit_matches(swh_storage, url, status="partial", type="pypi")
 
 
 def test_pypi_fail__load_snapshot(swh_storage, requests_mock_datadir):
     """problem during loading: {visit: failed, status: failed, no snapshot}
 
     """
     url = "https://pypi.org/project/0805nexter"
     with patch(
         "swh.loader.package.pypi.loader.PyPILoader._load_snapshot",
         side_effect=ValueError("Fake problem to fail visit"),
     ):
         loader = PyPILoader(swh_storage, url)
 
         actual_load_status = loader.load()
         assert actual_load_status == {"status": "failed"}
 
         stats = get_stats(loader.storage)
 
         assert {
             "content": 6,
             "directory": 4,
             "origin": 1,
             "origin_visit": 1,
             "release": 0,
             "revision": 2,
             "skipped_content": 0,
             "snapshot": 0,
         } == stats
 
         assert_last_visit_matches(swh_storage, url, status="failed", type="pypi")
 
 
 # problem during loading:
 # {visit: partial, status: uneventful, no snapshot}
 
 
 def test_pypi_release_with_traceback(swh_storage, requests_mock_datadir):
     url = "https://pypi.org/project/0805nexter"
     with patch(
         "swh.loader.package.pypi.loader.PyPILoader.last_snapshot",
         side_effect=ValueError("Fake problem to fail the visit"),
     ):
         loader = PyPILoader(swh_storage, url)
 
         actual_load_status = loader.load()
         assert actual_load_status == {"status": "failed"}
 
         stats = get_stats(swh_storage)
 
         assert {
             "content": 0,
             "directory": 0,
             "origin": 1,
             "origin_visit": 1,
             "release": 0,
             "revision": 0,
             "skipped_content": 0,
             "snapshot": 0,
         } == stats
 
         assert_last_visit_matches(swh_storage, url, status="failed", type="pypi")
 
 
 # problem during loading: failure early enough in between swh contents...
 # some contents (contents, directories, etc...) have been written in storage
 # {visit: partial, status: eventful, no snapshot}
 
 # problem during loading: failure late enough we can have snapshots (some
 # revisions are written in storage already)
 # {visit: partial, status: eventful, snapshot}
 
 # "normal" cases (for the same origin) #
 
 
 requests_mock_datadir_missing_one = requests_mock_datadir_factory(
     ignore_urls=[
         "https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip",  # noqa
     ]
 )
 
 # some missing release artifacts:
 # {visit partial, status: eventful, 1 snapshot}
 
 
 def test_pypi_revision_metadata_structure(
     swh_storage, requests_mock_datadir, _0805nexter_api_info
 ):
     url = "https://pypi.org/project/0805nexter"
     loader = PyPILoader(swh_storage, url)
 
     actual_load_status = loader.load()
     assert actual_load_status["status"] == "eventful"
     assert actual_load_status["snapshot_id"] is not None
 
     expected_revision_id = hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21")
     revision = swh_storage.revision_get([expected_revision_id])[0]
     assert revision is not None
 
-    check_metadata_paths(
-        revision.metadata,
-        paths=[
-            ("intrinsic.tool", str),
-            ("intrinsic.raw", dict),
-            ("extrinsic.provider", str),
-            ("extrinsic.when", str),
-            ("extrinsic.raw", dict),
-            ("original_artifact", list),
-        ],
-    )
-
-    for original_artifact in revision.metadata["original_artifact"]:
-        check_metadata_paths(
-            original_artifact,
-            paths=[("filename", str), ("length", int), ("checksums", dict),],
-        )
-
     revision_swhid = CoreSWHID(
         object_type=ObjectType.REVISION, object_id=expected_revision_id
     )
     directory_swhid = ExtendedSWHID(
         object_type=ExtendedObjectType.DIRECTORY, object_id=revision.directory
     )
     metadata_authority = MetadataAuthority(
         type=MetadataAuthorityType.FORGE, url="https://pypi.org/",
     )
     expected_metadata = [
         RawExtrinsicMetadata(
             target=directory_swhid,
             authority=metadata_authority,
             fetcher=MetadataFetcher(
                 name="swh.loader.package.pypi.loader.PyPILoader", version=__version__,
             ),
             discovery_date=loader.visit_date,
             format="pypi-project-json",
             metadata=json.dumps(
                 json.loads(_0805nexter_api_info)["releases"]["1.2.0"][0]
             ).encode(),
             origin=url,
             revision=revision_swhid,
         )
     ]
     assert swh_storage.raw_extrinsic_metadata_get(
         directory_swhid, metadata_authority,
     ) == PagedResult(next_page_token=None, results=expected_metadata,)
 
 
 def test_pypi_visit_with_missing_artifact(
     swh_storage, requests_mock_datadir_missing_one
 ):
     """Load a pypi project with some missing artifacts ends up with 1 snapshot
 
     """
     url = "https://pypi.org/project/0805nexter"
     loader = PyPILoader(swh_storage, url)
 
     actual_load_status = loader.load()
     expected_snapshot_id = hash_to_bytes("dd0e4201a232b1c104433741dbf45895b8ac9355")
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id.hex(),
     }
 
     stats = get_stats(swh_storage)
 
     assert {
         "content": 3,
         "directory": 2,
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": 1,
         "skipped_content": 0,
         "snapshot": 1,
     } == stats
 
     expected_contents = map(
         hash_to_bytes,
         [
             "405859113963cb7a797642b45f171d6360425d16",
             "e5686aa568fdb1d19d7f1329267082fe40482d31",
             "83ecf6ec1114fd260ca7a833a2d165e71258c338",
         ],
     )
 
     assert list(swh_storage.content_missing_per_sha1(expected_contents)) == []
 
     expected_dirs = map(
         hash_to_bytes,
         [
             "b178b66bd22383d5f16f4f5c923d39ca798861b4",
             "c3a58f8b57433a4b56caaa5033ae2e0931405338",
         ],
     )
 
     assert list(swh_storage.directory_missing(expected_dirs)) == []
 
     # {revision hash: directory hash}
     expected_revs = {
         hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"): hash_to_bytes(
             "b178b66bd22383d5f16f4f5c923d39ca798861b4"
         ),  # noqa
     }
     assert list(swh_storage.revision_missing(expected_revs)) == []
 
     expected_snapshot = Snapshot(
         id=hash_to_bytes(expected_snapshot_id),
         branches={
             b"releases/1.2.0": SnapshotBranch(
                 target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"),
                 target_type=TargetType.REVISION,
             ),
             b"HEAD": SnapshotBranch(
                 target=b"releases/1.2.0", target_type=TargetType.ALIAS,
             ),
         },
     )
     check_snapshot(expected_snapshot, storage=swh_storage)
 
     assert_last_visit_matches(
         swh_storage, url, status="partial", type="pypi", snapshot=expected_snapshot_id,
     )
 
 
 def test_pypi_visit_with_1_release_artifact(swh_storage, requests_mock_datadir):
     """With no prior visit, load a pypi project ends up with 1 snapshot
 
     """
     url = "https://pypi.org/project/0805nexter"
     loader = PyPILoader(swh_storage, url)
 
     actual_load_status = loader.load()
     expected_snapshot_id = hash_to_bytes("ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a")
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id.hex(),
     }
 
     stats = get_stats(swh_storage)
     assert {
         "content": 6,
         "directory": 4,
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": 2,
         "skipped_content": 0,
         "snapshot": 1,
     } == stats
 
     expected_contents = map(
         hash_to_bytes,
         [
             "a61e24cdfdab3bb7817f6be85d37a3e666b34566",
             "938c33483285fd8ad57f15497f538320df82aeb8",
             "a27576d60e08c94a05006d2e6d540c0fdb5f38c8",
             "405859113963cb7a797642b45f171d6360425d16",
             "e5686aa568fdb1d19d7f1329267082fe40482d31",
             "83ecf6ec1114fd260ca7a833a2d165e71258c338",
         ],
     )
 
     assert list(swh_storage.content_missing_per_sha1(expected_contents)) == []
 
     expected_dirs = map(
         hash_to_bytes,
         [
             "05219ba38bc542d4345d5638af1ed56c7d43ca7d",
             "cf019eb456cf6f78d8c4674596f1c9a97ece8f44",
             "b178b66bd22383d5f16f4f5c923d39ca798861b4",
             "c3a58f8b57433a4b56caaa5033ae2e0931405338",
         ],
     )
 
     assert list(swh_storage.directory_missing(expected_dirs)) == []
 
     # {revision hash: directory hash}
     expected_revs = {
         hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"): hash_to_bytes(
             "05219ba38bc542d4345d5638af1ed56c7d43ca7d"
         ),  # noqa
         hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"): hash_to_bytes(
             "b178b66bd22383d5f16f4f5c923d39ca798861b4"
         ),  # noqa
     }
     assert list(swh_storage.revision_missing(expected_revs)) == []
 
     expected_snapshot = Snapshot(
         id=expected_snapshot_id,
         branches={
             b"releases/1.1.0": SnapshotBranch(
                 target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"),
                 target_type=TargetType.REVISION,
             ),
             b"releases/1.2.0": SnapshotBranch(
                 target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"),
                 target_type=TargetType.REVISION,
             ),
             b"HEAD": SnapshotBranch(
                 target=b"releases/1.2.0", target_type=TargetType.ALIAS,
             ),
         },
     )
     check_snapshot(expected_snapshot, swh_storage)
 
     assert_last_visit_matches(
         swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot_id
     )
 
 
 def test_pypi_multiple_visits_with_no_change(swh_storage, requests_mock_datadir):
     """Multiple visits with no changes results in 1 same snapshot
 
     """
     url = "https://pypi.org/project/0805nexter"
     loader = PyPILoader(swh_storage, url)
 
     actual_load_status = loader.load()
     snapshot_id = hash_to_bytes("ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a")
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": snapshot_id.hex(),
     }
     assert_last_visit_matches(
         swh_storage, url, status="full", type="pypi", snapshot=snapshot_id
     )
 
     stats = get_stats(swh_storage)
 
     assert {
         "content": 6,
         "directory": 4,
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": 2,
         "skipped_content": 0,
         "snapshot": 1,
     } == stats
 
     expected_snapshot = Snapshot(
         id=snapshot_id,
         branches={
             b"releases/1.1.0": SnapshotBranch(
                 target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"),
                 target_type=TargetType.REVISION,
             ),
             b"releases/1.2.0": SnapshotBranch(
                 target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"),
                 target_type=TargetType.REVISION,
             ),
             b"HEAD": SnapshotBranch(
                 target=b"releases/1.2.0", target_type=TargetType.ALIAS,
             ),
         },
     )
     check_snapshot(expected_snapshot, swh_storage)
 
     actual_load_status2 = loader.load()
     assert actual_load_status2 == {
         "status": "uneventful",
         "snapshot_id": actual_load_status2["snapshot_id"],
     }
 
     visit_status2 = assert_last_visit_matches(
         swh_storage, url, status="full", type="pypi"
     )
 
     stats2 = get_stats(swh_storage)
     expected_stats2 = stats.copy()
     expected_stats2["origin_visit"] = 1 + 1
     assert expected_stats2 == stats2
 
     # same snapshot
     assert visit_status2.snapshot == snapshot_id
 
 
 def test_pypi_incremental_visit(swh_storage, requests_mock_datadir_visits):
     """With prior visit, 2nd load will result with a different snapshot
 
     """
     url = "https://pypi.org/project/0805nexter"
     loader = PyPILoader(swh_storage, url)
 
     visit1_actual_load_status = loader.load()
     visit1_stats = get_stats(swh_storage)
     expected_snapshot_id = hash_to_bytes("ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a")
     assert visit1_actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id.hex(),
     }
 
     assert_last_visit_matches(
         swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot_id
     )
 
     assert {
         "content": 6,
         "directory": 4,
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": 2,
         "skipped_content": 0,
         "snapshot": 1,
     } == visit1_stats
 
     # Reset internal state
     del loader._cached__raw_info
     del loader._cached_info
 
     visit2_actual_load_status = loader.load()
     visit2_stats = get_stats(swh_storage)
 
     assert visit2_actual_load_status["status"] == "eventful", visit2_actual_load_status
     expected_snapshot_id2 = hash_to_bytes("2e5149a7b0725d18231a37b342e9b7c4e121f283")
     assert visit2_actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id2.hex(),
     }
 
     assert_last_visit_matches(
         swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot_id2
     )
 
     assert {
         "content": 6 + 1,  # 1 more content
         "directory": 4 + 2,  # 2 more directories
         "origin": 1,
         "origin_visit": 1 + 1,
         "release": 0,
         "revision": 2 + 1,  # 1 more revision
         "skipped_content": 0,
         "snapshot": 1 + 1,  # 1 more snapshot
     } == visit2_stats
 
     expected_contents = map(
         hash_to_bytes,
         [
             "a61e24cdfdab3bb7817f6be85d37a3e666b34566",
             "938c33483285fd8ad57f15497f538320df82aeb8",
             "a27576d60e08c94a05006d2e6d540c0fdb5f38c8",
             "405859113963cb7a797642b45f171d6360425d16",
             "e5686aa568fdb1d19d7f1329267082fe40482d31",
             "83ecf6ec1114fd260ca7a833a2d165e71258c338",
             "92689fa2b7fb4d4fc6fb195bf73a50c87c030639",
         ],
     )
 
     assert list(swh_storage.content_missing_per_sha1(expected_contents)) == []
 
     expected_dirs = map(
         hash_to_bytes,
         [
             "05219ba38bc542d4345d5638af1ed56c7d43ca7d",
             "cf019eb456cf6f78d8c4674596f1c9a97ece8f44",
             "b178b66bd22383d5f16f4f5c923d39ca798861b4",
             "c3a58f8b57433a4b56caaa5033ae2e0931405338",
             "e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a",
             "52604d46843b898f5a43208045d09fcf8731631b",
         ],
     )
 
     assert list(swh_storage.directory_missing(expected_dirs)) == []
 
     # {revision hash: directory hash}
     expected_revs = {
         hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"): hash_to_bytes(
             "05219ba38bc542d4345d5638af1ed56c7d43ca7d"
         ),  # noqa
         hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"): hash_to_bytes(
             "b178b66bd22383d5f16f4f5c923d39ca798861b4"
         ),  # noqa
         hash_to_bytes("51247143b01445c9348afa9edfae31bf7c5d86b1"): hash_to_bytes(
             "e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a"
         ),  # noqa
     }
 
     assert list(swh_storage.revision_missing(expected_revs)) == []
 
     expected_snapshot = Snapshot(
         id=expected_snapshot_id2,
         branches={
             b"releases/1.1.0": SnapshotBranch(
                 target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"),
                 target_type=TargetType.REVISION,
             ),
             b"releases/1.2.0": SnapshotBranch(
                 target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"),
                 target_type=TargetType.REVISION,
             ),
             b"releases/1.3.0": SnapshotBranch(
                 target=hash_to_bytes("51247143b01445c9348afa9edfae31bf7c5d86b1"),
                 target_type=TargetType.REVISION,
             ),
             b"HEAD": SnapshotBranch(
                 target=b"releases/1.3.0", target_type=TargetType.ALIAS,
             ),
         },
     )
 
     check_snapshot(expected_snapshot, swh_storage)
 
     assert_last_visit_matches(
         swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot.id
     )
 
     urls = [
         m.url
         for m in requests_mock_datadir_visits.request_history
         if m.url.startswith("https://files.pythonhosted.org")
     ]
     # visited each artifact once across 2 visits
     assert len(urls) == len(set(urls))
 
 
 # release artifact, no new artifact
 # {visit full, status uneventful, same snapshot as before}
 
 # release artifact, old artifact with different checksums
 # {visit full, status full, new snapshot with shared history and some new
 # different history}
 
 # release with multiple sdist artifacts per pypi "version"
 # snapshot branch output is different
 
 
 def test_pypi_visit_1_release_with_2_artifacts(swh_storage, requests_mock_datadir):
     """With no prior visit, load a pypi project ends up with 1 snapshot
 
     """
     url = "https://pypi.org/project/nexter"
     loader = PyPILoader(swh_storage, url)
 
     actual_load_status = loader.load()
     expected_snapshot_id = hash_to_bytes("a27e638a4dad6fbfa273c6ebec1c4bf320fb84c6")
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id.hex(),
     }
 
     expected_snapshot = Snapshot(
         id=expected_snapshot_id,
         branches={
             b"releases/1.1.0/nexter-1.1.0.zip": SnapshotBranch(
                 target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"),
                 target_type=TargetType.REVISION,
             ),
             b"releases/1.1.0/nexter-1.1.0.tar.gz": SnapshotBranch(
                 target=hash_to_bytes("0bf88f5760cca7665d0af4d6575d9301134fe11a"),
                 target_type=TargetType.REVISION,
             ),
         },
     )
     check_snapshot(expected_snapshot, swh_storage)
 
     assert_last_visit_matches(
         swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot.id
     )
 
 
-def test_pypi__known_artifact_to_extid__old_loader_version():
-    """Current loader version should solve old metadata scheme
-
-    """
-    assert (
-        PyPILoader.known_artifact_to_extid(
-            {"original_artifact": {"sha256": "something-wrong",},}
-        )
-        is None
-    )
-
-    sha256 = "6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec"
-    assert PyPILoader.known_artifact_to_extid(
-        {"original_artifact": {"sha256": sha256}}
-    ) == ("pypi-archive-sha256", hash_to_bytes(sha256))
-
-
-def test_pypi__known_artifact_to_extid__current_loader_version():
-    """Current loader version should be able to solve current metadata scheme
-
-    """
-    sha256 = "6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec"
-
-    assert PyPILoader.known_artifact_to_extid(
-        {"original_artifact": [{"checksums": {"sha256": sha256,},}],}
-    ) == ("pypi-archive-sha256", hash_to_bytes(sha256))
-
-    assert (
-        PyPILoader.known_artifact_to_extid(
-            {"original_artifact": [{"checksums": {"sha256": "something-wrong"},}],},
-        )
-        is None
-    )
-
-    # there should not be more than one artifact
-    with pytest.raises(ValueError):
-        PyPILoader.known_artifact_to_extid(
-            {
-                "original_artifact": [
-                    {"checksums": {"sha256": sha256,},},
-                    {"checksums": {"sha256": sha256,},},
-                ],
-            }
-        )
-
-
 def test_pypi_artifact_with_no_intrinsic_metadata(swh_storage, requests_mock_datadir):
     """Skip artifact with no intrinsic metadata during ingestion
 
     """
     url = "https://pypi.org/project/upymenu"
     loader = PyPILoader(swh_storage, url)
 
     actual_load_status = loader.load()
     expected_snapshot_id = hash_to_bytes("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e")
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id.hex(),
     }
 
     # no branches, as the only artifact has no intrinsic metadata
     expected_snapshot = Snapshot(id=expected_snapshot_id, branches={})
     check_snapshot(expected_snapshot, swh_storage)
 
     assert_last_visit_matches(
         swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot.id
     )
 
 
 def test_pypi_origin_not_found(swh_storage, requests_mock_datadir):
     url = "https://pypi.org/project/unknown"
     loader = PyPILoader(swh_storage, url)
 
     assert loader.load() == {"status": "failed"}
 
     assert_last_visit_matches(
         swh_storage, url, status="not_found", type="pypi", snapshot=None
     )
diff --git a/swh/loader/package/tests/common.py b/swh/loader/package/tests/common.py
index 35e53d2..e53023d 100644
--- a/swh/loader/package/tests/common.py
+++ b/swh/loader/package/tests/common.py
@@ -1,54 +1,12 @@
 # Copyright (C) 2019-2020  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import logging
 from os import path
-from typing import Dict, List, Tuple
 
 logger = logging.getLogger(__file__)
 
 
 DATADIR = path.join(path.abspath(path.dirname(__file__)), "resources")
-
-
-def check_metadata(metadata: Dict, key_path: str, raw_type: str):
-    """Given a metadata dict, ensure the associated key_path value is of type
-       raw_type.
-
-    Args:
-        metadata: Dict to check
-        key_path: Path to check
-        raw_type: Type to check the path with
-
-    Raises:
-        Assertion error in case of mismatch
-
-    """
-    data = metadata
-    keys = key_path.split(".")
-    for k in keys:
-        try:
-            data = data[k]
-        except (TypeError, KeyError) as e:
-            # KeyError: because path too long
-            # TypeError: data is not a dict
-            raise AssertionError(e)
-    assert isinstance(data, raw_type)  # type: ignore
-
-
-def check_metadata_paths(metadata: Dict, paths: List[Tuple[str, str]]):
-    """Given a metadata dict, ensure the keys are of expected types
-
-    Args:
-        metadata: Dict to check
-        key_path: Path to check
-        raw_type: Type to check the path with
-
-    Raises:
-        Assertion error in case of mismatch
-
-    """
-    for key_path, raw_type in paths:
-        check_metadata(metadata, key_path, raw_type)
diff --git a/swh/loader/package/tests/test_common.py b/swh/loader/package/tests/test_common.py
deleted file mode 100644
index 1578fea..0000000
--- a/swh/loader/package/tests/test_common.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# Copyright (C) 2019-2020  The Software Heritage developers
-# See the AUTHORS file at the top-level directory of this distribution
-# License: GNU General Public License version 3, or any later version
-# See top-level LICENSE file for more information
-
-import pytest
-
-from swh.loader.package.tests.common import check_metadata, check_metadata_paths
-
-
-def test_check_metadata():
-    metadata = {
-        "a": {"raw": {"time": "something",},},
-        "b": [],
-        "c": 1,
-    }
-
-    for raw_path, raw_type in [
-        ("a.raw", dict),
-        ("a.raw.time", str),
-        ("b", list),
-        ("c", int),
-    ]:
-        check_metadata(metadata, raw_path, raw_type)
-
-
-def test_check_metadata_ko():
-    metadata = {
-        "a": {"raw": "hello",},
-        "b": [],
-        "c": 1,
-    }
-
-    for raw_path, raw_type in [
-        ("a.b", dict),
-        ("a.raw.time", str),
-    ]:
-        with pytest.raises(AssertionError):
-            check_metadata(metadata, raw_path, raw_type)
-
-
-def test_check_metadata_paths():
-    metadata = {
-        "a": {"raw": {"time": "something",},},
-        "b": [],
-        "c": 1,
-    }
-
-    check_metadata_paths(
-        metadata, [("a.raw", dict), ("a.raw.time", str), ("b", list), ("c", int),]
-    )
-
-
-def test_check_metadata_paths_ko():
-    metadata = {
-        "a": {"raw": "hello",},
-        "b": [],
-        "c": 1,
-    }
-
-    with pytest.raises(AssertionError):
-        check_metadata_paths(metadata, [("a.b", dict), ("a.raw.time", str),])
diff --git a/swh/loader/package/tests/test_loader.py b/swh/loader/package/tests/test_loader.py
index 11fc93c..d8f50bc 100644
--- a/swh/loader/package/tests/test_loader.py
+++ b/swh/loader/package/tests/test_loader.py
@@ -1,363 +1,318 @@
 # Copyright (C) 2019-2021  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import datetime
 import hashlib
 import string
 from unittest.mock import Mock, call, patch
 
 import attr
 import pytest
 
 from swh.loader.package.loader import BasePackageInfo, PackageLoader
 from swh.model.identifiers import CoreSWHID, ObjectType
 from swh.model.model import (
     ExtID,
     Origin,
     OriginVisit,
     OriginVisitStatus,
     Snapshot,
     SnapshotBranch,
     TargetType,
 )
 from swh.storage import get_storage
 from swh.storage.algos.snapshot import snapshot_get_latest
 
 
 class FakeStorage:
     def origin_add(self, origins):
         raise ValueError("We refuse to add an origin")
 
     def origin_visit_get_latest(self, origin):
         return None
 
 
 class FakeStorage2(FakeStorage):
     def origin_add(self, origins):
         pass
 
     def origin_visit_add(self, visits):
         raise ValueError("We refuse to add an origin visit")
 
 
 class StubPackageInfo(BasePackageInfo):
     pass
 
 
 class StubPackageLoader(PackageLoader[StubPackageInfo]):
     def get_versions(self):
         return ["v1.0", "v2.0", "v3.0", "v4.0"]
 
     def get_package_info(self, version):
         p_info = StubPackageInfo("http://example.org", f"example-{version}.tar")
         extid_type = "extid-type1" if version in ("v1.0", "v2.0") else "extid-type2"
         # Versions v1.0 and v2.0 have extids of one type; v3.0 and v4.0 have
         # extids of a different type
         patch.object(
             p_info,
             "extid",
             return_value=(extid_type, f"extid-of-{version}".encode()),
             autospec=True,
         ).start()
         yield (f"branch-{version}", p_info)
 
     def _load_revision(self, p_info, origin):
         return None
 
 
 def test_loader_origin_visit_failure(swh_storage):
     """Failure to add origin or origin visit should failed immediately
 
     """
     loader = PackageLoader(swh_storage, "some-url")
     loader.storage = FakeStorage()
 
     actual_load_status = loader.load()
     assert actual_load_status == {"status": "failed"}
 
     loader.storage = FakeStorage2()
 
     actual_load_status2 = loader.load()
     assert actual_load_status2 == {"status": "failed"}
 
 
-def test_resolve_revision_from_artifacts() -> None:
-    loader = PackageLoader(None, None)  # type: ignore
-    loader.known_artifact_to_extid = Mock(  # type: ignore
-        wraps=lambda known_artifact: ("extid-type", known_artifact["key"].encode())
-    )
-
-    known_artifacts = {
-        b"a" * 20: {"key": "extid-of-aaaa"},
-        b"b" * 20: {"key": "extid-of-bbbb"},
-    }
-
-    p_info = Mock(wraps=BasePackageInfo(None, None))  # type: ignore
-
-    # No known artifact -> it would be useless to compute the extid
-    assert loader.resolve_revision_from_artifacts({}, p_info) is None
-    p_info.extid.assert_not_called()
-    loader.known_artifact_to_extid.assert_not_called()
-
-    p_info.extid.reset_mock()
-
-    # Some artifacts, but the PackageInfo does not support extids
-    p_info.extid.return_value = None
-    assert loader.resolve_revision_from_artifacts(known_artifacts, p_info) is None
-    p_info.extid.assert_called_once()
-    loader.known_artifact_to_extid.assert_not_called()
-
-    p_info.extid.reset_mock()
-
-    # Some artifacts, and the PackageInfo is not one of them (ie. cache miss)
-    p_info.extid.return_value = ("extid-type", b"extid-of-cccc")
-    assert loader.resolve_revision_from_artifacts(known_artifacts, p_info) is None
-    p_info.extid.assert_called_once()
-    loader.known_artifact_to_extid.assert_any_call({"key": "extid-of-aaaa"})
-    loader.known_artifact_to_extid.assert_any_call({"key": "extid-of-bbbb"})
-
-    p_info.extid.reset_mock()
-    loader.known_artifact_to_extid.reset_mock()
-
-    # Some artifacts, and the PackageInfo is one of them (ie. cache hit)
-    p_info.extid.return_value = ("extid-type", b"extid-of-aaaa")
-    assert loader.resolve_revision_from_artifacts(known_artifacts, p_info) == b"a" * 20
-    p_info.extid.assert_called_once()
-    loader.known_artifact_to_extid.assert_called_once_with({"key": "extid-of-aaaa"})
-
-
 def test_resolve_revision_from_extids() -> None:
     loader = PackageLoader(None, None)  # type: ignore
 
     p_info = Mock(wraps=BasePackageInfo(None, None))  # type: ignore
 
     # The PackageInfo does not support extids
     p_info.extid.return_value = None
     known_extids = {
         ("extid-type", b"extid-of-aaaa"): [
             CoreSWHID(object_type=ObjectType.REVISION, object_id=b"a" * 20),
         ]
     }
     revision_whitelist = {b"unused"}
     assert (
         loader.resolve_revision_from_extids(known_extids, p_info, revision_whitelist)
         is None
     )
 
     # Some known extid, and the PackageInfo is not one of them (ie. cache miss)
     p_info.extid.return_value = ("extid-type", b"extid-of-cccc")
     assert (
         loader.resolve_revision_from_extids(known_extids, p_info, revision_whitelist)
         is None
     )
 
     # Some known extid, and the PackageInfo is one of them (ie. cache hit),
     # but the target revision was not in the previous snapshot
     p_info.extid.return_value = ("extid-type", b"extid-of-aaaa")
     assert (
         loader.resolve_revision_from_extids(known_extids, p_info, revision_whitelist)
         is None
     )
 
     # Some known extid, and the PackageInfo is one of them (ie. cache hit),
     # and the target revision was in the previous snapshot
     revision_whitelist = {b"a" * 20}
     assert (
         loader.resolve_revision_from_extids(known_extids, p_info, revision_whitelist)
         == b"a" * 20
     )
 
     # Same as before, but there is more than one extid, and only one is an allowed
     # revision
     revision_whitelist = {b"a" * 20}
     known_extids = {
         ("extid-type", b"extid-of-aaaa"): [
             CoreSWHID(object_type=ObjectType.REVISION, object_id=b"b" * 20),
             CoreSWHID(object_type=ObjectType.REVISION, object_id=b"a" * 20),
         ]
     }
     assert (
         loader.resolve_revision_from_extids(known_extids, p_info, revision_whitelist)
         == b"a" * 20
     )
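
# Illustrative sketch, not part of this patch: the lookup behaviour exercised by
# test_resolve_revision_from_extids above, re-implemented standalone. It assumes
# that known_extids maps (extid_type, extid) pairs to candidate revision SWHIDs
# and that a revision may only be reused when the previous snapshot referenced
# it (the whitelist). The helper name sketch_resolve_from_extids is hypothetical.
def sketch_resolve_from_extids(known_extids, p_info, whitelist):
    """Return the id of a known revision matching ``p_info``, or None."""
    extid = p_info.extid()
    if extid is None:
        # The package info cannot compute an extid: nothing can be reused.
        return None
    for swhid in known_extids.get(extid, []):
        if swhid.object_id in whitelist:
            # Only reuse a revision that the previous snapshot referenced.
            return swhid.object_id
    return None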
 
 
 def test_load_get_known_extids() -> None:
     """Checks PackageLoader.load() fetches known extids efficiently"""
     storage = Mock(wraps=get_storage("memory"))
 
     loader = StubPackageLoader(storage, "http://example.org")
 
     loader.load()
 
     # Calls should be grouped by extid type
     storage.extid_get_from_extid.assert_has_calls(
         [
             call("extid-type1", [b"extid-of-v1.0", b"extid-of-v2.0"]),
             call("extid-type2", [b"extid-of-v3.0", b"extid-of-v4.0"]),
         ],
         any_order=True,
     )
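
# Illustrative sketch, not part of this patch: one way load() could build the
# batched calls that test_load_get_known_extids above asserts on, grouping the
# (extid_type, extid) pairs of all candidate packages by type before querying
# extid_get_from_extid. The helper name and grouping strategy are assumptions.
from collections import defaultdict


def sketch_group_extids_by_type(extid_pairs):
    """Group (extid_type, extid) pairs into {extid_type: [extid, ...]}."""
    grouped = defaultdict(list)
    for extid_type, extid in extid_pairs:
        grouped[extid_type].append(extid)
    return dict(grouped)


# With the StubPackageLoader versions above, this yields one storage call per type:
# {"extid-type1": [b"extid-of-v1.0", b"extid-of-v2.0"],
#  "extid-type2": [b"extid-of-v3.0", b"extid-of-v4.0"]}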
 
 
 def test_load_extids() -> None:
     """Checks PackageLoader.load() skips iff it should, and writes (only)
     the new ExtIDs"""
     storage = get_storage("memory")
 
     origin = "http://example.org"
     rev1_swhid = CoreSWHID(object_type=ObjectType.REVISION, object_id=b"a" * 20)
     rev2_swhid = CoreSWHID(object_type=ObjectType.REVISION, object_id=b"b" * 20)
     rev3_swhid = CoreSWHID(object_type=ObjectType.REVISION, object_id=b"c" * 20)
     rev4_swhid = CoreSWHID(object_type=ObjectType.REVISION, object_id=b"d" * 20)
     dir_swhid = CoreSWHID(object_type=ObjectType.DIRECTORY, object_id=b"e" * 20)
 
     loader = StubPackageLoader(storage, "http://example.org")
     patch.object(
         loader,
         "_load_revision",
         return_value=(rev4_swhid.object_id, dir_swhid.object_id),
         autospec=True,
     ).start()
 
     # Results of a previous load
     storage.extid_add(
         [
             ExtID("extid-type1", b"extid-of-v1.0", rev1_swhid),
             ExtID("extid-type2", b"extid-of-v2.0", rev2_swhid),
         ]
     )
     last_snapshot = Snapshot(
         branches={
             b"v1.0": SnapshotBranch(
                 target_type=TargetType.REVISION, target=rev1_swhid.object_id
             ),
             b"v2.0": SnapshotBranch(
                 target_type=TargetType.REVISION, target=rev2_swhid.object_id
             ),
             b"v3.0": SnapshotBranch(
                 target_type=TargetType.REVISION, target=rev3_swhid.object_id
             ),
         }
     )
     storage.snapshot_add([last_snapshot])
     date = datetime.datetime.now(tz=datetime.timezone.utc)
     storage.origin_add([Origin(url=origin)])
     storage.origin_visit_add(
         [OriginVisit(origin="http://example.org", visit=1, date=date, type="tar")]
     )
     storage.origin_visit_status_add(
         [
             OriginVisitStatus(
                 origin=origin,
                 visit=1,
                 status="full",
                 date=date,
                 snapshot=last_snapshot.id,
             )
         ]
     )
 
     loader.load()
 
     assert loader._load_revision.mock_calls == [  # type: ignore
         # v1.0: not loaded, because its (extid_type, extid) is already in the
         #       storage and points to a revision from the previous snapshot.
         # v2.0: loaded, because the extid already in the storage has the same
         #       value but a different type
         call(StubPackageInfo(origin, "example-v2.0.tar"), Origin(url=origin)),
         # v3.0: loaded, because no extid for it is in the storage (the v3.0
         #       branch in the previous snapshot alone does not allow skipping it)
         call(StubPackageInfo(origin, "example-v3.0.tar"), Origin(url=origin)),
         # v4.0: loaded, because its extid is not in the storage either
         call(StubPackageInfo(origin, "example-v4.0.tar"), Origin(url=origin)),
     ]
 
     # then check the snapshot has all the branches.
     # versions 2.0 to 4.0 all point to rev4_swhid (instead of the values from the
     # last snapshot), because they had to be loaded (mismatched or missing extid),
     # and the mocked _load_revision always returns rev4_swhid.
     snapshot = Snapshot(
         branches={
             b"branch-v1.0": SnapshotBranch(
                 target_type=TargetType.REVISION, target=rev1_swhid.object_id
             ),
             b"branch-v2.0": SnapshotBranch(
                 target_type=TargetType.REVISION, target=rev4_swhid.object_id
             ),
             b"branch-v3.0": SnapshotBranch(
                 target_type=TargetType.REVISION, target=rev4_swhid.object_id
             ),
             b"branch-v4.0": SnapshotBranch(
                 target_type=TargetType.REVISION, target=rev4_swhid.object_id
             ),
         }
     )
     assert snapshot_get_latest(storage, origin) == snapshot
 
     extids = storage.extid_get_from_target(
         ObjectType.REVISION,
         [
             rev1_swhid.object_id,
             rev2_swhid.object_id,
             rev3_swhid.object_id,
             rev4_swhid.object_id,
         ],
     )
 
     assert set(extids) == {
         # What we inserted at the beginning of the test:
         ExtID("extid-type1", b"extid-of-v1.0", rev1_swhid),
         ExtID("extid-type2", b"extid-of-v2.0", rev2_swhid),
         # Added by the loader:
         ExtID("extid-type1", b"extid-of-v2.0", rev4_swhid),
         ExtID("extid-type2", b"extid-of-v3.0", rev4_swhid),
         ExtID("extid-type2", b"extid-of-v4.0", rev4_swhid),
     }
 
 
 def test_manifest_extid():
     """Compute primary key should return the right identity
 
     """
 
     @attr.s
     class TestPackageInfo(BasePackageInfo):
         a = attr.ib()
         b = attr.ib()
         length = attr.ib()
         filename = attr.ib()
         version = attr.ib()
 
         MANIFEST_FORMAT = string.Template("$a $b")
 
     p_info = TestPackageInfo(
         url="http://example.org/",
         a=1,
         b=2,
         length=221837,
         filename="8sync-0.1.0.tar.gz",
         version="0.1.0",
     )
 
     actual_id = p_info.extid()
     assert actual_id == ("package-manifest-sha256", hashlib.sha256(b"1 2").digest())
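
# Illustrative sketch, not part of this patch: as test_manifest_extid above pins
# down, the extid is the sha256 of MANIFEST_FORMAT with the package info's
# attributes substituted in as strings ("$a $b" with a=1, b=2 hashes b"1 2").
# The helper name sketch_manifest_extid is hypothetical.
import hashlib
import string


def sketch_manifest_extid(template, attributes):
    """Substitute attributes into the manifest template and hash the result."""
    manifest = template.substitute({k: str(v) for k, v in attributes.items()})
    return ("package-manifest-sha256", hashlib.sha256(manifest.encode()).digest())


assert sketch_manifest_extid(string.Template("$a $b"), {"a": 1, "b": 2}) == (
    "package-manifest-sha256",
    hashlib.sha256(b"1 2").digest(),
)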
 
 
 def test_no_env_swh_config_filename_raise(monkeypatch):
     """No SWH_CONFIG_FILENAME environment variable makes package loader init raise
 
     """
 
     class DummyPackageLoader(PackageLoader):
         """A dummy package loader for test purpose"""
 
         pass
 
     monkeypatch.delenv("SWH_CONFIG_FILENAME", raising=False)
 
     with pytest.raises(
         AssertionError, match="SWH_CONFIG_FILENAME environment variable is undefined"
     ):
         DummyPackageLoader.from_configfile(url="some-url")