diff --git a/requirements-swh.txt b/requirements-swh.txt
index 81c81968..726dd6ea 100644
--- a/requirements-swh.txt
+++ b/requirements-swh.txt
@@ -1,2 +1,2 @@
 swh.core[http] >= 0.4
-swh.model >= 0.7.2
+swh.model >= 1.0.0
diff --git a/swh/deposit/api/common.py b/swh/deposit/api/common.py
index 5047419b..b3e0b367 100644
--- a/swh/deposit/api/common.py
+++ b/swh/deposit/api/common.py
@@ -1,1197 +1,1198 @@
 # Copyright (C) 2017-2021  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 from abc import ABCMeta, abstractmethod
 import datetime
 import hashlib
 import json
 from typing import Any, Dict, Optional, Sequence, Tuple, Type, Union
 import uuid
 
 import attr
 from django.core.files.uploadedfile import UploadedFile
 from django.http import FileResponse, HttpResponse
 from django.shortcuts import render
 from django.urls import reverse
 from django.utils import timezone
 from rest_framework import status
 from rest_framework.authentication import BaseAuthentication, BasicAuthentication
 from rest_framework.permissions import BasePermission, IsAuthenticated
 from rest_framework.request import Request
 from rest_framework.views import APIView
 
 from swh.deposit.api.checks import check_metadata
 from swh.deposit.api.converters import convert_status_detail
 from swh.deposit.models import Deposit
 from swh.deposit.utils import compute_metadata_context
 from swh.model import hashutil
-from swh.model.identifiers import SWHID, ValidationError
+from swh.model.identifiers import ExtendedSWHID, QualifiedSWHID, ValidationError
 from swh.model.model import (
     MetadataAuthority,
     MetadataAuthorityType,
     MetadataFetcher,
+    Origin,
     RawExtrinsicMetadata,
 )
 from swh.scheduler.utils import create_oneshot_task_dict
 
 from ..config import (
     ARCHIVE_KEY,
     ARCHIVE_TYPE,
     CONT_FILE_IRI,
     DEPOSIT_STATUS_DEPOSITED,
     DEPOSIT_STATUS_LOAD_SUCCESS,
     DEPOSIT_STATUS_PARTIAL,
     EDIT_IRI,
     EM_IRI,
     METADATA_KEY,
     METADATA_TYPE,
     RAW_METADATA_KEY,
     SE_IRI,
     STATE_IRI,
     APIConfig,
 )
 from ..errors import (
     BAD_REQUEST,
     CHECKSUM_MISMATCH,
     ERROR_CONTENT,
     FORBIDDEN,
     MAX_UPLOAD_SIZE_EXCEEDED,
     MEDIATION_NOT_ALLOWED,
     METHOD_NOT_ALLOWED,
     NOT_FOUND,
     PARSING_ERROR,
     DepositError,
     ParserError,
 )
 from ..models import DepositClient, DepositCollection, DepositRequest
 from ..parsers import parse_xml
-from ..utils import parse_swh_reference
+from ..utils import extended_swhid_from_qualified, parse_swh_reference
 
 ACCEPT_PACKAGINGS = ["http://purl.org/net/sword/package/SimpleZip"]
 ACCEPT_ARCHIVE_CONTENT_TYPES = ["application/zip", "application/x-tar"]
 
 
 @attr.s
 class ParsedRequestHeaders:
     content_type = attr.ib(type=str)
     content_length = attr.ib(type=Optional[int])
     in_progress = attr.ib(type=bool)
     content_disposition = attr.ib(type=Optional[str])
     content_md5sum = attr.ib(type=Optional[bytes])
     packaging = attr.ib(type=Optional[str])
     slug = attr.ib(type=Optional[str])
     on_behalf_of = attr.ib(type=Optional[str])
     metadata_relevant = attr.ib(type=Optional[str])
     swhid = attr.ib(type=Optional[str])
 
 
 @attr.s
 class Receipt:
     """Data computed while handling the request body that will be served in the
     Deposit Receipt."""
 
     deposit_id = attr.ib(type=int)
     deposit_date = attr.ib(type=datetime.datetime)
     status = attr.ib(type=str)
     archive = attr.ib(type=Optional[str])
 
 
 def _compute_md5(filehandler: UploadedFile) -> bytes:
     h = hashlib.md5()
     for chunk in filehandler:
         h.update(chunk)  # type: ignore
     return h.digest()
 
 
 def get_deposit_by_id(
     deposit_id: int, collection_name: Optional[str] = None
 ) -> Deposit:
     """Gets an existing Deposit object if it exists, or raises `DepositError`.
     If `collection` is not None, also checks the deposit belongs to the collection."""
     try:
         deposit = Deposit.objects.get(pk=deposit_id)
     except Deposit.DoesNotExist:
         raise DepositError(NOT_FOUND, f"Deposit {deposit_id} does not exist")
 
     if collection_name and deposit.collection.name != collection_name:
         get_collection_by_name(collection_name)  # raises if does not exist
 
         raise DepositError(
             NOT_FOUND,
             f"Deposit {deposit_id} does not belong to collection {collection_name}",
         )
 
     return deposit
 
 
 def get_collection_by_name(collection_name: str):
     """Gets an existing Deposit object if it exists, or raises `DepositError`."""
     try:
         collection = DepositCollection.objects.get(name=collection_name)
     except DepositCollection.DoesNotExist:
         raise DepositError(NOT_FOUND, f"Unknown collection name {collection_name}")
 
     assert collection is not None
 
     return collection
 
 
 def guess_deposit_origin_url(deposit: Deposit):
     """Guesses an origin url for the given deposit."""
     external_id = deposit.external_id
     if not external_id:
         # The client provided neither an origin_url nor a slug. That's inconvenient,
         # but SWORD requires we support it. So let's generate a random slug.
         external_id = str(uuid.uuid4())
     return "%s/%s" % (deposit.client.provider_url.rstrip("/"), external_id)
 
 
 def check_client_origin(client: DepositClient, origin_url: str):
     provider_url = client.provider_url.rstrip("/") + "/"
     if not origin_url.startswith(provider_url):
         raise DepositError(
             FORBIDDEN,
             f"Cannot create origin {origin_url}, it must start with " f"{provider_url}",
         )
 
 
 class AuthenticatedAPIView(APIView):
     """Mixin intended as a based API view to enforce the basic
        authentication check
 
     """
 
     authentication_classes: Sequence[Type[BaseAuthentication]] = (BasicAuthentication,)
     permission_classes: Sequence[Type[BasePermission]] = (IsAuthenticated,)
 
 
 class APIBase(APIConfig, AuthenticatedAPIView, metaclass=ABCMeta):
     """Base deposit request class sharing multiple common behaviors.
 
     """
 
     _client: Optional[DepositClient] = None
 
     def _read_headers(self, request: Request) -> ParsedRequestHeaders:
         """Read and unify the necessary headers from the request (those are
            not stored in the same location or not properly formatted).
 
         Args:
             request: Input request
 
         Returns:
             Dictionary with the following keys (some associated values may be
               None):
                 - content-type
                 - content-length
                 - in-progress
                 - content-disposition
                 - packaging
                 - slug
                 - on-behalf-of
 
         """
         meta = request._request.META
 
         content_length = meta.get("CONTENT_LENGTH")
         if content_length and isinstance(content_length, str):
             content_length = int(content_length)
 
         # final deposit if not provided
         in_progress = meta.get("HTTP_IN_PROGRESS", False)
         if isinstance(in_progress, str):
             in_progress = in_progress.lower() == "true"
 
         content_md5sum = meta.get("HTTP_CONTENT_MD5")
         if content_md5sum:
             content_md5sum = bytes.fromhex(content_md5sum)
 
         return ParsedRequestHeaders(
             content_type=request.content_type,
             content_length=content_length,
             in_progress=in_progress,
             content_disposition=meta.get("HTTP_CONTENT_DISPOSITION"),
             content_md5sum=content_md5sum,
             packaging=meta.get("HTTP_PACKAGING"),
             slug=meta.get("HTTP_SLUG"),
             on_behalf_of=meta.get("HTTP_ON_BEHALF_OF"),
             metadata_relevant=meta.get("HTTP_METADATA_RELEVANT"),
             swhid=meta.get("HTTP_X_CHECK_SWHID"),
         )
 
     def _deposit_put(self, deposit: Deposit, in_progress: bool = False) -> None:
         """Save/Update a deposit in db.
 
         Args:
             deposit: deposit being updated/created
             in_progress: deposit status
         """
         if in_progress is False:
             self._complete_deposit(deposit)
         else:
             deposit.status = DEPOSIT_STATUS_PARTIAL
             deposit.save()
 
     def _complete_deposit(self, deposit: Deposit) -> None:
         """Marks the deposit as 'deposited', then schedule a check task if configured
         to do so."""
         deposit.complete_date = timezone.now()
         deposit.status = DEPOSIT_STATUS_DEPOSITED
         deposit.save()
 
         if not deposit.origin_url:
             deposit.origin_url = guess_deposit_origin_url(deposit)
 
         if self.config["checks"]:
             scheduler = self.scheduler
             if deposit.status == DEPOSIT_STATUS_DEPOSITED and not deposit.check_task_id:
                 task = create_oneshot_task_dict(
                     "check-deposit",
                     collection=deposit.collection.name,
                     deposit_id=deposit.id,
                     retries_left=3,
                 )
                 check_task_id = scheduler.create_tasks([task])[0]["id"]
                 deposit.check_task_id = check_task_id
 
         deposit.save()
 
     def _deposit_request_put(
         self,
         deposit: Deposit,
         deposit_request_data: Dict[str, Any],
         replace_metadata: bool = False,
         replace_archives: bool = False,
     ) -> DepositRequest:
         """Save a deposit request with metadata attached to a deposit.
 
         Args:
             deposit: The deposit concerned by the request
             deposit_request_data: The dictionary with at most 2 deposit
               request types (archive, metadata) to associate to the deposit
             replace_metadata: Flag defining if we add or update
               existing metadata to the deposit
             replace_archives: Flag defining if we add or update
               archives to existing deposit
 
         Returns:
             the DepositRequest object stored in the backend
 
         """
         if replace_metadata:
             DepositRequest.objects.filter(deposit=deposit, type=METADATA_TYPE).delete()
 
         if replace_archives:
             DepositRequest.objects.filter(deposit=deposit, type=ARCHIVE_TYPE).delete()
 
         deposit_request = None
 
         archive_file = deposit_request_data.get(ARCHIVE_KEY)
         if archive_file:
             deposit_request = DepositRequest(
                 type=ARCHIVE_TYPE, deposit=deposit, archive=archive_file
             )
             deposit_request.save()
 
         metadata = deposit_request_data.get(METADATA_KEY)
         if metadata:
             raw_metadata = deposit_request_data[RAW_METADATA_KEY]
             deposit_request = DepositRequest(
                 type=METADATA_TYPE,
                 deposit=deposit,
                 metadata=metadata,
                 raw_metadata=raw_metadata.decode("utf-8"),
             )
             deposit_request.save()
 
         assert deposit_request is not None
         return deposit_request
 
     def _delete_archives(self, collection_name: str, deposit: Deposit) -> Dict:
         """Delete archive references from the deposit id.
 
         """
         DepositRequest.objects.filter(deposit=deposit, type=ARCHIVE_TYPE).delete()
 
         return {}
 
     def _delete_deposit(self, collection_name: str, deposit: Deposit) -> Dict:
         """Delete deposit reference.
 
         Args:
             collection_name: Client's collection
             deposit: The deposit to delete
 
         Returns
             Empty dict when ok.
             Dict with error key to describe the failure.
 
         """
         if deposit.collection.name != collection_name:
             summary = "Cannot delete a deposit from another collection"
             description = "Deposit %s does not belong to the collection %s" % (
                 deposit.id,
                 collection_name,
             )
             raise DepositError(
                 BAD_REQUEST, summary=summary, verbose_description=description
             )
 
         DepositRequest.objects.filter(deposit=deposit).delete()
         deposit.delete()
 
         return {}
 
     def _check_file_length(
         self, filehandler: UploadedFile, content_length: Optional[int] = None,
     ) -> None:
         """Check the filehandler passed as argument has exactly the
         expected content_length
 
         Args:
             filehandler: The file to check
             content_length: the expected length if provided.
 
         Raises:
             DepositError if the actual length does not match
         """
         max_upload_size = self.config["max_upload_size"]
         if content_length:
             length = filehandler.size
             if length != content_length:
                 raise DepositError(status.HTTP_412_PRECONDITION_FAILED, "Wrong length")
 
         if filehandler.size > max_upload_size:
             raise DepositError(
                 MAX_UPLOAD_SIZE_EXCEEDED,
                 f"Upload size limit exceeded (max {max_upload_size} bytes)."
                 "Please consider sending the archive in multiple steps.",
             )
 
     def _check_file_md5sum(
         self, filehandler: UploadedFile, md5sum: Optional[bytes],
     ) -> None:
         """Check the filehandler passed as argument has the expected md5sum
 
         Args:
             filehandler: The file to check
             md5sum: md5 hash expected from the file's content
 
         Raises:
             DepositError if the md5sum does not match
 
         """
         if md5sum:
             _md5sum = _compute_md5(filehandler)
             if _md5sum != md5sum:
                 raise DepositError(
                     CHECKSUM_MISMATCH,
                     "Wrong md5 hash",
                     f"The checksum sent {hashutil.hash_to_hex(md5sum)} and the actual "
                     f"checksum {hashutil.hash_to_hex(_md5sum)} does not match.",
                 )
 
     def _binary_upload(
         self,
         request: Request,
         headers: ParsedRequestHeaders,
         collection_name: str,
         deposit: Deposit,
         replace_metadata: bool = False,
         replace_archives: bool = False,
     ) -> Receipt:
         """Binary upload routine.
 
         Other than such a request, a 415 response is returned.
 
         Args:
             request: the request holding information to parse
                 and inject in db
             headers: parsed request headers
             collection_name: the associated client
             deposit: deposit to be updated
             replace_metadata: 'Update or add' request to existing
               deposit. If False (default), this adds new metadata request to
               existing ones. Otherwise, this will replace existing metadata.
             replace_archives: 'Update or add' request to existing
               deposit. If False (default), this adds new archive request to
               existing ones. Otherwise, this will replace existing archives.
               ones.
 
         Raises:
             - 400 (bad request) if the request is not providing an external
               identifier
             - 413 (request entity too large) if the length of the
               archive exceeds the max size configured
             - 412 (precondition failed) if the length or md5 hash provided
               mismatch the reality of the archive
             - 415 (unsupported media type) if a wrong media type is provided
 
         """
         content_length = headers.content_length
         if not content_length:
             raise DepositError(
                 BAD_REQUEST,
                 "CONTENT_LENGTH header is mandatory",
                 "For archive deposit, the CONTENT_LENGTH header must be sent.",
             )
 
         content_disposition = headers.content_disposition
         if not content_disposition:
             raise DepositError(
                 BAD_REQUEST,
                 "CONTENT_DISPOSITION header is mandatory",
                 "For archive deposit, the CONTENT_DISPOSITION header must be sent.",
             )
 
         packaging = headers.packaging
         if packaging and packaging not in ACCEPT_PACKAGINGS:
             raise DepositError(
                 BAD_REQUEST,
                 f"Only packaging {ACCEPT_PACKAGINGS} is supported",
                 f"The packaging provided {packaging} is not supported",
             )
 
         filehandler = request.FILES["file"]
         assert isinstance(filehandler, UploadedFile), filehandler
 
         self._check_file_length(filehandler, content_length)
         self._check_file_md5sum(filehandler, headers.content_md5sum)
 
         # actual storage of data
         archive_metadata = filehandler
         self._deposit_put(
             deposit=deposit, in_progress=headers.in_progress,
         )
         self._deposit_request_put(
             deposit,
             {ARCHIVE_KEY: archive_metadata},
             replace_metadata=replace_metadata,
             replace_archives=replace_archives,
         )
 
         return Receipt(
             deposit_id=deposit.id,
             deposit_date=deposit.reception_date,
             status=deposit.status,
             archive=filehandler.name,
         )
 
     def _read_metadata(self, metadata_stream) -> Tuple[bytes, Dict[str, Any]]:
         """Given a metadata stream, reads the metadata and returns both the
            parsed and the raw metadata.
 
         """
         raw_metadata = metadata_stream.read()
         metadata = parse_xml(raw_metadata)
         return raw_metadata, metadata
 
     def _multipart_upload(
         self,
         request: Request,
         headers: ParsedRequestHeaders,
         collection_name: str,
         deposit: Deposit,
         replace_metadata: bool = False,
         replace_archives: bool = False,
     ) -> Receipt:
         """Multipart upload supported with exactly:
         - 1 archive (zip)
         - 1 atom entry
 
         Other than such a request, a 415 response is returned.
 
         Args:
             request: the request holding information to parse
                 and inject in db
             headers: parsed request headers
             collection_name: the associated client
             deposit: deposit to be updated
             replace_metadata: 'Update or add' request to existing
               deposit. If False (default), this adds new metadata request to
               existing ones. Otherwise, this will replace existing metadata.
             replace_archives: 'Update or add' request to existing
               deposit. If False (default), this adds new archive request to
               existing ones. Otherwise, this will replace existing archives.
               ones.
 
         Raises:
             - 400 (bad request) if the request is not providing an external
               identifier
             - 412 (precondition failed) if the potentially md5 hash provided
               mismatch the reality of the archive
             - 413 (request entity too large) if the length of the
               archive exceeds the max size configured
             - 415 (unsupported media type) if a wrong media type is provided
 
         """
         content_types_present = set()
 
         data: Dict[str, Optional[Any]] = {
             "application/zip": None,  # expected either zip
             "application/x-tar": None,  # or x-tar
             "application/atom+xml": None,
         }
         for key, value in request.FILES.items():
             fh = value
             content_type = fh.content_type
             if content_type in content_types_present:
                 raise DepositError(
                     ERROR_CONTENT,
                     "Only 1 application/zip (or application/x-tar) archive "
                     "and 1 atom+xml entry is supported (as per sword2.0 "
                     "specification)",
                     "You provided more than 1 application/(zip|x-tar) "
                     "or more than 1 application/atom+xml content-disposition "
                     "header in the multipart deposit",
                 )
 
             content_types_present.add(content_type)
             assert content_type is not None
             data[content_type] = fh
 
         if len(content_types_present) != 2:
             raise DepositError(
                 ERROR_CONTENT,
                 "You must provide both 1 application/zip (or "
                 "application/x-tar) and 1 atom+xml entry for multipart "
                 "deposit",
                 "You need to provide only 1 application/(zip|x-tar) "
                 "and 1 application/atom+xml content-disposition header "
                 "in the multipart deposit",
             )
 
         filehandler = data["application/zip"]
         if not filehandler:
             filehandler = data["application/x-tar"]
 
         assert isinstance(filehandler, UploadedFile), filehandler
 
         self._check_file_length(filehandler)
         self._check_file_md5sum(filehandler, headers.content_md5sum)
 
         try:
             raw_metadata, metadata = self._read_metadata(data["application/atom+xml"])
         except ParserError:
             raise DepositError(
                 PARSING_ERROR,
                 "Malformed xml metadata",
                 "The xml received is malformed. "
                 "Please ensure your metadata file is correctly formatted.",
             )
 
         self._set_deposit_origin_from_metadata(deposit, metadata, headers)
 
         # actual storage of data
         self._deposit_put(
             deposit=deposit, in_progress=headers.in_progress,
         )
         deposit_request_data = {
             ARCHIVE_KEY: filehandler,
             METADATA_KEY: metadata,
             RAW_METADATA_KEY: raw_metadata,
         }
         self._deposit_request_put(
             deposit, deposit_request_data, replace_metadata, replace_archives
         )
 
         assert filehandler is not None
         return Receipt(
             deposit_id=deposit.id,
             deposit_date=deposit.reception_date,
             archive=filehandler.name,
             status=deposit.status,
         )
 
     def _store_metadata_deposit(
         self,
         deposit: Deposit,
-        swhid_reference: Union[str, SWHID],
+        swhid_reference: Union[str, QualifiedSWHID],
         metadata: Dict,
         raw_metadata: bytes,
         deposit_origin: Optional[str] = None,
-    ) -> Tuple[Union[SWHID, str], Union[SWHID, str], Deposit, DepositRequest]:
+    ) -> Tuple[ExtendedSWHID, Deposit, DepositRequest]:
         """When all user inputs pass the checks, this associates the raw_metadata to the
            swhid_reference in the raw extrinsic metadata storage. In case of any issues,
            a bad request response is returned to the user with the details.
 
             Checks:
             - metadata are technically parsable
             - metadata pass the functional checks
             - SWHID (if any) is technically valid
 
         Args:
             deposit: Deposit reference
             swhid_reference: The swhid or the origin to attach metadata information to
             metadata: Full dict of metadata to check for validity (parsed out of
               raw_metadata)
             raw_metadata: The actual raw metadata to send in the storage metadata
             deposit_origin: Optional deposit origin url to use if any (e.g. deposit
               update scenario provides one)
 
         Raises:
             DepositError in case of incorrect inputs from the deposit client
             (e.g. functionally invalid metadata, ...)
 
         Returns:
-            Tuple of core swhid, swhid context, deposit and deposit request
+            Tuple of target swhid, deposit, and deposit request
 
         """
         metadata_ok, error_details = check_metadata(metadata)
         if not metadata_ok:
             assert error_details, "Details should be set when a failure occurs"
             raise DepositError(
                 BAD_REQUEST,
                 "Functional metadata checks failure",
                 convert_status_detail(error_details),
             )
 
         metadata_authority = MetadataAuthority(
             type=MetadataAuthorityType.DEPOSIT_CLIENT,
             url=deposit.client.provider_url,
             metadata={"name": deposit.client.last_name},
         )
 
         metadata_fetcher = MetadataFetcher(
             name=self.tool["name"],
             version=self.tool["version"],
             metadata=self.tool["configuration"],
         )
 
         # replace metadata within the deposit backend
         deposit_request_data = {
             METADATA_KEY: metadata,
             RAW_METADATA_KEY: raw_metadata,
         }
 
         # actually add the metadata to the completed deposit
         deposit_request = self._deposit_request_put(deposit, deposit_request_data)
 
-        object_type, metadata_context = compute_metadata_context(swhid_reference)
-        if deposit_origin:  # metadata deposit update on completed deposit
-            metadata_context["origin"] = deposit_origin
-
-        swhid_core: Union[str, SWHID]
+        target_swhid: ExtendedSWHID  # SWHID of the origin (for string refs) or of the object
         if isinstance(swhid_reference, str):
-            swhid_core = swhid_reference
+            target_swhid = Origin(swhid_reference).swhid()
+            metadata_context = {}
         else:
-            swhid_core = attr.evolve(swhid_reference, metadata={})
+            metadata_context = compute_metadata_context(swhid_reference)
+            if deposit_origin:  # metadata deposit update on completed deposit
+                metadata_context["origin"] = deposit_origin
+
+            target_swhid = extended_swhid_from_qualified(swhid_reference)
 
         # store that metadata to the metadata storage
         metadata_object = RawExtrinsicMetadata(
-            type=object_type,
-            target=swhid_core,  # core swhid or origin
+            target=target_swhid,  # extended SWHID of the origin or object
             discovery_date=deposit_request.date,
             authority=metadata_authority,
             fetcher=metadata_fetcher,
             format="sword-v2-atom-codemeta",
             metadata=raw_metadata,
             **metadata_context,
         )
 
         # write to metadata storage
         self.storage_metadata.metadata_authority_add([metadata_authority])
         self.storage_metadata.metadata_fetcher_add([metadata_fetcher])
         self.storage_metadata.raw_extrinsic_metadata_add([metadata_object])
 
-        return (swhid_core, swhid_reference, deposit, deposit_request)
+        return (target_swhid, deposit, deposit_request)
 
     def _atom_entry(
         self,
         request: Request,
         headers: ParsedRequestHeaders,
         collection_name: str,
         deposit: Deposit,
         replace_metadata: bool = False,
         replace_archives: bool = False,
     ) -> Receipt:
         """Atom entry deposit.
 
         Args:
             request: the request holding information to parse
                 and inject in db
             headers: parsed request headers
             collection_name: the associated client
             deposit: deposit to be updated
             replace_metadata: 'Update or add' request to existing
               deposit. If False (default), this adds new metadata request to
               existing ones. Otherwise, this will replace existing metadata.
             replace_archives: 'Update or add' request to existing
               deposit. If False (default), this adds new archive request to
               existing ones. Otherwise, this will replace existing archives.
               ones.
 
         Raises:
             - 400 (bad request) if the request is not providing an external
               identifier
             - 400 (bad request) if the request's body is empty
             - 415 (unsupported media type) if a wrong media type is provided
 
         """
         try:
             raw_metadata, metadata = self._read_metadata(request.data)
         except ParserError:
             raise DepositError(
                 BAD_REQUEST,
                 "Malformed xml metadata",
                 "The xml received is malformed. "
                 "Please ensure your metadata file is correctly formatted.",
             )
 
         if metadata is None:
             raise DepositError(
                 BAD_REQUEST,
                 "Empty body request is not supported",
                 "Atom entry deposit is supposed to send for metadata. "
                 "If the body is empty, there is no metadata.",
             )
 
         self._set_deposit_origin_from_metadata(deposit, metadata, headers)
 
         # Determine if we are in the metadata-only deposit case
         try:
-            swhid = parse_swh_reference(metadata)
+            swhid_ref = parse_swh_reference(metadata)
         except ValidationError as e:
             raise DepositError(
                 PARSING_ERROR, "Invalid SWHID reference", str(e),
             )
 
-        if swhid is not None and (
+        if swhid_ref is not None and (
             deposit.origin_url or deposit.parent or deposit.external_id
         ):
             raise DepositError(
                 BAD_REQUEST,
                 "<swh:reference> is for metadata-only deposits and "
                 "<swh:create_origin> / <swh:add_to_origin> / Slug are for "
                 "code deposits, only one may be used on a given deposit.",
             )
 
-        if swhid is not None:
+        if swhid_ref is not None:
             deposit.save()  # We need a deposit id
-            swhid, swhid_ref, depo, depo_request = self._store_metadata_deposit(
-                deposit, swhid, metadata, raw_metadata
+            target_swhid, depo, depo_request = self._store_metadata_deposit(
+                deposit, swhid_ref, metadata, raw_metadata
             )
 
             deposit.status = DEPOSIT_STATUS_LOAD_SUCCESS
-            if isinstance(swhid_ref, SWHID):
-                deposit.swhid = str(swhid)
+            if isinstance(swhid_ref, QualifiedSWHID):
+                deposit.swhid = str(extended_swhid_from_qualified(swhid_ref))
                 deposit.swhid_context = str(swhid_ref)
             deposit.complete_date = depo_request.date
             deposit.reception_date = depo_request.date
             deposit.save()
 
             return Receipt(
                 deposit_id=deposit.id,
                 deposit_date=depo_request.date,
                 status=deposit.status,
                 archive=None,
             )
 
         self._deposit_put(
             deposit=deposit, in_progress=headers.in_progress,
         )
 
         self._deposit_request_put(
             deposit,
             {METADATA_KEY: metadata, RAW_METADATA_KEY: raw_metadata},
             replace_metadata,
             replace_archives,
         )
 
         return Receipt(
             deposit_id=deposit.id,
             deposit_date=deposit.reception_date,
             status=deposit.status,
             archive=None,
         )
 
     def _set_deposit_origin_from_metadata(self, deposit, metadata, headers):
         create_origin = metadata.get("swh:deposit", {}).get("swh:create_origin")
         add_to_origin = metadata.get("swh:deposit", {}).get("swh:add_to_origin")
 
         if create_origin and add_to_origin:
             raise DepositError(
                 BAD_REQUEST,
                 "<swh:create_origin> and <swh:add_to_origin> are mutually exclusive, "
                 "as they respectively create a new origin and add to an existing "
                 "origin.",
             )
 
         if create_origin:
             origin_url = create_origin["swh:origin"]["@url"]
             check_client_origin(deposit.client, origin_url)
             deposit.origin_url = origin_url
 
         if add_to_origin:
             origin_url = add_to_origin["swh:origin"]["@url"]
             check_client_origin(deposit.client, origin_url)
             deposit.parent = (
                 Deposit.objects.filter(
                     client=deposit.client,
                     origin_url=origin_url,
                     status=DEPOSIT_STATUS_LOAD_SUCCESS,
                 )
                 .order_by("-id")[0:1]
                 .get()
             )
             deposit.origin_url = origin_url
 
         if "atom:external_identifier" in metadata:
             # Deprecated tag.
             # When clients stopped using it, this should raise an error
             # unconditionally
 
             if deposit.origin_url:
                 raise DepositError(
                     BAD_REQUEST,
                     "<external_identifier> is deprecated, you should only use "
                     "<swh:create_origin> and <swh:add_to_origin> from now on.",
                 )
 
             if headers.slug and metadata["atom:external_identifier"] != headers.slug:
                 raise DepositError(
                     BAD_REQUEST,
                     "The <external_identifier> tag and Slug header are deprecated, "
                     "<swh:create_origin> or <swh:add_to_origin> "
                     "should be used instead.",
                 )
 
     def _empty_post(
         self,
         request: Request,
         headers: ParsedRequestHeaders,
         collection_name: str,
         deposit: Deposit,
     ) -> Receipt:
         """Empty post to finalize a deposit.
 
         Args:
             request: the request holding information to parse
                 and inject in db
             headers: parsed request headers
             collection_name: the associated client
             deposit: deposit to be finalized
         """
         self._complete_deposit(deposit)
 
         assert deposit.complete_date is not None
 
         return Receipt(
             deposit_id=deposit.id,
             deposit_date=deposit.complete_date,
             status=deposit.status,
             archive=None,
         )
 
     def additional_checks(
         self,
         request: Request,
         headers: ParsedRequestHeaders,
         collection_name: str,
         deposit: Optional[Deposit],
     ) -> Dict[str, Any]:
         """Permit the child class to enrich additional checks.
 
         Returns:
             dict with 'error' detailing the problem.
 
         """
         return {}
 
     def get_client(self, request) -> DepositClient:
         # This class depends on AuthenticatedAPIView, so request.user.username
         # is always set
         username = request.user.username
         assert username is not None
 
         if self._client is None:
             try:
                 self._client = DepositClient.objects.get(  # type: ignore
                     username=username
                 )
             except DepositClient.DoesNotExist:
                 raise DepositError(NOT_FOUND, f"Unknown client name {username}")
 
         assert self._client.username == username
 
         return self._client
 
     def checks(
         self, request: Request, collection_name: str, deposit: Optional[Deposit] = None
     ) -> ParsedRequestHeaders:
         if deposit is None:
             collection = get_collection_by_name(collection_name)
         else:
             assert collection_name == deposit.collection.name
             collection = deposit.collection
 
         client = self.get_client(request)
         collection_id = collection.id
         collections = client.collections
         assert collections is not None
         if collection_id not in collections:
             raise DepositError(
                 FORBIDDEN,
                 f"Client {client.username} cannot access collection {collection_name}",
             )
 
         headers = self._read_headers(request)
 
         if deposit is not None:
             self.restrict_access(request, headers, deposit)
 
         if headers.on_behalf_of:
             raise DepositError(MEDIATION_NOT_ALLOWED, "Mediation is not supported.")
 
         self.additional_checks(request, headers, collection_name, deposit)
 
         return headers
 
     def restrict_access(
         self, request: Request, headers: ParsedRequestHeaders, deposit: Deposit
     ) -> None:
         """Allow modifications on deposit with status 'partial' only, reject the rest.
 
         """
         if request.method != "GET" and deposit.status != DEPOSIT_STATUS_PARTIAL:
             summary = "You can only act on deposit with status '%s'" % (
                 DEPOSIT_STATUS_PARTIAL,
             )
             description = f"This deposit has status '{deposit.status}'"
             raise DepositError(
                 BAD_REQUEST, summary=summary, verbose_description=description
             )
 
     def _basic_not_allowed_method(self, request: Request, method: str):
         raise DepositError(
             METHOD_NOT_ALLOWED, f"{method} method is not supported on this endpoint",
         )
 
     def get(
         self, request: Request, collection_name: str, deposit_id: int
     ) -> Union[HttpResponse, FileResponse]:
         return self._basic_not_allowed_method(request, "GET")
 
     def post(
         self, request: Request, collection_name: str, deposit_id: Optional[int] = None
     ) -> HttpResponse:
         return self._basic_not_allowed_method(request, "POST")
 
     def put(
         self, request: Request, collection_name: str, deposit_id: int
     ) -> HttpResponse:
         return self._basic_not_allowed_method(request, "PUT")
 
     def delete(
         self, request: Request, collection_name: str, deposit_id: Optional[int] = None
     ) -> HttpResponse:
         return self._basic_not_allowed_method(request, "DELETE")
 
 
 class APIGet(APIBase, metaclass=ABCMeta):
     """Mixin for class to support GET method.
 
     """
 
     def get(
         self, request: Request, collection_name: str, deposit_id: int
     ) -> Union[HttpResponse, FileResponse]:
         """Endpoint to create/add resources to deposit.
 
         Returns:
             200 response when no error during routine occurred
             400 if the deposit does not belong to the collection
             404 if the deposit or the collection does not exist
 
         """
         deposit = get_deposit_by_id(deposit_id, collection_name)
         self.checks(request, collection_name, deposit)
 
         r = self.process_get(request, collection_name, deposit)
 
         status, content, content_type = r
         if content_type == "swh/generator":
             with content as path:
                 return FileResponse(
                     open(path, "rb"), status=status, content_type="application/tar"
                 )
         if content_type == "application/json":
             return HttpResponse(
                 json.dumps(content), status=status, content_type=content_type
             )
         return HttpResponse(content, status=status, content_type=content_type)
 
     @abstractmethod
     def process_get(
         self, request: Request, collection_name: str, deposit: Deposit
     ) -> Tuple[int, Any, str]:
         """Routine to deal with the deposit's get processing.
 
         Returns:
             Tuple status, stream of content, content-type
 
         """
         pass
 
 
 class APIPost(APIBase, metaclass=ABCMeta):
     """Mixin for class to support POST method.
 
     """
 
     def post(
         self, request: Request, collection_name: str, deposit_id: Optional[int] = None
     ) -> HttpResponse:
         """Endpoint to create/add resources to deposit.
 
         Returns:
             204 response when no error during routine occurred.
             400 if the deposit does not belong to the collection
             404 if the deposit or the collection does not exist
 
         """
         if deposit_id is None:
             deposit = None
         else:
             deposit = get_deposit_by_id(deposit_id, collection_name)
         headers = self.checks(request, collection_name, deposit)
 
         status, iri_key, receipt = self.process_post(
             request, headers, collection_name, deposit
         )
 
         return self._make_deposit_receipt(
             request, collection_name, status, iri_key, receipt,
         )
 
     def _make_deposit_receipt(
         self,
         request,
         collection_name: str,
         status: int,
         iri_key: str,
         receipt: Receipt,
     ) -> HttpResponse:
         """Returns an HttpResponse with a SWORD Deposit receipt as content."""
 
         # Build the IRIs in the receipt
         args = [collection_name, receipt.deposit_id]
         iris = {
             iri: request.build_absolute_uri(reverse(iri, args=args))
             for iri in [EM_IRI, EDIT_IRI, CONT_FILE_IRI, SE_IRI, STATE_IRI]
         }
 
         context = {
             **attr.asdict(receipt),
             **iris,
             "packagings": ACCEPT_PACKAGINGS,
         }
 
         response = render(
             request,
             "deposit/deposit_receipt.xml",
             context=context,
             content_type="application/xml",
             status=status,
         )
         response._headers["location"] = "Location", iris[iri_key]  # type: ignore
         return response
 
     @abstractmethod
     def process_post(
         self,
         request,
         headers: ParsedRequestHeaders,
         collection_name: str,
         deposit: Optional[Deposit] = None,
     ) -> Tuple[int, str, Receipt]:
         """Routine to deal with the deposit's processing.
 
         Returns
             Tuple of:
             - response status code (200, 201, etc...)
             - key iri (EM_IRI, EDIT_IRI, etc...)
             - Receipt
 
         """
         pass
 
 
 class APIPut(APIBase, metaclass=ABCMeta):
     """Mixin for class to support PUT method.
 
     """
 
     def put(
         self, request: Request, collection_name: str, deposit_id: int
     ) -> HttpResponse:
         """Endpoint to update deposit resources.
 
         Returns:
             204 response when no error during routine occurred.
             400 if the deposit does not belong to the collection
             404 if the deposit or the collection does not exist
 
         """
         if deposit_id is None:
             deposit = None
         else:
             deposit = get_deposit_by_id(deposit_id, collection_name)
         headers = self.checks(request, collection_name, deposit)
         self.process_put(request, headers, collection_name, deposit)
 
         return HttpResponse(status=status.HTTP_204_NO_CONTENT)
 
     @abstractmethod
     def process_put(
         self,
         request: Request,
         headers: ParsedRequestHeaders,
         collection_name: str,
         deposit: Deposit,
     ) -> None:
         """Routine to deal with updating a deposit in some way.
 
         Returns
             dictionary of the processing result
 
         """
         pass
 
 
 class APIDelete(APIBase, metaclass=ABCMeta):
     """Mixin for class to support DELETE method.
 
     """
 
     def delete(
         self, request: Request, collection_name: str, deposit_id: Optional[int] = None
     ) -> HttpResponse:
         """Endpoint to delete some deposit's resources (archives, deposit).
 
         Returns:
             204 response when no error during routine occurred.
             400 if the deposit does not belong to the collection
             404 if the deposit or the collection does not exist
 
         """
         assert deposit_id is not None
         deposit = get_deposit_by_id(deposit_id, collection_name)
         self.checks(request, collection_name, deposit)
         self.process_delete(request, collection_name, deposit)
 
         return HttpResponse(status=status.HTTP_204_NO_CONTENT)
 
     @abstractmethod
     def process_delete(
         self, request: Request, collection_name: str, deposit: Deposit
     ) -> None:
         """Routine to delete a resource.
 
         This is mostly not allowed except for the
         EM_IRI (cf. .api.deposit_update.APIUpdateArchive)
 
         """
         pass
diff --git a/swh/deposit/api/edit.py b/swh/deposit/api/edit.py
index 0ddd35e3..3d0d6574 100644
--- a/swh/deposit/api/edit.py
+++ b/swh/deposit/api/edit.py
@@ -1,138 +1,142 @@
 # Copyright (C) 2017-2020  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 from rest_framework.request import Request
 
 from swh.deposit.models import Deposit
-from swh.model.identifiers import parse_swhid
+from swh.model.identifiers import QualifiedSWHID
 
 from ..config import DEPOSIT_STATUS_LOAD_SUCCESS
 from ..errors import BAD_REQUEST, DepositError, ParserError
 from ..parsers import SWHAtomEntryParser, SWHMultiPartParser
 from .common import APIDelete, APIPut, ParsedRequestHeaders
 
 
 class EditAPI(APIPut, APIDelete):
     """Deposit request class defining api endpoints for sword deposit.
 
        What's known as 'Edit-IRI' in the sword specification.
 
        HTTP verbs supported: PUT, DELETE
 
     """
 
     parser_classes = (SWHMultiPartParser, SWHAtomEntryParser)
 
     def restrict_access(
         self, request: Request, headers: ParsedRequestHeaders, deposit: Deposit
     ) -> None:
         """Relax restriction access to allow metadata update on deposit with status "done" when
         a swhid is provided.
 
         """
         if (
             request.method == "PUT"
             and headers.swhid is not None
             and deposit.status == DEPOSIT_STATUS_LOAD_SUCCESS
         ):
             # Allow metadata update on deposit with status "done" when swhid provided
             return
         # otherwise, let the standard access restriction check occur
         super().restrict_access(request, headers, deposit)
 
     def process_put(
         self,
         request,
         headers: ParsedRequestHeaders,
         collection_name: str,
         deposit: Deposit,
     ) -> None:
         """This allows the following scenarios:
 
         - multipart: replace all the deposit (status partial) metadata and archive
           with the provided ones.
         - atom: replace all the deposit (status partial) metadata with the
           provided ones.
         - with swhid, atom: Add new metatada to deposit (status done) with provided ones
           and push such metadata to the metadata storage directly.
 
            source:
            - http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html#protocoloperations_editingcontent_metadata
            - http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html#protocoloperations_editingcontent_multipart
 
         Raises:
             400 if any of the following occur:
             - the swhid provided and the deposit swhid do not match
             - the provided metadata xml file is malformed
             - the provided xml atom entry is empty
             - the provided swhid does not exist in the archive
 
         """  # noqa
         swhid = headers.swhid
         if swhid is None:
             if request.content_type.startswith("multipart/"):
                 self._multipart_upload(
                     request,
                     headers,
                     collection_name,
                     deposit=deposit,
                     replace_archives=True,
                     replace_metadata=True,
                 )
             else:
                 # standard metadata update (replace all metadata already provided to the
                 # deposit by the new ones)
                 self._atom_entry(
                     request,
                     headers,
                     collection_name,
                     deposit=deposit,
                     replace_metadata=True,
                 )
             return
 
         # Update metadata on a deposit already ingested
         # Write to the metadata storage (and the deposit backend)
         # no ingestion triggered
 
         assert deposit.status == DEPOSIT_STATUS_LOAD_SUCCESS
 
         if swhid != deposit.swhid:
             raise DepositError(
                 BAD_REQUEST,
                 f"Mismatched provided SWHID {swhid} with deposit's {deposit.swhid}.",
                 "The provided SWHID does not match the deposit to update. "
                 "Please ensure you send the correct deposit SWHID.",
             )
 
         try:
             raw_metadata, metadata = self._read_metadata(request.data)
         except ParserError:
             raise DepositError(
                 BAD_REQUEST,
                 "Malformed xml metadata",
                 "The xml received is malformed. "
                 "Please ensure your metadata file is correctly formatted.",
             )
 
         if not metadata:
             raise DepositError(
                 BAD_REQUEST,
                 "Empty body request is not supported",
                 "Atom entry deposit is supposed to send for metadata. "
                 "If the body is empty, there is no metadata.",
             )
 
-        _, _, deposit, deposit_request = self._store_metadata_deposit(
-            deposit, parse_swhid(swhid), metadata, raw_metadata, deposit.origin_url,
+        _, deposit, deposit_request = self._store_metadata_deposit(
+            deposit,
+            QualifiedSWHID.from_string(swhid),
+            metadata,
+            raw_metadata,
+            deposit.origin_url,
         )
 
     def process_delete(self, req, collection_name: str, deposit: Deposit) -> None:
         """Delete the container (deposit).
 
            source: http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html#protocoloperations_deleteconteiner  # noqa
 
         """
         self._delete_deposit(collection_name, deposit)
diff --git a/swh/deposit/api/private/deposit_read.py b/swh/deposit/api/private/deposit_read.py
index c38b2bbe..5b856518 100644
--- a/swh/deposit/api/private/deposit_read.py
+++ b/swh/deposit/api/private/deposit_read.py
@@ -1,199 +1,200 @@
 # Copyright (C) 2017-2020 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 from contextlib import contextmanager
 import os
 import shutil
 import tempfile
 from typing import Any, Dict, Tuple
 
 from rest_framework import status
 
 from swh.core import tarball
 from swh.deposit.utils import normalize_date
 from swh.model import identifiers
+from swh.model.hashutil import hash_to_hex
 from swh.model.model import MetadataAuthorityType
 
 from . import APIPrivateView, DepositReadMixin
 from ...config import ARCHIVE_TYPE, SWH_PERSON
 from ...models import Deposit
 from ..common import APIGet
 
 
 @contextmanager
 def aggregate_tarballs(extraction_dir, archive_paths):
     """Aggregate multiple tarballs into one and returns this new archive's
        path.
 
     Args:
         extraction_dir (path): Path to use for the tarballs computation
         archive_paths ([str]): Deposit's archive paths
 
     Returns:
         Tuple (directory to clean up, archive path (aggregated or not))
 
     """
     # rebuild one zip archive from (possibly) multiple ones
     os.makedirs(extraction_dir, 0o755, exist_ok=True)
     dir_path = tempfile.mkdtemp(prefix="swh.deposit-", dir=extraction_dir)
 
     # root folder to build an aggregated tarball
     aggregated_tarball_rootdir = os.path.join(dir_path, "aggregate")
     os.makedirs(aggregated_tarball_rootdir, 0o755, exist_ok=True)
 
     # uncompress in a temporary location all archives
     for archive_path in archive_paths:
         tarball.uncompress(archive_path, aggregated_tarball_rootdir)
 
     # Aggregate into one big tarball the multiple smaller ones
     temp_tarpath = shutil.make_archive(
         aggregated_tarball_rootdir, "tar", aggregated_tarball_rootdir
     )
     # can already clean up temporary directory
     shutil.rmtree(aggregated_tarball_rootdir)
 
     try:
         yield temp_tarpath
     finally:
         shutil.rmtree(dir_path)
 
 
 class APIReadArchives(APIPrivateView, APIGet, DepositReadMixin):
     """Dedicated class to read a deposit's raw archives content.
 
     Only GET is supported.
 
     """
 
     def __init__(self):
         super().__init__()
         self.extraction_dir = self.config["extraction_dir"]
         if not os.path.exists(self.extraction_dir):
             os.makedirs(self.extraction_dir)
 
     def process_get(
         self, request, collection_name: str, deposit: Deposit
     ) -> Tuple[int, Any, str]:
         """Build a unique tarball from the multiple received and stream that
            content to the client.
 
         Args:
             request (Request):
             collection_name: Collection owning the deposit
             deposit: Deposit concerned by the reading
 
         Returns:
             Tuple status, stream of content, content-type
 
         """
         archive_paths = [
             r.archive.path
             for r in self._deposit_requests(deposit, request_type=ARCHIVE_TYPE)
         ]
         return (
             status.HTTP_200_OK,
             aggregate_tarballs(self.extraction_dir, archive_paths),
             "swh/generator",
         )
 
 
 class APIReadMetadata(APIPrivateView, APIGet, DepositReadMixin):
     """Class in charge of aggregating metadata on a deposit.
 
     """
 
     def _normalize_dates(self, deposit, metadata):
         """Normalize the date to use as a tuple of author date, committer date
            from the incoming metadata.
 
         Args:
             deposit (Deposit): Deposit model representation
             metadata (Dict): Metadata dict representation
 
         Returns:
             Tuple of author date, committer date. Those dates are
             swh normalized.
 
         """
         commit_date = metadata.get("codemeta:datePublished")
         author_date = metadata.get("codemeta:dateCreated")
 
         if author_date and commit_date:
             pass
         elif commit_date:
             author_date = commit_date
         elif author_date:
             commit_date = author_date
         else:
             author_date = deposit.complete_date
             commit_date = deposit.complete_date
         return (normalize_date(author_date), normalize_date(commit_date))
 
     def metadata_read(self, deposit: Deposit) -> Dict[str, Any]:
         """Read and aggregate multiple deposit information into one unified dictionary.
 
         Args:
             deposit: Deposit to retrieve information from
 
         Returns:
             Dictionary of deposit information read by the deposit loader, with the
             following keys:
 
                 **origin** (Dict): Information about the origin
 
                 **metadata_raw** (List[str]): List of raw metadata received for the
                   deposit
 
                 **metadata_dict** (Dict): Deposit aggregated metadata into one dict
 
                 **provider** (Dict): the metadata provider information about the
                   deposit client
 
                 **tool** (Dict): the deposit information
 
                 **deposit** (Dict): deposit information relevant to build the revision
                   (author_date, committer_date, etc...)
 
         """
         metadata, raw_metadata = self._metadata_get(deposit)
         author_date, commit_date = self._normalize_dates(deposit, metadata)
 
         if deposit.parent:
             parent_swhid = deposit.parent.swhid
             assert parent_swhid is not None
-            swhid = identifiers.parse_swhid(parent_swhid)
-            parent_revision = swhid.object_id
+            swhid = identifiers.CoreSWHID.from_string(parent_swhid)
+            parent_revision = hash_to_hex(swhid.object_id)
             parents = [parent_revision]
         else:
             parents = []
 
         return {
             "origin": {"type": "deposit", "url": deposit.origin_url},
             "provider": {
                 "provider_name": deposit.client.last_name,
                 "provider_url": deposit.client.provider_url,
                 "provider_type": MetadataAuthorityType.DEPOSIT_CLIENT.value,
                 "metadata": {},
             },
             "tool": self.tool,
             "metadata_raw": raw_metadata,
             "metadata_dict": metadata,
             "deposit": {
                 "id": deposit.id,
                 "client": deposit.client.username,
                 "collection": deposit.collection.name,
                 "author": SWH_PERSON,
                 "author_date": author_date,
                 "committer": SWH_PERSON,
                 "committer_date": commit_date,
                 "revision_parents": parents,
             },
         }
 
     def process_get(
         self, request, collection_name: str, deposit: Deposit
     ) -> Tuple[int, Dict, str]:
         data = self.metadata_read(deposit)
         return status.HTTP_200_OK, data if data else {}, "application/json"
diff --git a/swh/deposit/api/private/deposit_update_status.py b/swh/deposit/api/private/deposit_update_status.py
index ac9a47b4..cdec9773 100644
--- a/swh/deposit/api/private/deposit_update_status.py
+++ b/swh/deposit/api/private/deposit_update_status.py
@@ -1,107 +1,115 @@
 # Copyright (C) 2017-2020  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 from rest_framework.parsers import JSONParser
 
-from swh.model.identifiers import DIRECTORY, REVISION, SNAPSHOT, swhid
+from swh.model.hashutil import hash_to_bytes
+from swh.model.identifiers import CoreSWHID, ObjectType, QualifiedSWHID
 
 from . import APIPrivateView
 from ...errors import BAD_REQUEST, DepositError
 from ...models import DEPOSIT_STATUS_DETAIL, DEPOSIT_STATUS_LOAD_SUCCESS, Deposit
 from ..common import APIPut, ParsedRequestHeaders
 
 MANDATORY_KEYS = ["origin_url", "revision_id", "directory_id", "snapshot_id"]
 
 
 class APIUpdateStatus(APIPrivateView, APIPut):
     """Deposit request class to update the deposit's status.
 
     HTTP verbs supported: PUT
 
     """
 
     parser_classes = (JSONParser,)
 
     def additional_checks(
         self, request, headers: ParsedRequestHeaders, collection_name, deposit=None
     ):
         """Enrich existing checks to the default ones.
 
         New checks:
         - Ensure the status is provided
         - Ensure it exists
         - no missing information on load success update
 
         """
         data = request.data
         status = data.get("status")
         if not status:
             msg = "The status key is mandatory with possible values %s" % list(
                 DEPOSIT_STATUS_DETAIL.keys()
             )
             raise DepositError(BAD_REQUEST, msg)
 
         if status not in DEPOSIT_STATUS_DETAIL:
             msg = "Possible status in %s" % list(DEPOSIT_STATUS_DETAIL.keys())
             raise DepositError(BAD_REQUEST, msg)
 
         if status == DEPOSIT_STATUS_LOAD_SUCCESS:
             missing_keys = []
             for key in MANDATORY_KEYS:
                 value = data.get(key)
                 if value is None:
                     missing_keys.append(key)
 
             if missing_keys:
                 msg = (
                     f"Updating deposit status to {status}"
                     f" requires information {','.join(missing_keys)}"
                 )
                 raise DepositError(BAD_REQUEST, msg)
 
         return {}
 
     def process_put(
         self,
         request,
         headers: ParsedRequestHeaders,
         collection_name: str,
         deposit: Deposit,
     ) -> None:
         """Update the deposit with status and SWHIDs
 
         Returns:
             204 No content
             400 Bad request if checks fail
 
         """
         data = request.data
 
         status = data["status"]
         deposit.status = status
         if status == DEPOSIT_STATUS_LOAD_SUCCESS:
             origin_url = data["origin_url"]
             directory_id = data["directory_id"]
             revision_id = data["revision_id"]
-            dir_id = swhid(DIRECTORY, directory_id)
-            snp_id = swhid(SNAPSHOT, data["snapshot_id"])
-            rev_id = swhid(REVISION, revision_id)
+            dir_id = CoreSWHID(
+                object_type=ObjectType.DIRECTORY, object_id=hash_to_bytes(directory_id)
+            )
+            snp_id = CoreSWHID(
+                object_type=ObjectType.SNAPSHOT,
+                object_id=hash_to_bytes(data["snapshot_id"]),
+            )
+            rev_id = CoreSWHID(
+                object_type=ObjectType.REVISION, object_id=hash_to_bytes(revision_id)
+            )
 
-            deposit.swhid = dir_id
+            deposit.swhid = str(dir_id)
             # new id with contextual information
-            deposit.swhid_context = swhid(
-                DIRECTORY,
-                directory_id,
-                metadata={
-                    "origin": origin_url,
-                    "visit": snp_id,
-                    "anchor": rev_id,
-                    "path": "/",
-                },
+            deposit.swhid_context = str(
+                QualifiedSWHID(
+                    object_type=ObjectType.DIRECTORY,
+                    object_id=hash_to_bytes(directory_id),
+                    origin=origin_url,
+                    visit=snp_id,
+                    anchor=rev_id,
+                    path="/",
+                )
             )
         else:  # rejected
             deposit.status = status
 
         deposit.save()
diff --git a/swh/deposit/migrations/0018_migrate_swhids.py b/swh/deposit/migrations/0018_migrate_swhids.py
index a2724bee..40cec883 100644
--- a/swh/deposit/migrations/0018_migrate_swhids.py
+++ b/swh/deposit/migrations/0018_migrate_swhids.py
@@ -1,342 +1,348 @@
 # -*- coding: utf-8 -*-
 # Copyright (C) 2020  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 from __future__ import unicode_literals
 
 import logging
 import os
 from typing import Any, Dict, Optional, Tuple
 
 from django.db import migrations
 
 from swh.core import config
 from swh.deposit.config import DEPOSIT_STATUS_LOAD_SUCCESS
 from swh.model.hashutil import hash_to_bytes, hash_to_hex
-from swh.model.identifiers import DIRECTORY, REVISION, SNAPSHOT, parse_swhid, swhid
+from swh.model.identifiers import CoreSWHID, ObjectType, QualifiedSWHID
 from swh.storage import get_storage as get_storage_client
 from swh.storage.algos.snapshot import snapshot_id_get_from_revision
 
 SWH_PROVIDER_URL = "https://www.softwareheritage.org"
 
 
 logger = logging.getLogger(__name__)
 
 
 swh_storage = None
 
 
 def get_storage() -> Optional[Any]:
     """Instantiate a storage client
 
     """
     settings = os.environ.get("DJANGO_SETTINGS_MODULE")
     if settings != "swh.deposit.settings.production":  # Bypass for now
         return None
 
     global swh_storage
 
     if not swh_storage:
         config_file = os.environ.get("SWH_CONFIG_FILENAME")
         if not config_file:
             raise ValueError(
                 "Production: SWH_CONFIG_FILENAME must be set to the"
                 " configuration file needed!"
             )
 
         if not os.path.exists(config_file):
             raise ValueError(
                 "Production: configuration file %s does not exist!" % (config_file,)
             )
 
         conf = config.load_named_config(config_file)
         if not conf:
             raise ValueError(
                 "Production: configuration %s does not exist." % (config_file,)
             )
 
         storage_config = conf.get("storage")
         if not storage_config:
             raise ValueError(
                 "Production: invalid configuration; missing 'storage' config entry."
             )
 
         swh_storage = get_storage_client(**storage_config)
 
     return swh_storage
 
 
-def migrate_deposit_swhid_context_not_null(apps, schema_editor):
+def migrate_deposit_swhid_context_not_null(apps, schema_editor) -> None:
     """Migrate deposit SWHIDs to the new format.
 
     Migrate deposit SWHIDs to the new format. Only deposit with status done and
     swh_id_context not null are concerned.
 
     """
     storage = get_storage()
     if not storage:
         logging.warning("Nothing to do")
         return None
 
     Deposit = apps.get_model("deposit", "Deposit")
     for deposit in Deposit.objects.filter(
         status=DEPOSIT_STATUS_LOAD_SUCCESS, swh_id_context__isnull=False
     ):
-        obj_dir = parse_swhid(deposit.swh_id_context)
-        assert obj_dir.object_type == DIRECTORY
+        obj_dir = QualifiedSWHID.from_string(deposit.swh_id_context)
+        assert obj_dir.object_type == ObjectType.DIRECTORY
 
-        obj_rev = parse_swhid(deposit.swh_anchor_id)
-        assert obj_rev.object_type == REVISION
+        obj_rev = CoreSWHID.from_string(deposit.swh_anchor_id)
+        assert obj_rev.object_type == ObjectType.REVISION
 
-        if set(obj_dir.metadata.keys()) != {"origin"}:
+        if set(obj_dir.qualifiers()) != {"origin"}:
             # Assuming the migration is already done for that deposit
             logger.warning(
                 "Deposit id %s: Migration already done, skipping", deposit.id
             )
             continue
 
         # Starting migration
 
         dir_id = obj_dir.object_id
-        origin = obj_dir.metadata["origin"]
+        origin = obj_dir.origin
+
+        assert origin
 
         check_origin = storage.origin_get([origin])[0]
         if not check_origin:
             logger.warning("Deposit id %s: Origin %s not found!", deposit.id, origin)
             continue
 
         rev_id = obj_rev.object_id
         # Find the snapshot targeting the revision
         snp_id = snapshot_id_get_from_revision(storage, origin, hash_to_bytes(rev_id))
         if snp_id is None:
             logger.warning(
                 "Deposit id %s: Snapshot targeting revision %s not found!",
                 deposit.id,
                 rev_id,
             )
             continue
 
         # Reference the old values to do some checks later
         old_swh_id = deposit.swh_id
         old_swh_id_context = deposit.swh_id_context
         old_swh_anchor_id = deposit.swh_anchor_id
         old_swh_anchor_id_context = deposit.swh_anchor_id_context
 
         # Update
-        deposit.swh_id_context = swhid(
-            DIRECTORY,
-            dir_id,
-            metadata={
-                "origin": origin,
-                "visit": swhid(SNAPSHOT, snp_id.hex()),
-                "anchor": swhid(REVISION, rev_id),
-                "path": "/",
-            },
+        deposit.swh_id_context = QualifiedSWHID(
+            object_type=ObjectType.DIRECTORY,
+            object_id=dir_id,
+            origin=origin,
+            visit=CoreSWHID(object_type=ObjectType.SNAPSHOT, object_id=snp_id),
+            anchor=CoreSWHID(
+                object_type=ObjectType.REVISION, object_id=hash_to_bytes(rev_id)
+            ),
+            path=b"/",
         )
 
         # Ensure only deposit.swh_id_context changed
         logging.debug("deposit.id: {deposit.id}")
         logging.debug("deposit.swh_id: %s -> %s", old_swh_id, deposit.swh_id)
         assert old_swh_id == deposit.swh_id
         logging.debug(
             "deposit.swh_id_context: %s -> %s",
             old_swh_id_context,
             deposit.swh_id_context,
         )
         assert old_swh_id_context != deposit.swh_id_context
         logging.debug(
             "deposit.swh_anchor_id: %s -> %s", old_swh_anchor_id, deposit.swh_anchor_id
         )
         assert old_swh_anchor_id == deposit.swh_anchor_id
         logging.debug(
             "deposit.swh_anchor_id_context: %s -> %s",
             old_swh_anchor_id_context,
             deposit.swh_anchor_id_context,
         )
         assert old_swh_anchor_id_context == deposit.swh_anchor_id_context
 
         # Commit
         deposit.save()
 
 
 def resolve_origin(deposit_id: int, provider_url: str, external_id: str) -> str:
     """Resolve the origin from provider-url and external-id
 
     For some edge case, only the external_id is used as there is some old inconsistency
     from testing which exists.
 
     """
     map_edge_case_origin: Dict[Tuple[int, str], str] = {
         (
             76,
             "hal-01588782",
         ): "https://inria.halpreprod.archives-ouvertes.fr/hal-01588782",
         (
             87,
             "hal-01588927",
         ): "https://inria.halpreprod.archives-ouvertes.fr/hal-01588927",
         (89, "hal-01588935"): "https://hal-preprod.archives-ouvertes.fr/hal-01588935",
         (
             88,
             "hal-01588928",
         ): "https://inria.halpreprod.archives-ouvertes.fr/hal-01588928",
         (
             90,
             "hal-01588942",
         ): "https://inria.halpreprod.archives-ouvertes.fr/hal-01588942",
         (143, "hal-01592430"): "https://hal-preprod.archives-ouvertes.fr/hal-01592430",
         (
             75,
             "hal-01588781",
         ): "https://inria.halpreprod.archives-ouvertes.fr/hal-01588781",
     }
     origin = map_edge_case_origin.get((deposit_id, external_id))
     if origin:
         return origin
 
     # Some simpler origin edge cases (mostly around the initial deposits)
     map_origin = {
         (
             SWH_PROVIDER_URL,
             "je-suis-gpl",
         ): "https://forge.softwareheritage.org/source/jesuisgpl/",
         (
             SWH_PROVIDER_URL,
             "external-id",
         ): "https://hal.archives-ouvertes.fr/external-id",
     }
     key = (provider_url, external_id)
     return map_origin.get(key, f"{provider_url.rstrip('/')}/{external_id}")
 
 
-def migrate_deposit_swhid_context_null(apps, schema_editor):
+def migrate_deposit_swhid_context_null(apps, schema_editor) -> None:
     """Migrate deposit SWHIDs to the new format.
 
     Migrate deposit whose swh_id_context is not set (initial deposits not migrated at
     the time). Only deposit with status done and swh_id_context null are concerned.
 
     Note: Those deposits have their swh_id being the SWHPIDs of the revision! So we can
     align them as well.
 
     """
     storage = get_storage()
     if not storage:
         logging.warning("Nothing to do")
         return None
     Deposit = apps.get_model("deposit", "Deposit")
     for deposit in Deposit.objects.filter(
         status=DEPOSIT_STATUS_LOAD_SUCCESS, swh_id_context__isnull=True
     ):
-        obj_rev = parse_swhid(deposit.swh_id)
-        if obj_rev.object_type == DIRECTORY:
+        obj_rev = CoreSWHID.from_string(deposit.swh_id)
+        if obj_rev.object_type == ObjectType.DIRECTORY:
             # Assuming the migration is already done for that deposit
             logger.warning(
                 "Deposit id %s: Migration already done, skipping", deposit.id
             )
             continue
 
         # Ensuring Migration not done
-        assert obj_rev.object_type == REVISION
+        assert obj_rev.object_type == ObjectType.REVISION
 
         assert deposit.swh_id is not None
         assert deposit.swh_id_context is None
         assert deposit.swh_anchor_id is None
         assert deposit.swh_anchor_id_context is None
 
         rev_id = obj_rev.object_id
         rev_id_bytes = hash_to_bytes(rev_id)
         revision = storage.revision_get([rev_id_bytes])[0]
         if not revision:
             logger.warning("Deposit id %s: Revision %s not found!", deposit.id, rev_id)
             continue
 
         provider_url = deposit.client.provider_url
         external_id = deposit.external_id
 
         origin = resolve_origin(deposit.id, provider_url, external_id)
         check_origin = storage.origin_get([origin])[0]
         if not check_origin:
             logger.warning("Deposit id %s: Origin %s not found!", deposit.id, origin)
             continue
 
         dir_id = hash_to_hex(revision["directory"])
 
         # Reference the old values to do some checks later
         old_swh_id = deposit.swh_id
         old_swh_id_context = deposit.swh_id_context
         old_swh_anchor_id = deposit.swh_anchor_id
         old_swh_anchor_id_context = deposit.swh_anchor_id_context
 
         # retrieve the snapshot from the archive
         snp_id = snapshot_id_get_from_revision(storage, origin, rev_id_bytes)
         if snp_id is None:
             logger.warning(
                 "Deposit id %s: Snapshot targeting revision %s not found!",
                 deposit.id,
                 rev_id,
             )
             continue
 
         # New SWHIDs ids
-        deposit.swh_id = swhid(DIRECTORY, dir_id)
-        deposit.swh_id_context = swhid(
-            DIRECTORY,
-            dir_id,
-            metadata={
-                "origin": origin,
-                "visit": swhid(SNAPSHOT, snp_id.hex()),
-                "anchor": swhid(REVISION, rev_id),
-                "path": "/",
-            },
+        deposit.swh_id = CoreSWHID(
+            object_type=ObjectType.DIRECTORY, object_id=hash_to_bytes(dir_id)
+        )
+        deposit.swh_id_context = QualifiedSWHID(
+            object_type=ObjectType.DIRECTORY,
+            object_id=hash_to_bytes(dir_id),
+            origin=origin,
+            visit=CoreSWHID(object_type=ObjectType.SNAPSHOT, object_id=snp_id),
+            anchor=CoreSWHID(object_type=ObjectType.REVISION, object_id=rev_id_bytes),
+            path=b"/",
         )
         # Realign the remaining deposit SWHIDs fields
-        deposit.swh_anchor_id = swhid(REVISION, rev_id)
-        deposit.swh_anchor_id_context = swhid(
-            REVISION, rev_id, metadata={"origin": origin,}
+        deposit.swh_anchor_id = str(
+            CoreSWHID(object_type=ObjectType.REVISION, object_id=rev_id_bytes)
+        )
+        deposit.swh_anchor_id_context = str(
+            QualifiedSWHID(
+                object_type=ObjectType.REVISION, object_id=rev_id_bytes, origin=origin
+            )
         )
 
         # Ensure only deposit.swh_id_context changed
         logging.debug("deposit.id: {deposit.id}")
         logging.debug("deposit.swh_id: %s -> %s", old_swh_id, deposit.swh_id)
 
         assert old_swh_id != deposit.swh_id
         logging.debug(
             "deposit.swh_id_context: %s -> %s",
             old_swh_id_context,
             deposit.swh_id_context,
         )
         assert old_swh_id_context != deposit.swh_id_context
         assert deposit.swh_id_context is not None
         logging.debug(
             "deposit.swh_anchor_id: %s -> %s", old_swh_anchor_id, deposit.swh_anchor_id
         )
         assert deposit.swh_anchor_id == old_swh_id
         assert deposit.swh_anchor_id is not None
         logging.debug(
             "deposit.swh_anchor_id_context: %s -> %s",
             old_swh_anchor_id_context,
             deposit.swh_anchor_id_context,
         )
         assert deposit.swh_anchor_id_context is not None
 
         deposit.save()
 
 
 class Migration(migrations.Migration):
     dependencies = [
         ("deposit", "0017_auto_20190925_0906"),
     ]
 
     operations = [
         # Migrate and make the operations possibly reversible
         # https://docs.djangoproject.com/en/3.0/ref/migration-operations/#django.db.migrations.operations.RunPython.noop  # noqa
         migrations.RunPython(
             migrate_deposit_swhid_context_not_null,
             reverse_code=migrations.RunPython.noop,
         ),
         migrations.RunPython(
             migrate_deposit_swhid_context_null, reverse_code=migrations.RunPython.noop
         ),
     ]
diff --git a/swh/deposit/tests/api/test_collection_post_atom.py b/swh/deposit/tests/api/test_collection_post_atom.py
index 39f98b05..270fc50c 100644
--- a/swh/deposit/tests/api/test_collection_post_atom.py
+++ b/swh/deposit/tests/api/test_collection_post_atom.py
@@ -1,661 +1,624 @@
 # Copyright (C) 2017-2021  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 """Tests the handling of the Atom content when doing a POST Col-IRI."""
 
 from io import BytesIO
 import uuid
 
 import attr
 from django.urls import reverse_lazy as reverse
 import pytest
 from rest_framework import status
 
 from swh.deposit.config import (
     COL_IRI,
     DEPOSIT_STATUS_DEPOSITED,
     DEPOSIT_STATUS_LOAD_SUCCESS,
     APIConfig,
 )
 from swh.deposit.models import Deposit, DepositCollection, DepositRequest
 from swh.deposit.parsers import parse_xml
 from swh.deposit.tests.common import post_atom
-from swh.deposit.utils import compute_metadata_context
-from swh.model.identifiers import SWHID, parse_swhid
+from swh.deposit.utils import compute_metadata_context, extended_swhid_from_qualified
+from swh.model.identifiers import QualifiedSWHID
 from swh.model.model import (
     MetadataAuthority,
     MetadataAuthorityType,
     MetadataFetcher,
-    MetadataTargetType,
+    Origin,
     RawExtrinsicMetadata,
 )
 from swh.storage.interface import PagedResult
 
 
 def test_post_deposit_atom_201_even_with_decimal(
     authenticated_client, deposit_collection, atom_dataset
 ):
     """Posting an initial atom entry should return 201 with deposit receipt
 
     """
     atom_error_with_decimal = atom_dataset["error-with-decimal"]
 
     response = post_atom(
         authenticated_client,
         reverse(COL_IRI, args=[deposit_collection.name]),
         data=atom_error_with_decimal,
         HTTP_SLUG="external-id",
         HTTP_IN_PROGRESS="false",
     )
 
     # then
     assert response.status_code == status.HTTP_201_CREATED, response.content.decode()
 
     response_content = parse_xml(BytesIO(response.content))
     deposit_id = response_content["swh:deposit_id"]
 
     deposit = Deposit.objects.get(pk=deposit_id)
     dr = DepositRequest.objects.get(deposit=deposit)
 
     assert dr.metadata is not None
     sw_version = dr.metadata.get("codemeta:softwareVersion")
     assert sw_version == "10.4"
 
 
 def test_post_deposit_atom_400_with_empty_body(
     authenticated_client, deposit_collection, atom_dataset
 ):
     """Posting empty body request should return a 400 response
 
     """
     atom_content = atom_dataset["entry-data-empty-body"]
     response = post_atom(
         authenticated_client,
         reverse(COL_IRI, args=[deposit_collection.name]),
         data=atom_content,
         HTTP_SLUG="external-id",
     )
     assert response.status_code == status.HTTP_400_BAD_REQUEST
     assert b"Empty body request is not supported" in response.content
 
 
 def test_post_deposit_atom_400_badly_formatted_atom(
     authenticated_client, deposit_collection, atom_dataset
 ):
     """Posting a badly formatted atom should return a 400 response
 
     """
     response = post_atom(
         authenticated_client,
         reverse(COL_IRI, args=[deposit_collection.name]),
         data=atom_dataset["entry-data-badly-formatted"],
         HTTP_SLUG="external-id",
     )
     assert response.status_code == status.HTTP_400_BAD_REQUEST
     assert b"Malformed xml metadata" in response.content
 
 
 def test_post_deposit_atom_parsing_error(
     authenticated_client, deposit_collection, atom_dataset
 ):
     """Posting parsing error prone atom should return 400
 
     """
     response = post_atom(
         authenticated_client,
         reverse(COL_IRI, args=[deposit_collection.name]),
         data=atom_dataset["entry-data-parsing-error-prone"],
         HTTP_SLUG="external-id",
     )
     assert response.status_code == status.HTTP_400_BAD_REQUEST
     assert b"Malformed xml metadata" in response.content
 
 
 def test_post_deposit_atom_400_both_create_origin_and_add_to_origin(
     authenticated_client, deposit_collection, atom_dataset
 ):
     """Posting a badly formatted atom should return a 400 response
 
     """
     response = post_atom(
         authenticated_client,
         reverse(COL_IRI, args=[deposit_collection.name]),
         data=atom_dataset["entry-data-with-both-create-origin-and-add-to-origin"],
     )
     assert response.status_code == status.HTTP_400_BAD_REQUEST
     assert (
         b"&lt;swh:create_origin&gt; and &lt;swh:add_to_origin&gt; "
         b"are mutually exclusive"
     ) in response.content
 
 
 def test_post_deposit_atom_403_create_wrong_origin_url_prefix(
     authenticated_client, deposit_collection, atom_dataset, deposit_user
 ):
     """Creating an origin for a prefix not owned by the client is forbidden
 
     """
     origin_url = "http://example.org/foo"
 
     response = post_atom(
         authenticated_client,
         reverse(COL_IRI, args=[deposit_collection.name]),
         data=atom_dataset["entry-data0"] % origin_url,
         HTTP_IN_PROGRESS="true",
     )
     assert response.status_code == status.HTTP_403_FORBIDDEN
     expected_msg = (
         f"Cannot create origin {origin_url}, "
         f"it must start with {deposit_user.provider_url}"
     )
     assert expected_msg in response.content.decode()
 
 
 def test_post_deposit_atom_use_slug_header(
     authenticated_client, deposit_collection, deposit_user, atom_dataset, mocker
 ):
     """Posting an atom entry with a slug header but no origin url generates
     an origin url from the slug
 
     """
     url = reverse(COL_IRI, args=[deposit_collection.name])
 
     slug = str(uuid.uuid4())
 
     # when
     response = post_atom(
         authenticated_client,
         url,
         data=atom_dataset["entry-data-no-origin-url"],
         HTTP_IN_PROGRESS="false",
         HTTP_SLUG=slug,
     )
 
     assert response.status_code == status.HTTP_201_CREATED
     response_content = parse_xml(BytesIO(response.content))
     deposit_id = response_content["swh:deposit_id"]
 
     deposit = Deposit.objects.get(pk=deposit_id)
     assert deposit.collection == deposit_collection
     assert deposit.origin_url == deposit_user.provider_url + slug
     assert deposit.status == DEPOSIT_STATUS_DEPOSITED
 
 
 def test_post_deposit_atom_no_origin_url_nor_slug_header(
     authenticated_client, deposit_collection, deposit_user, atom_dataset, mocker
 ):
     """Posting an atom entry without an origin url or a slug header should generate one
 
     """
     url = reverse(COL_IRI, args=[deposit_collection.name])
 
     slug = str(uuid.uuid4())
     mocker.patch("uuid.uuid4", return_value=slug)
 
     # when
     response = post_atom(
         authenticated_client,
         url,
         data=atom_dataset["entry-data-no-origin-url"],
         HTTP_IN_PROGRESS="false",
     )
 
     assert response.status_code == status.HTTP_201_CREATED
     response_content = parse_xml(BytesIO(response.content))
     deposit_id = response_content["swh:deposit_id"]
 
     deposit = Deposit.objects.get(pk=deposit_id)
     assert deposit.collection == deposit_collection
     assert deposit.origin_url == deposit_user.provider_url + slug
     assert deposit.status == DEPOSIT_STATUS_DEPOSITED
 
 
 def test_post_deposit_atom_with_slug_and_external_identifier(
     authenticated_client, deposit_collection, deposit_user, atom_dataset, mocker
 ):
     """Even though <external_identifier> is deprecated, it should still be
     allowed when it matches the slug, so that we don't break existing clients
 
     """
     url = reverse(COL_IRI, args=[deposit_collection.name])
 
     slug = str(uuid.uuid4())
 
     # when
     response = post_atom(
         authenticated_client,
         url,
         data=atom_dataset["error-with-external-identifier"] % slug,
         HTTP_IN_PROGRESS="false",
         HTTP_SLUG=slug,
     )
 
     assert response.status_code == status.HTTP_201_CREATED
     response_content = parse_xml(BytesIO(response.content))
     deposit_id = response_content["swh:deposit_id"]
 
     deposit = Deposit.objects.get(pk=deposit_id)
     assert deposit.collection == deposit_collection
     assert deposit.origin_url == deposit_user.provider_url + slug
     assert deposit.status == DEPOSIT_STATUS_DEPOSITED
 
 
 def test_post_deposit_atom_with_mismatched_slug_and_external_identifier(
     authenticated_client, deposit_collection, atom_dataset
 ):
     """Posting an atom entry with mismatched slug header and external_identifier
     should return a 400
 
     """
     external_id = "foobar"
     url = reverse(COL_IRI, args=[deposit_collection.name])
 
     # when
     response = post_atom(
         authenticated_client,
         url,
         data=atom_dataset["error-with-external-identifier"] % external_id,
         HTTP_IN_PROGRESS="false",
         HTTP_SLUG="something",
     )
 
     assert (
         b"The &lt;external_identifier&gt; tag and Slug header are deprecated"
         in response.content
     )
     assert response.status_code == status.HTTP_400_BAD_REQUEST
 
 
 def test_post_deposit_atom_with_create_origin_and_external_identifier(
     authenticated_client, deposit_collection, atom_dataset, deposit_user
 ):
     """<atom:external_identifier> was deprecated before <swh:create_origin>
     was introduced, clients should get an error when trying to use both
 
     """
     external_id = "foobar"
     origin_url = deposit_user.provider_url + external_id
     url = reverse(COL_IRI, args=[deposit_collection.name])
 
     document = atom_dataset["error-with-external-identifier-and-create-origin"].format(
         external_id=external_id, url=origin_url,
     )
 
     # when
     response = post_atom(
         authenticated_client, url, data=document, HTTP_IN_PROGRESS="false",
     )
 
     assert b"&lt;external_identifier&gt; is deprecated" in response.content
     assert response.status_code == status.HTTP_400_BAD_REQUEST
 
 
 def test_post_deposit_atom_with_create_origin_and_reference(
     authenticated_client, deposit_collection, atom_dataset, deposit_user
 ):
     """<swh:reference> and <swh:create_origin> are mutually exclusive
 
     """
     external_id = "foobar"
     origin_url = deposit_user.provider_url + external_id
     url = reverse(COL_IRI, args=[deposit_collection.name])
 
     document = atom_dataset["error-with-reference-and-create-origin"].format(
         external_id=external_id, url=origin_url,
     )
 
     # when
     response = post_atom(
         authenticated_client, url, data=document, HTTP_IN_PROGRESS="false",
     )
 
     assert b"only one may be used on a given deposit" in response.content
     assert response.status_code == status.HTTP_400_BAD_REQUEST
 
 
 def test_post_deposit_atom_unknown_collection(authenticated_client, atom_dataset):
     """Posting an atom entry to an unknown collection should return a 404
 
     """
     unknown_collection = "unknown-one"
     with pytest.raises(DepositCollection.DoesNotExist):
         DepositCollection.objects.get(name=unknown_collection)
 
     response = post_atom(
         authenticated_client,
         reverse(COL_IRI, args=[unknown_collection]),
         data=atom_dataset["entry-data0"],
         HTTP_SLUG="something",
     )
     assert response.status_code == status.HTTP_404_NOT_FOUND
     assert b"Unknown collection" in response.content
 
 
 def test_post_deposit_atom_entry_initial(
     authenticated_client, deposit_collection, atom_dataset, deposit_user
 ):
     """Posting an initial atom entry should return 201 with deposit receipt
 
     """
     # given
     origin_url = deposit_user.provider_url + "1225c695-cfb8-4ebb-aaaa-80da344efa6a"
 
     with pytest.raises(Deposit.DoesNotExist):
         Deposit.objects.get(origin_url=origin_url)
 
     atom_entry_data = atom_dataset["entry-data0"] % origin_url
 
     # when
     response = post_atom(
         authenticated_client,
         reverse(COL_IRI, args=[deposit_collection.name]),
         data=atom_entry_data,
         HTTP_IN_PROGRESS="false",
     )
 
     # then
     assert response.status_code == status.HTTP_201_CREATED, response.content.decode()
 
     response_content = parse_xml(BytesIO(response.content))
     deposit_id = response_content["swh:deposit_id"]
 
     deposit = Deposit.objects.get(pk=deposit_id)
     assert deposit.collection == deposit_collection
     assert deposit.origin_url == origin_url
     assert deposit.status == DEPOSIT_STATUS_DEPOSITED
 
     # one associated request to a deposit
     deposit_request = DepositRequest.objects.get(deposit=deposit)
     assert deposit_request.metadata is not None
     assert deposit_request.raw_metadata == atom_entry_data
     assert bool(deposit_request.archive) is False
 
 
 def test_post_deposit_atom_entry_with_codemeta(
     authenticated_client, deposit_collection, atom_dataset, deposit_user
 ):
     """Posting an initial atom entry should return 201 with deposit receipt
 
     """
     # given
     origin_url = deposit_user.provider_url + "1225c695-cfb8-4ebb-aaaa-80da344efa6a"
 
     with pytest.raises(Deposit.DoesNotExist):
         Deposit.objects.get(origin_url=origin_url)
 
     atom_entry_data = atom_dataset["codemeta-sample"] % origin_url
     # when
     response = post_atom(
         authenticated_client,
         reverse(COL_IRI, args=[deposit_collection.name]),
         data=atom_entry_data,
         HTTP_IN_PROGRESS="false",
     )
 
     # then
     assert response.status_code == status.HTTP_201_CREATED
 
     response_content = parse_xml(BytesIO(response.content))
 
     deposit_id = response_content["swh:deposit_id"]
 
     deposit = Deposit.objects.get(pk=deposit_id)
     assert deposit.collection == deposit_collection
     assert deposit.origin_url == origin_url
     assert deposit.status == DEPOSIT_STATUS_DEPOSITED
 
     # one associated request to a deposit
     deposit_request = DepositRequest.objects.get(deposit=deposit)
     assert deposit_request.metadata is not None
     assert deposit_request.raw_metadata == atom_entry_data
     assert bool(deposit_request.archive) is False
 
 
 def test_deposit_metadata_invalid(
     authenticated_client, deposit_collection, atom_dataset
 ):
     """Posting invalid swhid reference is bad request returned to client
 
     """
     invalid_swhid = "swh:1:dir :31b5c8cc985d190b5a7ef4878128ebfdc2358f49"
     xml_data = atom_dataset["entry-data-with-swhid"].format(swhid=invalid_swhid)
 
     response = post_atom(
         authenticated_client,
         reverse(COL_IRI, args=[deposit_collection.name]),
         data=xml_data,
     )
     assert response.status_code == status.HTTP_400_BAD_REQUEST
     assert b"Invalid SWHID reference" in response.content
 
 
 def test_deposit_metadata_fails_functional_checks(
     authenticated_client, deposit_collection, atom_dataset
 ):
     """Posting functionally invalid metadata swhid is bad request returned to client
 
     """
     swhid = "swh:1:dir:31b5c8cc985d190b5a7ef4878128ebfdc2358f49"
     invalid_xml_data = atom_dataset[
         "entry-data-with-swhid-fail-metadata-functional-checks"
     ].format(swhid=swhid)
 
     response = post_atom(
         authenticated_client,
         reverse(COL_IRI, args=[deposit_collection.name]),
         data=invalid_xml_data,
     )
     assert response.status_code == status.HTTP_400_BAD_REQUEST
     assert b"Functional metadata checks failure" in response.content
 
 
 @pytest.mark.parametrize(
-    "swhid,target_type",
+    "swhid",
     [
-        (
-            "swh:1:cnt:01b5c8cc985d190b5a7ef4878128ebfdc2358f49",
-            MetadataTargetType.CONTENT,
-        ),
-        (
-            "swh:1:dir:11b5c8cc985d190b5a7ef4878128ebfdc2358f49",
-            MetadataTargetType.DIRECTORY,
-        ),
-        (
-            "swh:1:rev:21b5c8cc985d190b5a7ef4878128ebfdc2358f49",
-            MetadataTargetType.REVISION,
-        ),
-        (
-            "swh:1:rel:31b5c8cc985d190b5a7ef4878128ebfdc2358f49",
-            MetadataTargetType.RELEASE,
-        ),
-        (
-            "swh:1:snp:41b5c8cc985d190b5a7ef4878128ebfdc2358f49",
-            MetadataTargetType.SNAPSHOT,
-        ),
-        (
-            "swh:1:cnt:51b5c8cc985d190b5a7ef4878128ebfdc2358f49;origin=h://g.c/o/repo",
-            MetadataTargetType.CONTENT,
-        ),
-        (
-            "swh:1:dir:c4993c872593e960dc84e4430dbbfbc34fd706d0;origin=https://inria.halpreprod.archives-ouvertes.fr/hal-01243573;visit=swh:1:snp:0175049fc45055a3824a1675ac06e3711619a55a;anchor=swh:1:rev:b5f505b005435fa5c4fa4c279792bd7b17167c04;path=/",  # noqa
-            MetadataTargetType.DIRECTORY,
-        ),
-        (
-            "swh:1:rev:71b5c8cc985d190b5a7ef4878128ebfdc2358f49;origin=h://g.c/o/repo",
-            MetadataTargetType.REVISION,
-        ),
-        (
-            "swh:1:rel:81b5c8cc985d190b5a7ef4878128ebfdc2358f49;origin=h://g.c/o/repo",
-            MetadataTargetType.RELEASE,
-        ),
-        (
-            "swh:1:snp:91b5c8cc985d190b5a7ef4878128ebfdc2358f49;origin=h://g.c/o/repo",
-            MetadataTargetType.SNAPSHOT,
-        ),
+        "swh:1:cnt:01b5c8cc985d190b5a7ef4878128ebfdc2358f49",
+        "swh:1:dir:11b5c8cc985d190b5a7ef4878128ebfdc2358f49",
+        "swh:1:rev:21b5c8cc985d190b5a7ef4878128ebfdc2358f49",
+        "swh:1:rel:31b5c8cc985d190b5a7ef4878128ebfdc2358f49",
+        "swh:1:snp:41b5c8cc985d190b5a7ef4878128ebfdc2358f49",
+        "swh:1:cnt:51b5c8cc985d190b5a7ef4878128ebfdc2358f49;origin=h://g.c/o/repo",
+        "swh:1:dir:c4993c872593e960dc84e4430dbbfbc34fd706d0;origin=https://inria.halpreprod.archives-ouvertes.fr/hal-01243573;visit=swh:1:snp:0175049fc45055a3824a1675ac06e3711619a55a;anchor=swh:1:rev:b5f505b005435fa5c4fa4c279792bd7b17167c04;path=/",  # noqa
+        "swh:1:rev:71b5c8cc985d190b5a7ef4878128ebfdc2358f49;origin=h://g.c/o/repo",
+        "swh:1:rel:81b5c8cc985d190b5a7ef4878128ebfdc2358f49;origin=h://g.c/o/repo",
+        "swh:1:snp:91b5c8cc985d190b5a7ef4878128ebfdc2358f49;origin=h://g.c/o/repo",
     ],
 )
 def test_deposit_metadata_swhid(
-    swhid,
-    target_type,
-    authenticated_client,
-    deposit_collection,
-    atom_dataset,
-    swh_storage,
+    swhid, authenticated_client, deposit_collection, atom_dataset, swh_storage,
 ):
     """Posting a swhid reference is stored on raw extrinsic metadata storage
 
     """
-    swhid_reference = parse_swhid(swhid)
-    swhid_core = attr.evolve(swhid_reference, metadata={})
+    swhid_reference = QualifiedSWHID.from_string(swhid)
+    swhid_target = extended_swhid_from_qualified(swhid_reference)
 
     xml_data = atom_dataset["entry-data-with-swhid"].format(swhid=swhid)
     deposit_client = authenticated_client.deposit_client
 
     response = post_atom(
         authenticated_client,
         reverse(COL_IRI, args=[deposit_collection.name]),
         data=xml_data,
     )
 
     assert response.status_code == status.HTTP_201_CREATED
     response_content = parse_xml(BytesIO(response.content))
 
     # Ensure the deposit is finalized
     deposit_id = int(response_content["swh:deposit_id"])
     deposit = Deposit.objects.get(pk=deposit_id)
-    assert isinstance(swhid_core, SWHID)
-    assert deposit.swhid == str(swhid_core)
+    assert deposit.swhid == str(swhid_target)
     assert deposit.swhid_context == str(swhid_reference)
     assert deposit.complete_date == deposit.reception_date
     assert deposit.complete_date is not None
     assert deposit.status == DEPOSIT_STATUS_LOAD_SUCCESS
 
     # Ensure metadata stored in the metadata storage is consistent
     metadata_authority = MetadataAuthority(
         type=MetadataAuthorityType.DEPOSIT_CLIENT,
         url=deposit_client.provider_url,
         metadata={"name": deposit_client.last_name},
     )
 
     actual_authority = swh_storage.metadata_authority_get(
         MetadataAuthorityType.DEPOSIT_CLIENT, url=deposit_client.provider_url
     )
     assert actual_authority == metadata_authority
 
     config = APIConfig()
     metadata_fetcher = MetadataFetcher(
         name=config.tool["name"],
         version=config.tool["version"],
         metadata=config.tool["configuration"],
     )
 
     actual_fetcher = swh_storage.metadata_fetcher_get(
         config.tool["name"], config.tool["version"]
     )
     assert actual_fetcher == metadata_fetcher
 
     page_results = swh_storage.raw_extrinsic_metadata_get(
-        target_type, swhid_core, metadata_authority
+        swhid_target, metadata_authority
     )
     discovery_date = page_results.results[0].discovery_date
 
     assert len(page_results.results) == 1
     assert page_results.next_page_token is None
 
-    object_type, metadata_context = compute_metadata_context(swhid_reference)
+    metadata_context = compute_metadata_context(swhid_reference)
     assert page_results == PagedResult(
         results=[
             RawExtrinsicMetadata(
-                type=object_type,
-                target=swhid_core,
+                target=swhid_target,
                 discovery_date=discovery_date,
                 authority=attr.evolve(metadata_authority, metadata=None),
                 fetcher=attr.evolve(metadata_fetcher, metadata=None),
                 format="sword-v2-atom-codemeta",
                 metadata=xml_data.encode(),
                 **metadata_context,
             )
         ],
         next_page_token=None,
     )
     assert deposit.complete_date == discovery_date
 
 
 @pytest.mark.parametrize(
     "url", ["https://gitlab.org/user/repo", "https://whatever.else/repo",]
 )
 def test_deposit_metadata_origin(
     url, authenticated_client, deposit_collection, atom_dataset, swh_storage,
 ):
     """Posting a swhid reference is stored on raw extrinsic metadata storage
 
     """
     xml_data = atom_dataset["entry-data-with-origin-reference"].format(url=url)
+    origin_swhid = Origin(url).swhid()
     deposit_client = authenticated_client.deposit_client
     response = post_atom(
         authenticated_client,
         reverse(COL_IRI, args=[deposit_collection.name]),
         data=xml_data,
     )
 
     assert response.status_code == status.HTTP_201_CREATED
     response_content = parse_xml(BytesIO(response.content))
     # Ensure the deposit is finalized
     deposit_id = int(response_content["swh:deposit_id"])
     deposit = Deposit.objects.get(pk=deposit_id)
     # we got not swhid as input so we cannot have those
     assert deposit.swhid is None
     assert deposit.swhid_context is None
     assert deposit.complete_date == deposit.reception_date
     assert deposit.complete_date is not None
     assert deposit.status == DEPOSIT_STATUS_LOAD_SUCCESS
 
     # Ensure metadata stored in the metadata storage is consistent
     metadata_authority = MetadataAuthority(
         type=MetadataAuthorityType.DEPOSIT_CLIENT,
         url=deposit_client.provider_url,
         metadata={"name": deposit_client.last_name},
     )
 
     actual_authority = swh_storage.metadata_authority_get(
         MetadataAuthorityType.DEPOSIT_CLIENT, url=deposit_client.provider_url
     )
     assert actual_authority == metadata_authority
 
     config = APIConfig()
     metadata_fetcher = MetadataFetcher(
         name=config.tool["name"],
         version=config.tool["version"],
         metadata=config.tool["configuration"],
     )
 
     actual_fetcher = swh_storage.metadata_fetcher_get(
         config.tool["name"], config.tool["version"]
     )
     assert actual_fetcher == metadata_fetcher
 
     page_results = swh_storage.raw_extrinsic_metadata_get(
-        MetadataTargetType.ORIGIN, url, metadata_authority
+        origin_swhid, metadata_authority
     )
     discovery_date = page_results.results[0].discovery_date
 
     assert len(page_results.results) == 1
     assert page_results.next_page_token is None
 
     assert page_results == PagedResult(
         results=[
             RawExtrinsicMetadata(
-                type=MetadataTargetType.ORIGIN,
-                target=url,
+                target=origin_swhid,
                 discovery_date=discovery_date,
                 authority=attr.evolve(metadata_authority, metadata=None),
                 fetcher=attr.evolve(metadata_fetcher, metadata=None),
                 format="sword-v2-atom-codemeta",
                 metadata=xml_data.encode(),
             )
         ],
         next_page_token=None,
     )
     assert deposit.complete_date == discovery_date
diff --git a/swh/deposit/tests/api/test_deposit_private_update_status.py b/swh/deposit/tests/api/test_deposit_private_update_status.py
index a47ced46..c054c4ce 100644
--- a/swh/deposit/tests/api/test_deposit_private_update_status.py
+++ b/swh/deposit/tests/api/test_deposit_private_update_status.py
@@ -1,196 +1,195 @@
 # Copyright (C) 2017-2021  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import copy
 import json
 
 from django.urls import reverse_lazy as reverse
 from rest_framework import status
 
 from swh.deposit.api.private.deposit_update_status import MANDATORY_KEYS
 from swh.deposit.config import (
     DEPOSIT_STATUS_LOAD_FAILURE,
     DEPOSIT_STATUS_LOAD_SUCCESS,
     PRIVATE_PUT_DEPOSIT,
 )
 from swh.deposit.models import Deposit
-from swh.model.identifiers import DIRECTORY, REVISION, SNAPSHOT, swhid
 
 PRIVATE_PUT_DEPOSIT_NC = PRIVATE_PUT_DEPOSIT + "-nc"
 
 
 def private_check_url_endpoints(collection, deposit):
     """There are 2 endpoints to check (one with collection, one without)"""
     return [
         reverse(PRIVATE_PUT_DEPOSIT, args=[collection.name, deposit.id]),
         reverse(PRIVATE_PUT_DEPOSIT_NC, args=[deposit.id]),
     ]
 
 
 def test_update_deposit_status_success_with_info(
     authenticated_client, deposit_collection, ready_deposit_verified
 ):
     """Update deposit with load success should require all information to succeed
 
     """
     deposit = ready_deposit_verified
     expected_status = DEPOSIT_STATUS_LOAD_SUCCESS
     origin_url = "something"
     directory_id = "42a13fc721c8716ff695d0d62fc851d641f3a12b"
     revision_id = "47dc6b4636c7f6cba0df83e3d5490bf4334d987e"
     snapshot_id = "68c0d26104d47e278dd6be07ed61fafb561d0d20"
 
     full_body_info = {
         "status": DEPOSIT_STATUS_LOAD_SUCCESS,
         "revision_id": revision_id,
         "directory_id": directory_id,
         "snapshot_id": snapshot_id,
         "origin_url": origin_url,
     }
     for url in private_check_url_endpoints(deposit_collection, deposit):
-        dir_id = swhid(DIRECTORY, directory_id)
-        rev_id = swhid(REVISION, revision_id)
-        snp_id = swhid(SNAPSHOT, snapshot_id)
-
         expected_swhid = "swh:1:dir:%s" % directory_id
         expected_swhid_context = (
-            f"{dir_id};origin={origin_url};" + f"visit={snp_id};anchor={rev_id};path=/"
+            f"{expected_swhid}"
+            f";origin={origin_url}"
+            f";visit=swh:1:snp:{snapshot_id}"
+            f";anchor=swh:1:rev:{revision_id}"
+            f";path=/"
         )
 
         response = authenticated_client.put(
             url, content_type="application/json", data=json.dumps(full_body_info),
         )
 
         assert response.status_code == status.HTTP_204_NO_CONTENT
 
         deposit = Deposit.objects.get(pk=deposit.id)
         assert deposit.status == expected_status
         assert deposit.swhid == expected_swhid
         assert deposit.swhid_context == expected_swhid_context
 
         # Reset deposit
         deposit = ready_deposit_verified
         deposit.save()
 
 
 def test_update_deposit_status_rejected_with_info(
     authenticated_client, deposit_collection, ready_deposit_verified
 ):
     """Update deposit with rejected status needs few information to succeed
 
     """
     deposit = ready_deposit_verified
 
     for url in private_check_url_endpoints(deposit_collection, deposit):
         response = authenticated_client.put(
             url,
             content_type="application/json",
             data=json.dumps({"status": DEPOSIT_STATUS_LOAD_FAILURE}),
         )
 
         assert response.status_code == status.HTTP_204_NO_CONTENT
 
         deposit = Deposit.objects.get(pk=deposit.id)
         assert deposit.status == DEPOSIT_STATUS_LOAD_FAILURE
 
         assert deposit.swhid is None
         assert deposit.swhid_context is None
 
         # Reset status
         deposit = ready_deposit_verified
         deposit.save()
 
 
 def test_update_deposit_status_success_with_incomplete_data(
     authenticated_client, deposit_collection, ready_deposit_verified
 ):
     """Update deposit status with status success and incomplete information should fail
 
     """
     deposit = ready_deposit_verified
 
     origin_url = "something"
     directory_id = "42a13fc721c8716ff695d0d62fc851d641f3a12b"
     revision_id = "47dc6b4636c7f6cba0df83e3d5490bf4334d987e"
     snapshot_id = "68c0d26104d47e278dd6be07ed61fafb561d0d20"
 
     new_status = DEPOSIT_STATUS_LOAD_SUCCESS
     full_body_info = {
         "status": new_status,
         "revision_id": revision_id,
         "directory_id": directory_id,
         "snapshot_id": snapshot_id,
         "origin_url": origin_url,
     }
 
     for url in private_check_url_endpoints(deposit_collection, deposit):
         for key in MANDATORY_KEYS:
             # Crafting body with missing information so that it raises
             body = copy.deepcopy(full_body_info)
             body.pop(key)  # make the body incomplete
 
             response = authenticated_client.put(
                 url, content_type="application/json", data=json.dumps(body),
             )
 
             assert response.status_code == status.HTTP_400_BAD_REQUEST
             assert (
                 f"deposit status to {new_status} requires information {key}"
                 in response.content.decode("utf-8")
             )
 
 
 def test_update_deposit_status_will_fail_with_unknown_status(
     authenticated_client, deposit_collection, ready_deposit_verified
 ):
     """Unknown status for update should return a 400 response
 
     """
     deposit = ready_deposit_verified
     for url in private_check_url_endpoints(deposit_collection, deposit):
         response = authenticated_client.put(
             url, content_type="application/json", data=json.dumps({"status": "unknown"})
         )
 
         assert response.status_code == status.HTTP_400_BAD_REQUEST
         assert b"Possible status in " in response.content
 
 
 def test_update_deposit_status_will_fail_with_no_status_key(
     authenticated_client, deposit_collection, ready_deposit_verified
 ):
     """No status provided for update should return a 400 response
 
     """
     deposit = ready_deposit_verified
     for url in private_check_url_endpoints(deposit_collection, deposit):
         response = authenticated_client.put(
             url,
             content_type="application/json",
             data=json.dumps({"something": "something"}),
         )
 
         assert response.status_code == status.HTTP_400_BAD_REQUEST
         assert b"The status key is mandatory with possible values" in response.content
 
 
 def test_update_deposit_status_success_without_swhid_fail(
     authenticated_client, deposit_collection, ready_deposit_verified
 ):
     """Providing successful status without swhid should return a 400
 
     """
     deposit = ready_deposit_verified
     for url in private_check_url_endpoints(deposit_collection, deposit):
         response = authenticated_client.put(
             url,
             content_type="application/json",
             data=json.dumps({"status": DEPOSIT_STATUS_LOAD_SUCCESS}),
         )
 
         assert response.status_code == status.HTTP_400_BAD_REQUEST
         assert (
             b"Updating deposit status to done requires information" in response.content
         )
diff --git a/swh/deposit/tests/api/test_deposit_update_atom.py b/swh/deposit/tests/api/test_deposit_update_atom.py
index 81004ae0..674a1032 100644
--- a/swh/deposit/tests/api/test_deposit_update_atom.py
+++ b/swh/deposit/tests/api/test_deposit_update_atom.py
@@ -1,608 +1,606 @@
 # Copyright (C) 2017-2021  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 from io import BytesIO
 
 import attr
 from django.urls import reverse_lazy as reverse
 import pytest
 from rest_framework import status
 
 from swh.deposit.api.common import ACCEPT_ARCHIVE_CONTENT_TYPES
 from swh.deposit.config import (
     COL_IRI,
     DEPOSIT_STATUS_DEPOSITED,
     EDIT_IRI,
     EM_IRI,
     SE_IRI,
     APIConfig,
 )
 from swh.deposit.models import Deposit, DepositCollection, DepositRequest
 from swh.deposit.parsers import parse_xml
 from swh.deposit.tests.common import post_atom, put_atom
 from swh.model.hashutil import hash_to_bytes
-from swh.model.identifiers import parse_swhid, swhid
+from swh.model.identifiers import CoreSWHID, ExtendedSWHID, ObjectType
 from swh.model.model import (
     MetadataAuthority,
     MetadataAuthorityType,
     MetadataFetcher,
-    MetadataTargetType,
     RawExtrinsicMetadata,
 )
 from swh.storage.interface import PagedResult
 
 
 def test_post_deposit_atom_entry_multiple_steps(
     authenticated_client, deposit_collection, atom_dataset, deposit_user
 ):
     """After initial deposit, updating a deposit should return a 201
 
     """
     # given
     origin_url = deposit_user.provider_url + "2225c695-cfb8-4ebb-aaaa-80da344efa6a"
 
     with pytest.raises(Deposit.DoesNotExist):
         deposit = Deposit.objects.get(origin_url=origin_url)
 
     # when
     response = post_atom(
         authenticated_client,
         reverse(COL_IRI, args=[deposit_collection.name]),
         data=atom_dataset["entry-data1"],
         HTTP_IN_PROGRESS="True",
     )
 
     # then
     assert response.status_code == status.HTTP_201_CREATED
 
     response_content = parse_xml(BytesIO(response.content))
     deposit_id = int(response_content["swh:deposit_id"])
 
     deposit = Deposit.objects.get(pk=deposit_id)
     assert deposit.collection == deposit_collection
     assert deposit.origin_url is None  # not provided yet
     assert deposit.status == "partial"
 
     # one associated request to a deposit
     deposit_requests = DepositRequest.objects.filter(deposit=deposit)
     assert len(deposit_requests) == 1
 
     atom_entry_data = atom_dataset["entry-only-create-origin"] % (origin_url)
 
     for link in response_content["atom:link"]:
         if link["@rel"] == "http://purl.org/net/sword/terms/add":
             se_iri = link["@href"]
             break
     else:
         assert False, f"missing SE-IRI from {response_content['link']}"
 
     # when updating the first deposit post
     response = post_atom(
         authenticated_client, se_iri, data=atom_entry_data, HTTP_IN_PROGRESS="False",
     )
 
     # then
     assert response.status_code == status.HTTP_201_CREATED, response.content.decode()
 
     response_content = parse_xml(BytesIO(response.content))
     deposit_id = int(response_content["swh:deposit_id"])
 
     deposit = Deposit.objects.get(pk=deposit_id)
     assert deposit.collection == deposit_collection
     assert deposit.origin_url == origin_url
     assert deposit.status == DEPOSIT_STATUS_DEPOSITED
 
     assert len(Deposit.objects.all()) == 1
 
     # now 2 associated requests to a same deposit
     deposit_requests = DepositRequest.objects.filter(deposit=deposit).order_by("id")
     assert len(deposit_requests) == 2
 
     atom_entry_data1 = atom_dataset["entry-data1"]
     expected_meta = [
         {"metadata": parse_xml(atom_entry_data1), "raw_metadata": atom_entry_data1},
         {"metadata": parse_xml(atom_entry_data), "raw_metadata": atom_entry_data},
     ]
 
     for i, deposit_request in enumerate(deposit_requests):
         actual_metadata = deposit_request.metadata
         assert actual_metadata == expected_meta[i]["metadata"]
         assert deposit_request.raw_metadata == expected_meta[i]["raw_metadata"]
         assert bool(deposit_request.archive) is False
 
 
 def test_replace_metadata_to_deposit_is_possible(
     tmp_path,
     authenticated_client,
     partial_deposit_with_metadata,
     deposit_collection,
     atom_dataset,
     deposit_user,
 ):
     """Replace all metadata with another one should return a 204 response
 
     """
     # given
     deposit = partial_deposit_with_metadata
     origin_url = deposit_user.provider_url + deposit.external_id
     raw_metadata0 = atom_dataset["entry-data0"] % origin_url
 
     requests_meta = DepositRequest.objects.filter(deposit=deposit, type="metadata")
     assert len(requests_meta) == 1
     request_meta0 = requests_meta[0]
     assert request_meta0.raw_metadata == raw_metadata0
 
     requests_archive0 = DepositRequest.objects.filter(deposit=deposit, type="archive")
     assert len(requests_archive0) == 1
 
     update_uri = reverse(EDIT_IRI, args=[deposit_collection.name, deposit.id])
 
     response = put_atom(
         authenticated_client, update_uri, data=atom_dataset["entry-data1"],
     )
 
     assert response.status_code == status.HTTP_204_NO_CONTENT
 
     requests_meta = DepositRequest.objects.filter(deposit=deposit, type="metadata")
 
     assert len(requests_meta) == 1
     request_meta1 = requests_meta[0]
     raw_metadata1 = request_meta1.raw_metadata
     assert raw_metadata1 == atom_dataset["entry-data1"]
     assert raw_metadata0 != raw_metadata1
     assert request_meta0 != request_meta1
 
     # check we did not touch the other parts
     requests_archive1 = DepositRequest.objects.filter(deposit=deposit, type="archive")
     assert len(requests_archive1) == 1
     assert set(requests_archive0) == set(requests_archive1)
 
 
 def test_add_metadata_to_deposit_is_possible(
     authenticated_client,
     deposit_collection,
     partial_deposit_with_metadata,
     atom_dataset,
     deposit_user,
 ):
     """Add metadata with another one should return a 204 response
 
     """
     deposit = partial_deposit_with_metadata
     origin_url = deposit_user.provider_url + deposit.external_id
     requests = DepositRequest.objects.filter(deposit=deposit, type="metadata")
 
     assert len(requests) == 1
 
     requests_archive0 = DepositRequest.objects.filter(deposit=deposit, type="archive")
     assert len(requests_archive0) == 1
 
     update_uri = reverse(SE_IRI, args=[deposit_collection.name, deposit.id])
 
     atom_entry = atom_dataset["entry-data1"]
     response = post_atom(authenticated_client, update_uri, data=atom_entry)
 
     assert response.status_code == status.HTTP_201_CREATED
 
     requests = DepositRequest.objects.filter(deposit=deposit, type="metadata").order_by(
         "id"
     )
 
     assert len(requests) == 2
     expected_raw_meta0 = atom_dataset["entry-data0"] % origin_url
     # a new one was added
     assert requests[0].raw_metadata == expected_raw_meta0
     assert requests[1].raw_metadata == atom_entry
 
     # check we did not touch the other parts
     requests_archive1 = DepositRequest.objects.filter(deposit=deposit, type="archive")
     assert len(requests_archive1) == 1
     assert set(requests_archive0) == set(requests_archive1)
 
 
 def test_add_metadata_to_unknown_deposit(
     deposit_collection, authenticated_client, atom_dataset
 ):
     """Replacing metadata to unknown deposit should return a 404 response
 
     """
     unknown_deposit_id = 1000
     try:
         Deposit.objects.get(pk=unknown_deposit_id)
     except Deposit.DoesNotExist:
         assert True
 
     url = reverse(SE_IRI, args=[deposit_collection, unknown_deposit_id])
     response = post_atom(authenticated_client, url, data=atom_dataset["entry-data1"],)
     assert response.status_code == status.HTTP_404_NOT_FOUND
     response_content = parse_xml(response.content)
     assert (
         "Deposit 1000 does not exist" in response_content["sword:error"]["atom:summary"]
     )
 
 
 def test_add_metadata_to_unknown_collection(
     partial_deposit, authenticated_client, atom_dataset
 ):
     """Replacing metadata to unknown deposit should return a 404 response
 
     """
     deposit = partial_deposit
     unknown_collection_name = "unknown-collection"
     try:
         DepositCollection.objects.get(name=unknown_collection_name)
     except DepositCollection.DoesNotExist:
         assert True
 
     url = reverse(SE_IRI, args=[unknown_collection_name, deposit.id])
     response = post_atom(authenticated_client, url, data=atom_dataset["entry-data1"],)
     assert response.status_code == status.HTTP_404_NOT_FOUND
     response_content = parse_xml(response.content)
     assert "Unknown collection name" in response_content["sword:error"]["atom:summary"]
 
 
 def test_replace_metadata_to_unknown_deposit(
     authenticated_client, deposit_collection, atom_dataset
 ):
     """Adding metadata to unknown deposit should return a 404 response
 
     """
     unknown_deposit_id = 998
     try:
         Deposit.objects.get(pk=unknown_deposit_id)
     except Deposit.DoesNotExist:
         assert True
     url = reverse(EDIT_IRI, args=[deposit_collection.name, unknown_deposit_id])
     response = put_atom(authenticated_client, url, data=atom_dataset["entry-data1"],)
     assert response.status_code == status.HTTP_404_NOT_FOUND
     response_content = parse_xml(response.content)
     assert (
         "Deposit %s does not exist" % unknown_deposit_id
         == response_content["sword:error"]["atom:summary"]
     )
 
 
 def test_post_metadata_to_em_iri_failure(
     authenticated_client, deposit_collection, partial_deposit, atom_dataset
 ):
     """Update (POST) archive with wrong content type should return 400
 
     """
     deposit = partial_deposit
     update_uri = reverse(EM_IRI, args=[deposit_collection.name, deposit.id])
     response = authenticated_client.post(
         update_uri,
         content_type="application/x-gtar-compressed",
         data=atom_dataset["entry-data1"],
     )
     assert response.status_code == status.HTTP_400_BAD_REQUEST
     assert b"Packaging format supported is restricted" in response.content
     for supported_format in ACCEPT_ARCHIVE_CONTENT_TYPES:
         assert supported_format.encode() in response.content
 
 
 def test_put_metadata_to_em_iri_failure(
     authenticated_client, deposit_collection, partial_deposit, atom_dataset
 ):
     """Update (PUT) archive with wrong content type should return 400
 
     """
     # given
     deposit = partial_deposit
     # when
     update_uri = reverse(EM_IRI, args=[deposit_collection.name, deposit.id])
     response = put_atom(
         authenticated_client, update_uri, data=atom_dataset["entry-data1"],
     )
     # then
     assert response.status_code == status.HTTP_400_BAD_REQUEST
     assert b"Packaging format supported is restricted" in response.content
     for supported_format in ACCEPT_ARCHIVE_CONTENT_TYPES:
         assert supported_format.encode() in response.content
 
 
 def test_put_update_metadata_done_deposit_nominal(
     tmp_path,
     authenticated_client,
     complete_deposit,
     deposit_collection,
     atom_dataset,
     sample_data,
     swh_storage,
 ):
     """Nominal scenario, client send an update of metadata on a deposit with status "done"
        with an existing swhid. Such swhid has its metadata updated accordingly both in
        the deposit backend and in the metadata storage.
 
        Response: 204
 
     """
-    deposit_swhid = parse_swhid(complete_deposit.swhid)
-    assert deposit_swhid.object_type == "directory"
+    deposit_swhid = CoreSWHID.from_string(complete_deposit.swhid)
+    assert deposit_swhid.object_type == ObjectType.DIRECTORY
     directory_id = hash_to_bytes(deposit_swhid.object_id)
 
     # directory targeted by the complete_deposit does not exist in the storage
     assert list(swh_storage.directory_missing([directory_id])) == [directory_id]
 
     # so let's create a directory reference in the storage (current deposit targets an
     # unknown swhid)
     existing_directory = sample_data.directory
     swh_storage.directory_add([existing_directory])
     assert list(swh_storage.directory_missing([existing_directory.id])) == []
 
     # and patch one complete deposit swhid so it targets said reference
-    complete_deposit.swhid = swhid("directory", existing_directory.id)
+    complete_deposit.swhid = str(existing_directory.swhid())
     complete_deposit.save()
 
     actual_existing_requests_archive = DepositRequest.objects.filter(
         deposit=complete_deposit, type="archive"
     )
     nb_archives = len(actual_existing_requests_archive)
     actual_existing_requests_metadata = DepositRequest.objects.filter(
         deposit=complete_deposit, type="metadata"
     )
     nb_metadata = len(actual_existing_requests_metadata)
 
     update_uri = reverse(EDIT_IRI, args=[deposit_collection.name, complete_deposit.id])
     response = put_atom(
         authenticated_client,
         update_uri,
         data=atom_dataset["entry-data1"],
         HTTP_X_CHECK_SWHID=complete_deposit.swhid,
     )
 
     assert response.status_code == status.HTTP_204_NO_CONTENT
 
     new_requests_meta = DepositRequest.objects.filter(
         deposit=complete_deposit, type="metadata"
     )
     assert len(new_requests_meta) == nb_metadata + 1
     request_meta1 = new_requests_meta[0]
     raw_metadata1 = request_meta1.raw_metadata
     assert raw_metadata1 == atom_dataset["entry-data1"]
 
     # check we did not touch the other parts
     requests_archive1 = DepositRequest.objects.filter(
         deposit=complete_deposit, type="archive"
     )
     assert len(requests_archive1) == nb_archives
     assert set(actual_existing_requests_archive) == set(requests_archive1)
 
     # Ensure metadata stored in the metadata storage is consistent
     metadata_authority = MetadataAuthority(
         type=MetadataAuthorityType.DEPOSIT_CLIENT,
         url=complete_deposit.client.provider_url,
         metadata={"name": complete_deposit.client.last_name},
     )
 
     actual_authority = swh_storage.metadata_authority_get(
         MetadataAuthorityType.DEPOSIT_CLIENT, url=complete_deposit.client.provider_url
     )
     assert actual_authority == metadata_authority
 
     config = APIConfig()
     metadata_fetcher = MetadataFetcher(
         name=config.tool["name"],
         version=config.tool["version"],
         metadata=config.tool["configuration"],
     )
 
     actual_fetcher = swh_storage.metadata_fetcher_get(
         config.tool["name"], config.tool["version"]
     )
     assert actual_fetcher == metadata_fetcher
 
-    directory_swhid = parse_swhid(complete_deposit.swhid)
+    directory_swhid = ExtendedSWHID.from_string(complete_deposit.swhid)
     page_results = swh_storage.raw_extrinsic_metadata_get(
-        MetadataTargetType.DIRECTORY, directory_swhid, metadata_authority
+        directory_swhid, metadata_authority
     )
     assert page_results == PagedResult(
         results=[
             RawExtrinsicMetadata(
-                type=MetadataTargetType.DIRECTORY,
                 target=directory_swhid,
                 discovery_date=request_meta1.date,
                 authority=attr.evolve(metadata_authority, metadata=None),
                 fetcher=attr.evolve(metadata_fetcher, metadata=None),
                 format="sword-v2-atom-codemeta",
                 metadata=raw_metadata1.encode(),
                 origin=complete_deposit.origin_url,
             )
         ],
         next_page_token=None,
     )
 
 
 def test_put_update_metadata_done_deposit_failure_mismatched_swhid(
     tmp_path,
     authenticated_client,
     complete_deposit,
     deposit_collection,
     atom_dataset,
     swh_storage,
 ):
     """failure: client updates metadata on deposit with SWHID not matching the deposit's.
 
        Response: 400
 
     """
     incorrect_swhid = "swh:1:dir:ef04a768181417fbc5eef4243e2507915f24deea"
     assert complete_deposit.swhid != incorrect_swhid
 
     update_uri = reverse(EDIT_IRI, args=[deposit_collection.name, complete_deposit.id])
     response = put_atom(
         authenticated_client,
         update_uri,
         data=atom_dataset["entry-data1"],
         HTTP_X_CHECK_SWHID=incorrect_swhid,
     )
 
     assert response.status_code == status.HTTP_400_BAD_REQUEST
     assert b"Mismatched provided SWHID" in response.content
 
 
 def test_put_update_metadata_done_deposit_failure_malformed_xml(
     tmp_path,
     authenticated_client,
     complete_deposit,
     deposit_collection,
     atom_dataset,
     swh_storage,
 ):
     """failure: client updates metadata on deposit done with a malformed xml
 
        Response: 400
 
     """
     update_uri = reverse(EDIT_IRI, args=[deposit_collection.name, complete_deposit.id])
     response = put_atom(
         authenticated_client,
         update_uri,
         data=atom_dataset["entry-data-ko"],
         HTTP_X_CHECK_SWHID=complete_deposit.swhid,
     )
 
     assert response.status_code == status.HTTP_400_BAD_REQUEST
     assert b"Malformed xml metadata" in response.content
 
 
 def test_put_update_metadata_done_deposit_failure_empty_xml(
     tmp_path,
     authenticated_client,
     complete_deposit,
     deposit_collection,
     atom_dataset,
     swh_storage,
 ):
     """failure: client updates metadata on deposit done with an empty xml.
 
        Response: 400
 
     """
     update_uri = reverse(EDIT_IRI, args=[deposit_collection.name, complete_deposit.id])
 
     atom_content = atom_dataset["entry-data-empty-body"]
     response = put_atom(
         authenticated_client,
         update_uri,
         data=atom_content,
         HTTP_X_CHECK_SWHID=complete_deposit.swhid,
     )
 
     assert response.status_code == status.HTTP_400_BAD_REQUEST
     assert b"Empty body request is not supported" in response.content
 
 
 def test_put_update_metadata_done_deposit_failure_functional_checks(
     tmp_path,
     authenticated_client,
     complete_deposit,
     deposit_collection,
     atom_dataset,
     swh_storage,
 ):
     """failure: client updates metadata on deposit done without required incomplete metadata
 
        Response: 400
 
     """
     update_uri = reverse(EDIT_IRI, args=[deposit_collection.name, complete_deposit.id])
 
     response = put_atom(
         authenticated_client,
         update_uri,
         # no title, nor author, nor name fields
         data=atom_dataset["entry-data-fail-metadata-functional-checks"],
         HTTP_X_CHECK_SWHID=complete_deposit.swhid,
     )
 
     assert response.status_code == status.HTTP_400_BAD_REQUEST
     assert b"Functional metadata checks failure" in response.content
     # detail on the errors
     msg = (
         b"- Mandatory fields are missing ("
         b"atom:name or atom:title or codemeta:name, "
         b"atom:author or codemeta:author)"
     )
     assert msg in response.content
 
 
 def test_put_atom_with_create_origin_and_external_identifier(
     authenticated_client, deposit_collection, atom_dataset, deposit_user
 ):
     """<atom:external_identifier> was deprecated before <swh:create_origin>
     was introduced, clients should get an error when trying to use both
 
     """
     external_id = "foobar"
     origin_url = deposit_user.provider_url + external_id
     url = reverse(COL_IRI, args=[deposit_collection.name])
 
     response = post_atom(
         authenticated_client,
         url,
         data=atom_dataset["entry-data0"] % origin_url,
         HTTP_IN_PROGRESS="true",
     )
 
     assert response.status_code == status.HTTP_201_CREATED
     response_content = parse_xml(BytesIO(response.content))
 
     for link in response_content["atom:link"]:
         if link["@rel"] == "edit":
             edit_iri = link["@href"]
             break
     else:
         assert False, response_content
 
     # when
     response = put_atom(
         authenticated_client,
         edit_iri,
         data=atom_dataset["error-with-external-identifier"] % external_id,
         HTTP_IN_PROGRESS="false",
     )
 
     assert b"&lt;external_identifier&gt; is deprecated" in response.content
     assert response.status_code == status.HTTP_400_BAD_REQUEST
 
 
 def test_put_atom_with_create_origin_and_reference(
     authenticated_client, deposit_collection, atom_dataset, deposit_user
 ):
     """<swh:reference> and <swh:create_origin> are mutually exclusive
 
     """
     external_id = "foobar"
     origin_url = deposit_user.provider_url + external_id
     url = reverse(COL_IRI, args=[deposit_collection.name])
 
     response = post_atom(
         authenticated_client,
         url,
         data=atom_dataset["entry-data0"] % origin_url,
         HTTP_IN_PROGRESS="true",
     )
 
     assert response.status_code == status.HTTP_201_CREATED
     response_content = parse_xml(BytesIO(response.content))
 
     for link in response_content["atom:link"]:
         if link["@rel"] == "edit":
             edit_iri = link["@href"]
             break
     else:
         assert False, response_content
 
     # when
     response = put_atom(
         authenticated_client,
         edit_iri,
         data=atom_dataset["entry-data-with-origin-reference"].format(url=origin_url),
         HTTP_IN_PROGRESS="false",
     )
 
     assert b"only one may be used on a given deposit" in response.content
     assert response.status_code == status.HTTP_400_BAD_REQUEST
diff --git a/swh/deposit/tests/conftest.py b/swh/deposit/tests/conftest.py
index 131b67f9..67f95d1e 100644
--- a/swh/deposit/tests/conftest.py
+++ b/swh/deposit/tests/conftest.py
@@ -1,479 +1,480 @@
 # Copyright (C) 2019-2021  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import base64
 from functools import partial
 from io import BytesIO
 import os
 import re
 from typing import Mapping
 
 from django.test.utils import setup_databases  # type: ignore
 from django.urls import reverse_lazy as reverse
 import psycopg2
 from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
 import pytest
 from rest_framework import status
 from rest_framework.test import APIClient
 import yaml
 
 from swh.core.config import read
 from swh.core.pytest_plugin import get_response_cb
 from swh.deposit.config import (
     COL_IRI,
     DEPOSIT_STATUS_DEPOSITED,
     DEPOSIT_STATUS_LOAD_FAILURE,
     DEPOSIT_STATUS_LOAD_SUCCESS,
     DEPOSIT_STATUS_PARTIAL,
     DEPOSIT_STATUS_REJECTED,
     DEPOSIT_STATUS_VERIFIED,
     SE_IRI,
     setup_django_for,
 )
 from swh.deposit.parsers import parse_xml
 from swh.deposit.tests.common import (
     create_arborescence_archive,
     post_archive,
     post_atom,
 )
-from swh.model.identifiers import DIRECTORY, REVISION, SNAPSHOT, swhid
+from swh.model.hashutil import hash_to_bytes
+from swh.model.identifiers import CoreSWHID, ObjectType, QualifiedSWHID
 from swh.scheduler import get_scheduler
 
 # mypy is asked to ignore the import statement above because setup_databases
 # is not part of the d.t.utils.__all__ variable.
 
 
# Credentials and metadata for the primary deposit client used by most
# fixtures; provider_url is the prefix of origin URLs computed for its
# deposits.
TEST_USER = {
    "username": "test",
    "password": "password",
    "email": "test@example.org",
    "provider_url": "https://hal-test.archives-ouvertes.fr/",
    "domain": "archives-ouvertes.fr/",
    "collection": {"name": "test"},
}


# A second, independent client/collection pair, for cross-client
# permission tests.
ANOTHER_TEST_USER = {
    "username": "test2",
    "password": "password2",
    "email": "test@example2.org",
    "provider_url": "https://hal-test.archives-ouvertes.example/",
    "domain": "archives-ouvertes.example/",
    "collection": {"name": "another-collection"},
}
 
 
def pytest_configure():
    """Configure django with the 'testing' settings before test collection."""
    setup_django_for("testing")
 
 
@pytest.fixture
def requests_mock_datadir(datadir, requests_mock_datadir):
    """Override default behavior to deal with put/post methods

    Registers the datadir-backed response callback for PUT and POST
    requests on any https:// URL, on top of the inherited fixture.
    """
    cb = partial(get_response_cb, datadir=datadir)
    requests_mock_datadir.put(re.compile("https://"), body=cb)
    requests_mock_datadir.post(re.compile("https://"), body=cb)
    return requests_mock_datadir
 
 
 @pytest.fixture()
 def deposit_config(swh_scheduler_config, swh_storage_backend_config):
     return {
         "max_upload_size": 500,
         "extraction_dir": "/tmp/swh-deposit/test/extraction-dir",
         "checks": False,
         "scheduler": {"cls": "local", **swh_scheduler_config,},
         "storage_metadata": swh_storage_backend_config,
     }
 
 
 @pytest.fixture()
 def deposit_config_path(tmp_path, monkeypatch, deposit_config):
     conf_path = os.path.join(tmp_path, "deposit.yml")
     with open(conf_path, "w") as f:
         f.write(yaml.dump(deposit_config))
     monkeypatch.setenv("SWH_CONFIG_FILENAME", conf_path)
     return conf_path
 
 
@pytest.fixture(autouse=True)
def deposit_autoconfig(deposit_config_path):
    """Enforce config for deposit classes inherited from APIConfig."""
    cfg = read(deposit_config_path)

    if "scheduler" in cfg:
        # scheduler setup: require the check-deposit and load-deposit tasks
        scheduler = get_scheduler(**cfg["scheduler"])
        task_types = [
            {
                "type": "check-deposit",
                "backend_name": "swh.deposit.loader.tasks.ChecksDepositTsk",
                "description": "Check deposit metadata/archive before loading",
                "num_retries": 3,
            },
            {
                "type": "load-deposit",
                "backend_name": "swh.loader.package.deposit.tasks.LoadDeposit",
                "description": "Loading deposit archive into swh archive",
                "num_retries": 3,
            },
        ]
        # register both task types so deposits can be scheduled during tests
        for task_type in task_types:
            scheduler.create_task_type(task_type)
 
 
 @pytest.fixture(scope="session")
 def django_db_setup(request, django_db_blocker, postgresql_proc):
     from django.conf import settings
 
     settings.DATABASES["default"].update(
         {
             ("ENGINE", "django.db.backends.postgresql"),
             ("NAME", "tests"),
             ("USER", postgresql_proc.user),  # noqa
             ("HOST", postgresql_proc.host),  # noqa
             ("PORT", postgresql_proc.port),  # noqa
         }
     )
     with django_db_blocker.unblock():
         setup_databases(
             verbosity=request.config.option.verbose, interactive=False, keepdb=False
         )
 
 
 def execute_sql(sql):
     """Execute sql to postgres db"""
     with psycopg2.connect(database="postgres") as conn:
         conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
         cur = conn.cursor()
         cur.execute(sql)
 
 
 @pytest.fixture(autouse=True, scope="session")
 def swh_proxy():
     """Automatically inject this fixture in all tests to ensure no outside
        connection takes place.
 
     """
     os.environ["http_proxy"] = "http://localhost:999"
     os.environ["https_proxy"] = "http://localhost:999"
 
 
def create_deposit_collection(collection_name: str):
    """Create a deposit collection with name collection_name

    Get-or-create semantics: an existing collection with that name is
    returned unchanged.
    """
    from swh.deposit.models import DepositCollection

    try:
        collection = DepositCollection._default_manager.get(name=collection_name)
    except DepositCollection.DoesNotExist:
        collection = DepositCollection(name=collection_name)
        collection.save()
    return collection
 
 
def deposit_collection_factory(collection_name=TEST_USER["collection"]["name"]):
    """Build a db-backed pytest fixture creating collection ``collection_name``."""

    @pytest.fixture
    def _deposit_collection(db, collection_name=collection_name):
        return create_deposit_collection(collection_name)

    return _deposit_collection
 
 
# One fixture per collection used by the two test users.
deposit_collection = deposit_collection_factory()
deposit_another_collection = deposit_collection_factory("another-collection")
 
 
def _create_deposit_user(db, collection, user_data):
    """Create/Return the deposit user described by ``user_data``.

    Existing users (matched by username) are returned as-is; otherwise the
    user is created and attached to ``collection``.
    """
    from swh.deposit.models import DepositClient

    try:
        user = DepositClient._default_manager.get(username=user_data["username"])
    except DepositClient.DoesNotExist:
        user = DepositClient._default_manager.create_user(
            username=user_data["username"],
            email=user_data["email"],
            password=user_data["password"],
            provider_url=user_data["provider_url"],
            domain=user_data["domain"],
        )
        user.collections = [collection.id]
        user.save()
    return user
 
 
@pytest.fixture
def deposit_user(db, deposit_collection):
    """Primary test user, attached to the default collection."""
    return _create_deposit_user(db, deposit_collection, TEST_USER)
 
 
@pytest.fixture
def deposit_another_user(db, deposit_another_collection):
    """Secondary test user, attached to the other collection."""
    return _create_deposit_user(db, deposit_another_collection, ANOTHER_TEST_USER)
 
 
@pytest.fixture
def client():
    """Override pytest-django one which does not work for djangorestframework.

    """
    return APIClient()  # <- drf's client
 
 
def _create_authenticated_client(client, user, user_data):
    """Return a logged-in client (as a generator: logs out on teardown).

    This also patches the client instance to keep a reference to the
    associated deposit user (``client.deposit_client``).
    """
    # HTTP Basic auth: base64("username:password")
    _token = "%s:%s" % (user.username, user_data["password"])
    token = base64.b64encode(_token.encode("utf-8"))
    authorization = "Basic %s" % token.decode("utf-8")
    client.credentials(HTTP_AUTHORIZATION=authorization)
    client.deposit_client = user
    yield client
    client.logout()
 
 
@pytest.fixture
def authenticated_client(client, deposit_user):
    """DRF client logged in as the primary test user."""
    yield from _create_authenticated_client(client, deposit_user, TEST_USER)
 
 
@pytest.fixture
def another_authenticated_client(deposit_another_user):
    """DRF client logged in as the secondary test user (fresh APIClient)."""
    client = APIClient()
    yield from _create_authenticated_client(
        client, deposit_another_user, ANOTHER_TEST_USER
    )
 
 
 @pytest.fixture
 def sample_archive(tmp_path):
     """Returns a sample archive
 
     """
     tmp_path = str(tmp_path)  # pytest version limitation in previous version
     archive = create_arborescence_archive(
         tmp_path, "archive1", "file1", b"some content in file"
     )
 
     return archive
 
 
 @pytest.fixture
 def atom_dataset(datadir) -> Mapping[str, str]:
     """Compute the paths to atom files.
 
     Returns:
         Dict of atom name per content (bytes)
 
     """
     atom_path = os.path.join(datadir, "atom")
     data = {}
     for filename in os.listdir(atom_path):
         filepath = os.path.join(atom_path, filename)
         with open(filepath, "rb") as f:
             raw_content = f.read().decode("utf-8")
 
         # Keep the filename without extension
         atom_name = filename.split(".")[0]
         data[atom_name] = raw_content
 
     return data
 
 
 def create_deposit(
     authenticated_client,
     collection_name: str,
     sample_archive,
     external_id: str,
     deposit_status=DEPOSIT_STATUS_DEPOSITED,
     in_progress=False,
 ):
     """Create a skeleton shell deposit
 
     """
     url = reverse(COL_IRI, args=[collection_name])
     # when
     response = post_archive(
         authenticated_client,
         url,
         sample_archive,
         HTTP_SLUG=external_id,
         HTTP_IN_PROGRESS=str(in_progress).lower(),
     )
 
     # then
     assert response.status_code == status.HTTP_201_CREATED, response.content.decode()
     from swh.deposit.models import Deposit
 
     response_content = parse_xml(BytesIO(response.content))
     deposit_id = response_content["swh:deposit_id"]
     deposit = Deposit._default_manager.get(id=deposit_id)
 
     if deposit.status != deposit_status:
         deposit.status = deposit_status
         deposit.save()
     assert deposit.status == deposit_status
     return deposit
 
 
def create_binary_deposit(
    authenticated_client,
    collection_name: str,
    deposit_status: str = DEPOSIT_STATUS_DEPOSITED,
    atom_dataset: Mapping[str, bytes] = {},
    **kwargs,
):
    """Create a deposit with both metadata and archive set. Then alters its status
       to `deposit_status`.

    NOTE(review): nothing below actually moves the deposit out of the
    'partial' state (the atom POST keeps HTTP_IN_PROGRESS="true"), so the
    final assert only holds when ``deposit_status`` is
    DEPOSIT_STATUS_PARTIAL, which is how the fixtures call it -- confirm
    before relying on the DEPOSIT_STATUS_DEPOSITED default.
    NOTE(review): the mutable ``{}`` default is only ever read here, never
    mutated, so it is harmless in practice.
    """
    deposit = create_deposit(
        authenticated_client,
        collection_name,
        deposit_status=DEPOSIT_STATUS_PARTIAL,
        **kwargs,
    )

    origin_url = deposit.client.provider_url + deposit.external_id

    # attach metadata to the partial deposit
    response = post_atom(
        authenticated_client,
        reverse(SE_IRI, args=[collection_name, deposit.id]),
        data=atom_dataset["entry-data0"] % origin_url,
        HTTP_IN_PROGRESS="true",
    )

    assert response.status_code == status.HTTP_201_CREATED
    assert deposit.status == DEPOSIT_STATUS_PARTIAL

    from swh.deposit.models import Deposit

    # re-fetch to observe the server-side state
    deposit = Deposit._default_manager.get(pk=deposit.id)

    assert deposit.status == deposit_status
    return deposit
 
 
def deposit_factory(deposit_status=DEPOSIT_STATUS_DEPOSITED, in_progress=False):
    """Build deposit with a specific status

    Returns a pytest fixture; the external id embeds the status so the
    factory's different instances do not collide with each other.
    """

    @pytest.fixture()
    def _deposit(
        sample_archive,
        deposit_collection,
        authenticated_client,
        deposit_status=deposit_status,
    ):
        external_id = "external-id-%s" % deposit_status
        return create_deposit(
            authenticated_client,
            deposit_collection.name,
            sample_archive,
            external_id=external_id,
            deposit_status=deposit_status,
            in_progress=in_progress,
        )

    return _deposit
 
 
# One fixture per deposit lifecycle state, built by the factory above.
deposited_deposit = deposit_factory()
rejected_deposit = deposit_factory(deposit_status=DEPOSIT_STATUS_REJECTED)
partial_deposit = deposit_factory(
    deposit_status=DEPOSIT_STATUS_PARTIAL, in_progress=True
)
verified_deposit = deposit_factory(deposit_status=DEPOSIT_STATUS_VERIFIED)
completed_deposit = deposit_factory(deposit_status=DEPOSIT_STATUS_LOAD_SUCCESS)
failed_deposit = deposit_factory(deposit_status=DEPOSIT_STATUS_LOAD_FAILURE)
 
 
@pytest.fixture
def partial_deposit_with_metadata(
    sample_archive, deposit_collection, authenticated_client, atom_dataset
):
    """Returns deposit with archive and metadata provided, status 'partial'

    Thin wrapper over create_binary_deposit pinned to the partial state.
    """
    return create_binary_deposit(
        authenticated_client,
        deposit_collection.name,
        sample_archive=sample_archive,
        external_id="external-id-partial",
        in_progress=True,
        deposit_status=DEPOSIT_STATUS_PARTIAL,
        atom_dataset=atom_dataset,
    )
 
 
 @pytest.fixture
 def partial_deposit_only_metadata(
     deposit_collection, authenticated_client, atom_dataset
 ):
 
     response = post_atom(
         authenticated_client,
         reverse(COL_IRI, args=[deposit_collection.name]),
         data=atom_dataset["entry-data1"],
         HTTP_SLUG="external-id-partial",
         HTTP_IN_PROGRESS=True,
     )
 
     assert response.status_code == status.HTTP_201_CREATED
 
     response_content = parse_xml(response.content)
     deposit_id = response_content["swh:deposit_id"]
     from swh.deposit.models import Deposit
 
     deposit = Deposit._default_manager.get(pk=deposit_id)
     assert deposit.status == DEPOSIT_STATUS_PARTIAL
     return deposit
 
 
 @pytest.fixture
 def complete_deposit(sample_archive, deposit_collection, authenticated_client):
     """Returns a completed deposit (load success)
 
     """
     deposit = create_deposit(
         authenticated_client,
         deposit_collection.name,
         sample_archive,
         external_id="external-id-complete",
         deposit_status=DEPOSIT_STATUS_LOAD_SUCCESS,
     )
     origin = "https://hal.archives-ouvertes.fr/hal-01727745"
     directory_id = "42a13fc721c8716ff695d0d62fc851d641f3a12b"
-    revision_id = "548b3c0a2bb43e1fca191e24b5803ff6b3bc7c10"
-    snapshot_id = "e5e82d064a9c3df7464223042e0c55d72ccff7f0"
-    deposit.swhid = swhid(DIRECTORY, directory_id)
-    deposit.swhid_context = swhid(
-        DIRECTORY,
-        directory_id,
-        metadata={
-            "origin": origin,
-            "visit": swhid(SNAPSHOT, snapshot_id),
-            "anchor": swhid(REVISION, revision_id),
-            "path": "/",
-        },
+    revision_id = hash_to_bytes("548b3c0a2bb43e1fca191e24b5803ff6b3bc7c10")
+    snapshot_id = hash_to_bytes("e5e82d064a9c3df7464223042e0c55d72ccff7f0")
+    deposit.swhid = f"swh:1:dir:{directory_id}"
+    deposit.swhid_context = str(
+        QualifiedSWHID(
+            object_type=ObjectType.DIRECTORY,
+            object_id=hash_to_bytes(directory_id),
+            origin=origin,
+            visit=CoreSWHID(object_type=ObjectType.SNAPSHOT, object_id=snapshot_id),
+            anchor=CoreSWHID(object_type=ObjectType.REVISION, object_id=revision_id),
+            path=b"/",
+        )
     )
     deposit.save()
     return deposit
 
 
@pytest.fixture()
def tmp_path(tmp_path):
    """Shadow pytest's tmp_path with a plain str."""
    return str(tmp_path)  # issue with oldstable's pytest version
diff --git a/swh/deposit/tests/test_utils.py b/swh/deposit/tests/test_utils.py
index f5bd8986..21842ed1 100644
--- a/swh/deposit/tests/test_utils.py
+++ b/swh/deposit/tests/test_utils.py
@@ -1,305 +1,287 @@
 # Copyright (C) 2018-2020  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
-from typing import Union
 from unittest.mock import patch
 
 import pytest
 
 from swh.deposit import utils
 from swh.deposit.parsers import parse_xml
 from swh.model.exceptions import ValidationError
-from swh.model.identifiers import SWHID, parse_swhid
-from swh.model.model import MetadataTargetType
+from swh.model.identifiers import CoreSWHID, QualifiedSWHID
 
 
@pytest.fixture
def xml_with_origin_reference():
    """Atom entry template carrying a <swh:origin> reference; format() with url=..."""
    xml_data = """<?xml version="1.0"?>
  <entry xmlns="http://www.w3.org/2005/Atom"
           xmlns:codemeta="https://doi.org/10.5063/SCHEMA/CODEMETA-2.0"
           xmlns:swh="https://www.softwareheritage.org/schema/2018/deposit">
      <swh:deposit>
        <swh:reference>
          <swh:origin url="{url}"/>
        </swh:reference>
      </swh:deposit>
  </entry>
    """
    return xml_data.strip()
 
 
def test_merge():
    """Calling utils.merge on dicts should merge without losing information

    Exercises scalar, list, nested-dict, map and generator values.
    """
    d0 = {"author": "someone", "license": [["gpl2"]], "a": 1}

    d1 = {
        "author": ["author0", {"name": "author1"}],
        "license": [["gpl3"]],
        "b": {"1": "2"},
    }

    # lazy iterables (map/generator) must be consumed and merged like lists
    d2 = {"author": map(lambda x: x, ["else"]), "license": "mit", "b": {"2": "3",}}

    d3 = {
        "author": (v for v in ["no one"]),
    }

    actual_merge = utils.merge(d0, d1, d2, d3)

    expected_merge = {
        "a": 1,
        "license": [["gpl2"], ["gpl3"], "mit"],
        "author": ["someone", "author0", {"name": "author1"}, "else", "no one"],
        "b": {"1": "2", "2": "3",},
    }
    assert actual_merge == expected_merge
 
 
def test_merge_2():
    """Conflicting scalar/dict values are collected into lists."""
    d0 = {"license": "gpl2", "runtime": {"os": "unix derivative"}}

    d1 = {"license": "gpl3", "runtime": "GNU/Linux"}

    expected = {
        "license": ["gpl2", "gpl3"],
        "runtime": [{"os": "unix derivative"}, "GNU/Linux"],
    }

    actual = utils.merge(d0, d1)
    assert actual == expected
 
 
def test_merge_edge_cases():
    """Merging with an empty dict or with itself must be a no-op."""
    input_dict = {
        "license": ["gpl2", "gpl3"],
        "runtime": [{"os": "unix derivative"}, "GNU/Linux"],
    }
    # against empty dict
    actual = utils.merge(input_dict, {})
    assert actual == input_dict

    # against oneself
    actual = utils.merge(input_dict, input_dict, input_dict)
    assert actual == input_dict
 
 
def test_merge_one_dict():
    """Merge one dict should result in the same dict value

    """
    input_and_expected = {"anything": "really"}
    actual = utils.merge(input_and_expected)
    assert actual == input_and_expected
 
 
def test_merge_raise():
    """Calling utils.merge with any no dict argument should raise

    A non-dict in any position is rejected with ValueError.
    """
    d0 = {"author": "someone", "a": 1}

    d1 = ["not a dict"]

    with pytest.raises(ValueError):
        utils.merge(d0, d1)

    with pytest.raises(ValueError):
        utils.merge(d1, d0)

    with pytest.raises(ValueError):
        utils.merge(d1)

    # sanity check: a lone dict argument is still accepted
    assert utils.merge(d0) == d0
 
 
@patch("swh.deposit.utils.normalize_timestamp", side_effect=lambda x: x)
def test_normalize_date_0(mock_normalize):
    """When date is a list, choose the first date and normalize it

    Note: We do not test swh.model.identifiers which is already tested
    in swh.model

    """
    # normalize_timestamp is identity-patched: only date parsing is exercised
    actual_date = utils.normalize_date(["2017-10-12", "date1"])

    expected_date = "2017-10-12 00:00:00+00:00"

    assert str(actual_date) == expected_date
 
 
@patch("swh.deposit.utils.normalize_timestamp", side_effect=lambda x: x)
def test_normalize_date_1(mock_normalize):
    """Providing a date in a reasonable format, everything is fine

    Note: We do not test swh.model.identifiers which is already tested
    in swh.model

    """
    # normalize_timestamp is identity-patched: only date parsing is exercised
    actual_date = utils.normalize_date("2018-06-11 17:02:02")

    expected_date = "2018-06-11 17:02:02+00:00"

    assert str(actual_date) == expected_date
 
 
@patch("swh.deposit.utils.normalize_timestamp", side_effect=lambda x: x)
def test_normalize_date_doing_irrelevant_stuff(mock_normalize):
    """Providing a date with only the year results in a reasonable date

    Note: We do not test swh.model.identifiers which is already tested
    in swh.model

    """
    # a bare year is padded to January 1st, midnight UTC
    actual_date = utils.normalize_date("2017")

    expected_date = "2017-01-01 00:00:00+00:00"

    assert str(actual_date) == expected_date
 
 
 @pytest.mark.parametrize(
-    "swhid_or_origin,expected_type,expected_metadata_context",
+    "swhid,expected_metadata_context",
     [
-        ("https://something", MetadataTargetType.ORIGIN, {"origin": None}),
-        (
-            "swh:1:cnt:51b5c8cc985d190b5a7ef4878128ebfdc2358f49",
-            MetadataTargetType.CONTENT,
-            {"origin": None},
-        ),
+        ("swh:1:cnt:51b5c8cc985d190b5a7ef4878128ebfdc2358f49", {"origin": None},),
         (
             "swh:1:snp:51b5c8cc985d190b5a7ef4878128ebfdc2358f49;origin=http://blah",
-            MetadataTargetType.SNAPSHOT,
             {"origin": "http://blah", "path": None},
         ),
         (
             "swh:1:dir:51b5c8cc985d190b5a7ef4878128ebfdc2358f49;path=/path",
-            MetadataTargetType.DIRECTORY,
             {"origin": None, "path": b"/path"},
         ),
         (
             "swh:1:rev:51b5c8cc985d190b5a7ef4878128ebfdc2358f49;visit=swh:1:snp:41b5c8cc985d190b5a7ef4878128ebfdc2358f49",  # noqa
-            MetadataTargetType.REVISION,
             {
                 "origin": None,
                 "path": None,
-                "snapshot": parse_swhid(
+                "snapshot": CoreSWHID.from_string(
                     "swh:1:snp:41b5c8cc985d190b5a7ef4878128ebfdc2358f49"
                 ),
             },
         ),
         (
             "swh:1:rel:51b5c8cc985d190b5a7ef4878128ebfdc2358f49;anchor=swh:1:dir:41b5c8cc985d190b5a7ef4878128ebfdc2358f49",  # noqa
-            MetadataTargetType.RELEASE,
             {
                 "origin": None,
                 "path": None,
-                "directory": parse_swhid(
+                "directory": CoreSWHID.from_string(
                     "swh:1:dir:41b5c8cc985d190b5a7ef4878128ebfdc2358f49"
                 ),
             },
         ),
     ],
 )
-def test_compute_metadata_context(
-    swhid_or_origin: Union[str, SWHID], expected_type, expected_metadata_context
-):
-    if expected_type != MetadataTargetType.ORIGIN:
-        assert isinstance(swhid_or_origin, str)
-        swhid_or_origin = parse_swhid(swhid_or_origin)
-
-    object_type, metadata_context = utils.compute_metadata_context(swhid_or_origin)
-
-    assert object_type == expected_type
-    assert metadata_context == expected_metadata_context
+def test_compute_metadata_context(swhid: str, expected_metadata_context):
+    assert expected_metadata_context == utils.compute_metadata_context(
+        QualifiedSWHID.from_string(swhid)
+    )
 
 
def test_parse_swh_reference_origin(xml_with_origin_reference):
    """An <swh:origin> reference parses to its url string."""
    url = "https://url"
    xml_data = xml_with_origin_reference.format(url=url)
    metadata = parse_xml(xml_data)

    actual_origin = utils.parse_swh_reference(metadata)
    assert actual_origin == url
 
 
@pytest.fixture
def xml_with_empty_reference():
    """Atom entry template; format() with swh_reference=... to fill the deposit."""
    xml_data = """<?xml version="1.0"?>
  <entry xmlns:swh="https://www.softwareheritage.org/schema/2018/deposit">
      <swh:deposit>
        {swh_reference}
      </swh:deposit>
  </entry>
    """
    return xml_data.strip()
 
 
@pytest.mark.parametrize(
    "xml_ref",
    [
        "",
        "<swh:reference></swh:reference>",
        "<swh:reference><swh:object /></swh:reference>",
        """<swh:reference><swh:object swhid="" /></swh:reference>""",
    ],
)
def test_parse_swh_reference_empty(xml_with_empty_reference, xml_ref):
    """A missing or empty swh:reference yields None (not an error)."""
    xml_body = xml_with_empty_reference.format(swh_reference=xml_ref)
    metadata = utils.parse_xml(xml_body)

    assert utils.parse_swh_reference(metadata) is None
 
 
@pytest.fixture
def xml_with_swhid(atom_dataset):
    """Atom entry template referencing a SWHID; format() with swhid=..."""
    return atom_dataset["entry-data-with-swhid"]
 
 
 @pytest.mark.parametrize(
     "swhid",
     [
         "swh:1:cnt:31b5c8cc985d190b5a7ef4878128ebfdc2358f49;origin=https://hal.archives-ouvertes.fr/hal-01243573;visit=swh:1:snp:4fc1e36fca86b2070204bedd51106014a614f321;anchor=swh:1:rev:9c5de20cfb54682370a398fcc733e829903c8cba;path=/moranegg-AffectationRO-df7f68b/",  # noqa
         "swh:1:dir:31b5c8cc985d190b5a7ef4878128ebfdc2358f49;anchor=swh:1:dir:9c5de20cfb54682370a398fcc733e829903c8cba",  # noqa
         "swh:1:rev:31b5c8cc985d190b5a7ef4878128ebfdc2358f49;anchor=swh:1:rev:9c5de20cfb54682370a398fcc733e829903c8cba",  # noqa
         "swh:1:rel:31b5c8cc985d190b5a7ef4878128ebfdc2358f49;anchor=swh:1:rel:9c5de20cfb54682370a398fcc733e829903c8cba",  # noqa
         "swh:1:snp:31b5c8cc985d190b5a7ef4878128ebfdc2358f49;anchor=swh:1:snp:9c5de20cfb54682370a398fcc733e829903c8cba",  # noqa
         "swh:1:dir:31b5c8cc985d190b5a7ef4878128ebfdc2358f49",
     ],
 )
 def test_parse_swh_reference_swhid(swhid, xml_with_swhid):
     xml_data = xml_with_swhid.format(swhid=swhid)
     metadata = utils.parse_xml(xml_data)
 
     actual_swhid = utils.parse_swh_reference(metadata)
     assert actual_swhid is not None
 
-    expected_swhid = parse_swhid(swhid)
+    expected_swhid = QualifiedSWHID.from_string(swhid)
     assert actual_swhid == expected_swhid
 
 
 @pytest.mark.parametrize(
     "invalid_swhid",
     [
         # incorrect length
         "swh:1:cnt:31b5c8cc985d190b5a7ef4878128ebfdc235"  # noqa
         # visit qualifier should be a core SWHID with type,
         "swh:1:dir:c4993c872593e960dc84e4430dbbfbc34fd706d0;visit=swh:1:rev:0175049fc45055a3824a1675ac06e3711619a55a",  # noqa
         # anchor qualifier should be a core SWHID with type one of
         "swh:1:rev:c4993c872593e960dc84e4430dbbfbc34fd706d0;anchor=swh:1:cnt:b5f505b005435fa5c4fa4c279792bd7b17167c04;path=/",  # noqa
-        "swh:1:rev:c4993c872593e960dc84e4430dbbfbc34fd706d0;visit=swh:1:snp:0175049fc45055a3824a1675ac06e3711619a55a;anchor=swh:1:snp:b5f505b005435fa5c4fa4c279792bd7b17167c04"  # noqa
+        "swh:1:rev:c4993c872593e960dc84e4430dbbfbc34fd706d0;visit=swh:1:snp:0175049fc45055a3824a1675ac06e3711619a55a;anchor=swh:1:snp:b5f505b005435fa5c4fa4c279792bd7b17167c04",  # noqa
     ],
 )
 def test_parse_swh_reference_invalid_swhid(invalid_swhid, xml_with_swhid):
     """Unparsable swhid should raise
 
     """
     xml_invalid_swhid = xml_with_swhid.format(swhid=invalid_swhid)
     metadata = utils.parse_xml(xml_invalid_swhid)
 
     with pytest.raises(ValidationError):
         utils.parse_swh_reference(metadata)
diff --git a/swh/deposit/utils.py b/swh/deposit/utils.py
index 9bbd12c3..3482ff60 100644
--- a/swh/deposit/utils.py
+++ b/swh/deposit/utils.py
@@ -1,244 +1,234 @@
 # Copyright (C) 2018-2020 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import logging
 from types import GeneratorType
-from typing import Any, Dict, Optional, Tuple, Union
+from typing import Any, Dict, Optional, Union
 
 import iso8601
 import xmltodict
 
 from swh.model.exceptions import ValidationError
 from swh.model.identifiers import (
-    DIRECTORY,
-    RELEASE,
-    REVISION,
-    SNAPSHOT,
-    SWHID,
+    ExtendedSWHID,
+    ObjectType,
+    QualifiedSWHID,
     normalize_timestamp,
-    parse_swhid,
 )
-from swh.model.model import MetadataTargetType
 
 logger = logging.getLogger(__name__)
 
 
 def parse_xml(stream, encoding="utf-8"):
     namespaces = {
         "http://www.w3.org/2005/Atom": "atom",
         "http://www.w3.org/2007/app": "app",
         "http://purl.org/dc/terms/": "dc",
         "https://doi.org/10.5063/SCHEMA/CODEMETA-2.0": "codemeta",
         "http://purl.org/net/sword/terms/": "sword",
         "https://www.softwareheritage.org/schema/2018/deposit": "swh",
     }
 
     data = xmltodict.parse(
         stream,
         encoding=encoding,
         namespaces=namespaces,
         process_namespaces=True,
         dict_constructor=dict,
     )
     if "atom:entry" in data:
         data = data["atom:entry"]
     return data
 
 
 def merge(*dicts):
     """Given an iterator of dicts, merge them losing no information.
 
     Args:
         *dicts: arguments are all supposed to be dict to merge into one
 
     Returns:
         dict merged without losing information
 
     """
 
     def _extend(existing_val, value):
         """Given an existing value and a value (as potential lists), merge
            them together without repetition.
 
         """
         if isinstance(value, (list, map, GeneratorType)):
             vals = value
         else:
             vals = [value]
         for v in vals:
             if v in existing_val:
                 continue
             existing_val.append(v)
         return existing_val
 
     d = {}
     for data in dicts:
         if not isinstance(data, dict):
             raise ValueError("dicts is supposed to be a variable arguments of dict")
 
         for key, value in data.items():
             existing_val = d.get(key)
             if not existing_val:
                 d[key] = value
                 continue
             if isinstance(existing_val, (list, map, GeneratorType)):
                 new_val = _extend(existing_val, value)
             elif isinstance(existing_val, dict):
                 if isinstance(value, dict):
                     new_val = merge(existing_val, value)
                 else:
                     new_val = _extend([existing_val], value)
             else:
                 new_val = _extend([existing_val], value)
             d[key] = new_val
     return d
 
 
 def normalize_date(date):
     """Normalize date fields as expected by swh workers.
 
     If date is a list, elect arbitrarily the first element of that
     list
 
     If date is (then) a string, parse it through
     dateutil.parser.parse to extract a datetime.
 
     Then normalize it through
     swh.model.identifiers.normalize_timestamp.
 
     Returns
         The swh date object
 
     """
     if isinstance(date, list):
         date = date[0]
     if isinstance(date, str):
         date = iso8601.parse_date(date)
 
     return normalize_timestamp(date)
 
 
-def compute_metadata_context(
-    swhid_reference: Union[SWHID, str]
-) -> Tuple[MetadataTargetType, Dict[str, Any]]:
+def compute_metadata_context(swhid_reference: QualifiedSWHID) -> Dict[str, Any]:
     """Given a SWHID object, determine the context as a dict.
 
-    The parse_swhid calls within are not expected to raise (because they should have
-    been caught early on).
-
     """
     metadata_context: Dict[str, Any] = {"origin": None}
-    if isinstance(swhid_reference, SWHID):
-        object_type = MetadataTargetType(swhid_reference.object_type)
-        assert object_type != MetadataTargetType.ORIGIN
-
-        if swhid_reference.metadata:
-            path = swhid_reference.metadata.get("path")
-            metadata_context = {
-                "origin": swhid_reference.metadata.get("origin"),
-                "path": path.encode() if path else None,
-            }
-            snapshot = swhid_reference.metadata.get("visit")
-            if snapshot:
-                metadata_context["snapshot"] = parse_swhid(snapshot)
-
-            anchor = swhid_reference.metadata.get("anchor")
-            if anchor:
-                anchor_swhid = parse_swhid(anchor)
-                metadata_context[anchor_swhid.object_type] = anchor_swhid
-    else:
-        object_type = MetadataTargetType.ORIGIN
+    if swhid_reference.qualifiers():
+        metadata_context = {
+            "origin": swhid_reference.origin,
+            "path": swhid_reference.path,
+        }
+        snapshot = swhid_reference.visit
+        if snapshot:
+            metadata_context["snapshot"] = snapshot
+
+        anchor = swhid_reference.anchor
+        if anchor:
+            metadata_context[anchor.object_type.name.lower()] = anchor
 
-    return object_type, metadata_context
+    return metadata_context
 
 
-ALLOWED_QUALIFIERS_NODE_TYPE = (SNAPSHOT, REVISION, RELEASE, DIRECTORY)
+ALLOWED_QUALIFIERS_NODE_TYPE = (
+    ObjectType.SNAPSHOT,
+    ObjectType.REVISION,
+    ObjectType.RELEASE,
+    ObjectType.DIRECTORY,
+)
 
 
-def parse_swh_reference(metadata: Dict) -> Optional[Union[str, SWHID]]:
+def parse_swh_reference(metadata: Dict) -> Optional[Union[QualifiedSWHID, str]]:
     """Parse swh reference within the metadata dict (or origin) reference if found, None
     otherwise.
 
     <swh:deposit>
       <swh:reference>
         <swh:origin url='https://github.com/user/repo'/>
       </swh:reference>
     </swh:deposit>
 
     or:
 
     <swh:deposit>
       <swh:reference>
         <swh:object swhid="swh:1:dir:31b5c8cc985d190b5a7ef4878128ebfdc2358f49;origin=https://hal.archives-ouvertes.fr/hal-01243573;visit=swh:1:snp:4fc1e36fca86b2070204bedd51106014a614f321;anchor=swh:1:rev:9c5de20cfb54682370a398fcc733e829903c8cba;path=/moranegg-AffectationRO-df7f68b/"
       />
     </swh:deposit>
 
     Raises:
         ValidationError in case the swhid referenced (if any) is invalid
 
     Returns:
         Either swhid or origin reference if any. None otherwise.
 
     """  # noqa
-    visit_swhid = None
-    anchor_swhid = None
-
     swh_deposit = metadata.get("swh:deposit")
     if not swh_deposit:
         return None
 
     swh_reference = swh_deposit.get("swh:reference")
     if not swh_reference:
         return None
 
     swh_origin = swh_reference.get("swh:origin")
     if swh_origin:
         url = swh_origin.get("@url")
         if url:
             return url
 
     swh_object = swh_reference.get("swh:object")
     if not swh_object:
         return None
 
     swhid = swh_object.get("@swhid")
     if not swhid:
         return None
-    swhid_reference = parse_swhid(swhid)
+    swhid_reference = QualifiedSWHID.from_string(swhid)
 
-    if swhid_reference.metadata:
-        anchor = swhid_reference.metadata.get("anchor")
+    if swhid_reference.qualifiers():
+        anchor = swhid_reference.anchor
         if anchor:
-            anchor_swhid = parse_swhid(anchor)
-            if anchor_swhid.object_type not in ALLOWED_QUALIFIERS_NODE_TYPE:
+            if anchor.object_type not in ALLOWED_QUALIFIERS_NODE_TYPE:
                 error_msg = (
                     "anchor qualifier should be a core SWHID with type one of "
-                    f" {', '.join(ALLOWED_QUALIFIERS_NODE_TYPE)}"
+                    f"{', '.join(t.name.lower() for t in ALLOWED_QUALIFIERS_NODE_TYPE)}"
                 )
                 raise ValidationError(error_msg)
 
-        visit = swhid_reference.metadata.get("visit")
+        visit = swhid_reference.visit
         if visit:
-            visit_swhid = parse_swhid(visit)
-            if visit_swhid.object_type != SNAPSHOT:
+            if visit.object_type != ObjectType.SNAPSHOT:
                 raise ValidationError(
-                    f"visit qualifier should be a core SWHID with type {SNAPSHOT}"
+                    f"visit qualifier should be a core SWHID with type snp, "
+                    f"not {visit.object_type.value}"
                 )
 
         if (
-            visit_swhid
-            and anchor_swhid
-            and visit_swhid.object_type == SNAPSHOT
-            and anchor_swhid.object_type == SNAPSHOT
+            visit
+            and anchor
+            and visit.object_type == ObjectType.SNAPSHOT
+            and anchor.object_type == ObjectType.SNAPSHOT
         ):
             logger.warn(
                 "SWHID use of both anchor and visit targeting "
                 f"a snapshot: {swhid_reference}"
             )
             raise ValidationError(
                 "'anchor=swh:1:snp:' is not supported when 'visit' is also provided."
             )
 
     return swhid_reference
+
+
+def extended_swhid_from_qualified(swhid: QualifiedSWHID) -> ExtendedSWHID:
+    """Return the ExtendedSWHID addressing the same object as ``swhid``, i.e.
+    the core SWHID with all qualifiers stripped. Used to get the target of a
+    metadata object from a <swh:reference>, as the latter uses a QualifiedSWHID."""
+    return ExtendedSWHID.from_string(str(swhid).split(";")[0])