D3868.id13695.diff
diff --git a/swh/loader/package/archive/tests/test_archive.py b/swh/loader/package/archive/tests/test_archive.py
--- a/swh/loader/package/archive/tests/test_archive.py
+++ b/swh/loader/package/archive/tests/test_archive.py
@@ -122,12 +122,11 @@
     assert_last_visit_matches(loader.storage, URL, status="full", type="tar")
 
     expected_revision_id = hash_to_bytes("44183488c0774ce3c957fa19ba695cf18a4a42b3")
-    revision = list(loader.storage.revision_get([expected_revision_id]))[0]
-
+    revision = loader.storage.revision_get([expected_revision_id])[0]
     assert revision is not None
 
     check_metadata_paths(
-        revision["metadata"],
+        revision.metadata,
         paths=[
             ("intrinsic", dict),
             ("extrinsic.provider", str),
@@ -137,7 +136,7 @@
         ],
     )
 
-    for original_artifact in revision["metadata"]["original_artifact"]:
+    for original_artifact in revision.metadata["original_artifact"]:
         check_metadata_paths(
             original_artifact,
             paths=[("filename", str), ("length", int), ("checksums", dict),],
diff --git a/swh/loader/package/deposit/loader.py b/swh/loader/package/deposit/loader.py
--- a/swh/loader/package/deposit/loader.py
+++ b/swh/loader/package/deposit/loader.py
@@ -218,24 +218,17 @@
                 return r
 
             rev_id = branches[b"HEAD"].target
-            revisions = list(self.storage.revision_get([rev_id]))
-            if not revisions:
-                return r
-
-            revision = revisions[0]
+            revision = self.storage.revision_get([rev_id])[0]
             if not revision:
                 return r
 
-            # Retrieve the revision identifier
-            dir_id = revision["directory"]
-
             # update the deposit's status to success with its
             # revision-id and directory-id
             self.client.status_update(
                 self.deposit_id,
                 status="done",
                 revision_id=hash_to_hex(rev_id),
-                directory_id=hash_to_hex(dir_id),
+                directory_id=hash_to_hex(revision.directory),
                 snapshot_id=r["snapshot_id"],
                 origin_url=self.url,
             )
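
With the new return type, a lookup miss is a None entry at the matching index, so the loader can index [0] unconditionally and guard on the element itself; the separate "empty list" check removed above becomes dead code. A sketch of that failure path (revision_get_stub is a hypothetical stand-in):

from typing import List, Optional


def revision_get_stub(ids: List[bytes]) -> List[Optional[dict]]:
    # Stand-in behaving like a storage that knows none of the requested ids.
    return [None for _ in ids]


revision = revision_get_stub([b"missing-id"])[0]
if not revision:
    # Mirrors the loader above: bail out early, leave the visit result as-is.
    print("revision not found; deposit status not updated")
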
diff --git a/swh/loader/package/deposit/tests/test_deposit.py b/swh/loader/package/deposit/tests/test_deposit.py
--- a/swh/loader/package/deposit/tests/test_deposit.py
+++ b/swh/loader/package/deposit/tests/test_deposit.py
@@ -129,12 +129,11 @@
     assert actual_load_status["status"] == "eventful"
     assert actual_load_status["snapshot_id"] is not None
     expected_revision_id = hash_to_bytes("637318680351f5d78856d13264faebbd91efe9bb")
-    revision = list(loader.storage.revision_get([expected_revision_id]))[0]
-
+    revision = loader.storage.revision_get([expected_revision_id])[0]
     assert revision is not None
 
     check_metadata_paths(
-        revision["metadata"],
+        revision.metadata,
         paths=[
             ("extrinsic.provider", str),
             ("extrinsic.when", str),
@@ -144,9 +143,9 @@
     )
 
     # Only 2 top-level keys now
-    assert set(revision["metadata"].keys()) == {"extrinsic", "original_artifact"}
+    assert set(revision.metadata.keys()) == {"extrinsic", "original_artifact"}
 
-    for original_artifact in revision["metadata"]["original_artifact"]:
+    for original_artifact in revision.metadata["original_artifact"]:
         check_metadata_paths(
             original_artifact,
             paths=[("filename", str), ("length", int), ("checksums", dict),],
@@ -193,8 +192,8 @@
     )
 
     check_snapshot(expected_snapshot, storage=loader.storage)
 
-    revision = next(loader.storage.revision_get([revision_id]))
-    assert revision
+    revision = loader.storage.revision_get([revision_id])[0]
+    assert revision is not None
 
     # check metadata
@@ -240,7 +239,7 @@
     expected_body = {
         "status": "done",
         "revision_id": revision_id_hex,
-        "directory_id": hash_to_hex(revision["directory"]),
+        "directory_id": hash_to_hex(revision.directory),
         "snapshot_id": expected_snapshot_id,
         "origin_url": url,
     }
@@ -282,14 +281,14 @@
 
     # Ensure the date fields are set appropriately in the revision
     # Retrieve the revision
-    revision = next(loader.storage.revision_get([hash_to_bytes(revision_id)]))
+    revision = loader.storage.revision_get([hash_to_bytes(revision_id)])[0]
     assert revision
-    assert revision["date"] == raw_meta["deposit"]["author_date"]
-    assert revision["committer_date"] == raw_meta["deposit"]["committer_date"]
+    assert revision.date.to_dict() == raw_meta["deposit"]["author_date"]
+    assert revision.committer_date.to_dict() == raw_meta["deposit"]["committer_date"]
 
     read_api = f"{DEPOSIT_URL}/{deposit_id}/meta/"
 
-    assert revision["metadata"] == {
+    assert revision.metadata == {
         "extrinsic": {
             "provider": read_api,
             "raw": {
@@ -316,7 +315,7 @@
                     },
                 },
             },
-            "when": revision["metadata"]["extrinsic"]["when"],  # dynamic
+            "when": revision.metadata["extrinsic"]["when"],  # dynamic
         },
         "original_artifact": [
             {
@@ -401,7 +400,7 @@
     expected_body = {
         "status": "done",
         "revision_id": revision_id,
-        "directory_id": hash_to_hex(revision["directory"]),
+        "directory_id": hash_to_hex(revision.directory),
         "snapshot_id": expected_snapshot_id,
         "origin_url": url,
     }
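
The date assertions change because Revision.date and Revision.committer_date are now TimestampWithTimezone model objects rather than plain dicts; the test serializes them with .to_dict() before comparing against the raw deposit metadata. A stand-in sketch of why the conversion is needed (the dict layout below is illustrative only, not the model's exact serialization):

from dataclasses import dataclass


@dataclass(frozen=True)
class FakeTimestampWithTimezone:
    """Stand-in for swh.model.model.TimestampWithTimezone."""

    seconds: int
    microseconds: int
    offset: int

    def to_dict(self) -> dict:
        # Illustrative layout only; the real model defines its own serialization.
        return {
            "timestamp": {"seconds": self.seconds, "microseconds": self.microseconds},
            "offset": self.offset,
        }


raw_author_date = {"timestamp": {"seconds": 1577836800, "microseconds": 0}, "offset": 0}
date = FakeTimestampWithTimezone(seconds=1577836800, microseconds=0, offset=0)

# A model object never compares equal to a plain dict, hence .to_dict() in the test.
assert date != raw_author_date
assert date.to_dict() == raw_author_date
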
diff --git a/swh/loader/package/loader.py b/swh/loader/package/loader.py
--- a/swh/loader/package/loader.py
+++ b/swh/loader/package/loader.py
@@ -34,7 +34,6 @@
 from swh.model.collections import ImmutableDict
 from swh.model.hashutil import hash_to_hex
 from swh.model.model import (
-    BaseModel,
     Sha1Git,
     Revision,
     TargetType,
@@ -209,7 +208,9 @@
"""
return snapshot_get_latest(self.storage, self.url)
- def known_artifacts(self, snapshot: Optional[Snapshot]) -> Dict[Sha1Git, BaseModel]:
+ def known_artifacts(
+ self, snapshot: Optional[Snapshot]
+ ) -> Dict[Sha1Git, Optional[ImmutableDict[str, object]]]:
"""Retrieve the known releases/artifact for the origin.
Args
@@ -229,11 +230,8 @@
             if rev and rev.target_type == TargetType.REVISION
         ]
         known_revisions = self.storage.revision_get(revs)
-
         return {
-            revision["id"]: revision["metadata"]
-            for revision in known_revisions
-            if revision
+            revision.id: revision.metadata for revision in known_revisions if revision
         }
 
     def resolve_revision_from(
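
The new annotation follows the model types: Revision.metadata is an optional ImmutableDict, so the mapping built by known_artifacts carries Optional values. A self-contained sketch of the same comprehension (Rev and known_artifacts_stub are stand-ins; note that lookup misses are dropped while None metadata is kept):

from dataclasses import dataclass
from typing import Dict, List, Mapping, Optional

Sha1Git = bytes  # alias, as in swh.model.model


@dataclass(frozen=True)
class Rev:
    """Stand-in revision: metadata may legitimately be None."""

    id: Sha1Git
    metadata: Optional[Mapping[str, object]]


def known_artifacts_stub(
    revisions: List[Optional[Rev]],
) -> Dict[Sha1Git, Optional[Mapping[str, object]]]:
    # Same shape as the comprehension above: drop lookup misses (None entries),
    # but keep revisions whose metadata is None.
    return {rev.id: rev.metadata for rev in revisions if rev}


result = known_artifacts_stub(
    [Rev(id=b"\x01", metadata=None), None, Rev(id=b"\x02", metadata={"a": 1})]
)
assert result == {b"\x01": None, b"\x02": {"a": 1}}
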
diff --git a/swh/loader/package/nixguix/loader.py b/swh/loader/package/nixguix/loader.py
--- a/swh/loader/package/nixguix/loader.py
+++ b/swh/loader/package/nixguix/loader.py
@@ -13,6 +13,7 @@
 import attr
 
 from swh.model import hashutil
+from swh.model.collections import ImmutableDict
 from swh.model.model import (
     MetadataAuthority,
     MetadataAuthorityType,
@@ -20,7 +21,6 @@
     RevisionType,
     TargetType,
     Snapshot,
-    BaseModel,
     Sha1Git,
 )
 
@@ -110,7 +110,9 @@
         p_info = NixGuixPackageInfo.from_metadata({"url": url, "integrity": integrity})
         yield url, p_info
 
-    def known_artifacts(self, snapshot: Optional[Snapshot]) -> Dict[Sha1Git, BaseModel]:
+    def known_artifacts(
+        self, snapshot: Optional[Snapshot]
+    ) -> Dict[Sha1Git, Optional[ImmutableDict[str, object]]]:
         """Almost same implementation as the default one except it filters out the extra
         "evaluation" branch which does not have the right metadata structure.
 
@@ -134,7 +136,7 @@
         for revision in known_revisions:
             if not revision:  # revision_get can return None
                 continue
-            ret[revision["id"]] = revision["metadata"]
+            ret[revision.id] = revision.metadata
         return ret
 
     def resolve_revision_from(
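
Same migration in the overridden method, in loop form: the explicit None check runs before any attribute access, mirroring the comprehension's `if revision` filter in the base class. A stand-alone sketch of that shape (Rev and collect are illustrative stand-ins, not the real loader):

from typing import Dict, Iterable, Optional


class Rev:
    """Stand-in revision object with attribute access."""

    def __init__(self, id: bytes, metadata: Optional[dict]) -> None:
        self.id = id
        self.metadata = metadata


def collect(revisions: Iterable[Optional[Rev]]) -> Dict[bytes, Optional[dict]]:
    ret: Dict[bytes, Optional[dict]] = {}
    for revision in revisions:
        if not revision:  # revision_get can return None
            continue
        ret[revision.id] = revision.metadata
    return ret


assert collect([None, Rev(b"\x01", {"extrinsic": {}})]) == {b"\x01": {"extrinsic": {}}}
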
diff --git a/swh/loader/package/nixguix/tests/test_nixguix.py b/swh/loader/package/nixguix/tests/test_nixguix.py
--- a/swh/loader/package/nixguix/tests/test_nixguix.py
+++ b/swh/loader/package/nixguix/tests/test_nixguix.py
@@ -99,7 +99,8 @@
     revisions = storage.revision_get(revision_ids)
     for rev in revisions:
         assert rev is not None
-        metadata = rev["metadata"]
+        metadata = rev.metadata
+        assert metadata is not None
         raw = metadata["extrinsic"]["raw"]
         assert "url" in raw
         assert "integrity" in raw
@@ -618,9 +619,9 @@
     ) as last_snapshot:
         # mutate the snapshot to target a revision with the wrong metadata structure
         # snapshot["branches"][artifact_url.encode("utf-8")] = first_revision
-        old_revision = next(loader.storage.revision_get([first_revision.target]))
+        old_revision = loader.storage.revision_get([first_revision.target])[0]
         # assert that revision is not in the right format
-        assert old_revision["metadata"]["extrinsic"]["raw"].get("integrity", {}) == {}
+        assert old_revision.metadata["extrinsic"]["raw"].get("integrity", {}) == {}
 
         # mutate snapshot to create a clash
         snapshot = attr.evolve(
@@ -629,7 +630,7 @@
                 **snapshot.branches,
                 artifact_url.encode("utf-8"): SnapshotBranch(
                     target_type=TargetType.REVISION,
-                    target=hash_to_bytes(old_revision["id"]),
+                    target=hash_to_bytes(old_revision.id),
                 ),
             },
         )
@@ -656,11 +657,11 @@
     new_revision_branch = last_snapshot.branches[artifact_url.encode("utf-8")]
     assert new_revision_branch.target_type == TargetType.REVISION
 
-    new_revision = next(loader.storage.revision_get([new_revision_branch.target]))
+    new_revision = loader.storage.revision_get([new_revision_branch.target])[0]
 
     # the new revision has the correct structure, so it got ingested alright by the
     # new run
-    assert new_revision["metadata"]["extrinsic"]["raw"]["integrity"] is not None
+    assert new_revision.metadata["extrinsic"]["raw"]["integrity"] is not None
 
     nb_detections = 0
     actual_detection: Dict
@@ -675,7 +676,7 @@
 
     assert nb_detections == len(all_sources["sources"])
     assert actual_detection == {
-        "revision": hash_to_hex(old_revision["id"]),
+        "revision": hash_to_hex(old_revision.id),
         "reason": "'integrity'",
-        "known_artifact": old_revision["metadata"],
+        "known_artifact": old_revision.metadata,
     }
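
A side note on the test technique above: swh.model objects are frozen attrs classes, so the test cannot assign to snapshot.branches; it builds a modified copy with attr.evolve and a merged branches dict. A minimal attrs sketch of the same trick (Branch and Snap are stand-ins, not swh.model classes):

import attr


@attr.s(frozen=True)
class Branch:
    target = attr.ib(type=bytes)


@attr.s(frozen=True)
class Snap:
    branches = attr.ib(type=dict)


snapshot = Snap(branches={b"a": Branch(target=b"\x01")})

# Frozen instances raise on attribute assignment, so build a modified copy:
clashed = attr.evolve(
    snapshot, branches={**snapshot.branches, b"b": Branch(target=b"\x02")}
)
assert set(clashed.branches) == {b"a", b"b"}
assert set(snapshot.branches) == {b"a"}  # the original is untouched
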
diff --git a/swh/loader/package/npm/tests/test_npm.py b/swh/loader/package/npm/tests/test_npm.py
--- a/swh/loader/package/npm/tests/test_npm.py
+++ b/swh/loader/package/npm/tests/test_npm.py
@@ -319,12 +319,11 @@
     assert actual_load_status["snapshot_id"] is not None
 
     expected_revision_id = hash_to_bytes("d8a1c7474d2956ac598a19f0f27d52f7015f117e")
-    revision = list(loader.storage.revision_get([expected_revision_id]))[0]
-
+    revision = loader.storage.revision_get([expected_revision_id])[0]
     assert revision is not None
 
     check_metadata_paths(
-        revision["metadata"],
+        revision.metadata,
         paths=[
             ("intrinsic.tool", str),
             ("intrinsic.raw", dict),
@@ -335,7 +334,7 @@
         ],
     )
 
-    for original_artifact in revision["metadata"]["original_artifact"]:
+    for original_artifact in revision.metadata["original_artifact"]:
         check_metadata_paths(
             original_artifact,
             paths=[("filename", str), ("length", int), ("checksums", dict),],
diff --git a/swh/loader/package/pypi/tests/test_pypi.py b/swh/loader/package/pypi/tests/test_pypi.py
--- a/swh/loader/package/pypi/tests/test_pypi.py
+++ b/swh/loader/package/pypi/tests/test_pypi.py
@@ -313,12 +313,11 @@
     assert actual_load_status["snapshot_id"] is not None
 
     expected_revision_id = hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21")
-    revision = list(loader.storage.revision_get([expected_revision_id]))[0]
-
+    revision = loader.storage.revision_get([expected_revision_id])[0]
     assert revision is not None
 
     check_metadata_paths(
-        revision["metadata"],
+        revision.metadata,
         paths=[
             ("intrinsic.tool", str),
             ("intrinsic.raw", dict),
@@ -329,7 +328,7 @@
         ],
     )
 
-    for original_artifact in revision["metadata"]["original_artifact"]:
+    for original_artifact in revision.metadata["original_artifact"]:
         check_metadata_paths(
             original_artifact,
             paths=[("filename", str), ("length", int), ("checksums", dict),],
diff --git a/swh/loader/tests/__init__.py b/swh/loader/tests/__init__.py
--- a/swh/loader/tests/__init__.py
+++ b/swh/loader/tests/__init__.py
@@ -166,7 +166,7 @@
 
     revs = objects_by_target_type.get(TargetType.REVISION)
     if revs:
-        revisions = list(storage.revision_get(revs))
+        revisions = storage.revision_get(revs)
         not_found = [rev_id for rev_id, rev in zip(revs, revisions) if rev is None]
         if not_found:
             missing_objs = ", ".join(
@@ -176,10 +176,10 @@
f"Branch/Revision(s) {missing_objs} should exist in storage"
)
# retrieve information from revision
- for rev in revisions:
- assert rev is not None
- objects_by_target_type[TargetType.DIRECTORY].append(rev["directory"])
- object_to_branch[rev["directory"]] = rev["id"]
+ for revision in revisions:
+ assert revision is not None
+ objects_by_target_type[TargetType.DIRECTORY].append(revision.directory)
+ object_to_branch[revision.directory] = revision.id
rels = objects_by_target_type.get(TargetType.RELEASE)
if rels:
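
The test helper relies on the same positional contract as the loaders: revision_get returns one entry per requested id, in order, so zip(revs, revisions) pairs each id with its result and the None entries identify the missing ones. A tiny illustration (find_missing is a hypothetical stand-in):

from typing import List, Optional


def find_missing(ids: List[bytes], results: List[Optional[object]]) -> List[bytes]:
    # revision_get keeps results positionally aligned with the requested ids,
    # which is what makes this zip()-based check valid.
    return [rev_id for rev_id, rev in zip(ids, results) if rev is None]


assert find_missing([b"\x01", b"\x02"], [object(), None]) == [b"\x02"]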
