diff --git a/swh/storage/pytest_plugin.py b/swh/storage/pytest_plugin.py --- a/swh/storage/pytest_plugin.py +++ b/swh/storage/pytest_plugin.py @@ -212,7 +212,13 @@ return { "content": [data.content, data.content2, data.content3], "skipped_content": [data.skipped_content, data.skipped_content2], - "directory": [data.dir2, data.dir, data.dir3, data.dir4], + "directory": [ + data.directory2, + data.directory, + data.directory3, + data.directory4, + data.directory5, + ], "revision": [data.revision, data.revision2, data.revision3, data.revision4], "release": [data.release, data.release2, data.release3], "snapshot": [data.snapshot, data.empty_snapshot, data.complete_snapshot], diff --git a/swh/storage/tests/storage_data.py b/swh/storage/tests/storage_data.py --- a/swh/storage/tests/storage_data.py +++ b/swh/storage/tests/storage_data.py @@ -12,6 +12,8 @@ from swh.model.identifiers import parse_swhid from swh.model.model import ( Content, + Directory, + DirectoryEntry, MetadataAuthority, MetadataAuthorityType, MetadataFetcher, @@ -123,74 +125,81 @@ ) -dir = { - "id": hash_to_bytes("34f335a750111ca0a8b64d8034faec9eedc396be"), - "entries": ( - { - "name": b"foo", - "type": "file", - "target": content.sha1_git, - "perms": from_disk.DentryPerms.content, - }, - { - "name": b"bar\xc3", - "type": "dir", - "target": b"12345678901234567890", - "perms": from_disk.DentryPerms.directory, - }, +directory5 = Directory(entries=()) + +directory = Directory( + id=hash_to_bytes("34f335a750111ca0a8b64d8034faec9eedc396be"), + entries=tuple( + [ + DirectoryEntry( + name=b"foo", + type="file", + target=content.sha1_git, + perms=from_disk.DentryPerms.content, + ), + DirectoryEntry( + name=b"bar\xc3", + type="dir", + target=directory5.id, + perms=from_disk.DentryPerms.directory, + ), + ], ), -} +) -dir2 = { - "id": hash_to_bytes("8505808532953da7d2581741f01b29c04b1cb9ab"), - "entries": ( - { - "name": b"oof", - "type": "file", - "target": content2.sha1_git, - "perms": 
from_disk.DentryPerms.content, - }, +directory2 = Directory( + id=hash_to_bytes("8505808532953da7d2581741f01b29c04b1cb9ab"), + entries=tuple( + [ + DirectoryEntry( + name=b"oof", + type="file", + target=content2.sha1_git, + perms=from_disk.DentryPerms.content, + ) + ], ), -} +) -dir3 = { - "id": hash_to_bytes("4ea8c6b2f54445e5dd1a9d5bb2afd875d66f3150"), - "entries": ( - { - "name": b"foo", - "type": "file", - "target": content.sha1_git, - "perms": from_disk.DentryPerms.content, - }, - { - "name": b"subdir", - "type": "dir", - "target": hash_to_bytes("34f335a750111ca0a8b64d8034faec9eedc396be"), # dir - "perms": from_disk.DentryPerms.directory, - }, - { - "name": b"hello", - "type": "file", - "target": b"12345678901234567890", - "perms": from_disk.DentryPerms.content, - }, +directory3 = Directory( + id=hash_to_bytes("4ea8c6b2f54445e5dd1a9d5bb2afd875d66f3150"), + entries=tuple( + [ + DirectoryEntry( + name=b"foo", + type="file", + target=content.sha1_git, + perms=from_disk.DentryPerms.content, + ), + DirectoryEntry( + name=b"subdir", + type="dir", + target=directory.id, + perms=from_disk.DentryPerms.directory, + ), + DirectoryEntry( + name=b"hello", + type="file", + target=directory5.id, + perms=from_disk.DentryPerms.content, + ), + ], ), -} +) -dir4 = { - "id": hash_to_bytes("377aa5fcd944fbabf502dbfda55cd14d33c8c3c6"), - "entries": ( - { - "name": b"subdir1", - "type": "dir", - "target": hash_to_bytes("4ea8c6b2f54445e5dd1a9d5bb2afd875d66f3150"), # dir3 - "perms": from_disk.DentryPerms.directory, - }, +directory4 = Directory( + id=hash_to_bytes("377aa5fcd944fbabf502dbfda55cd14d33c8c3c6"), + entries=tuple( + [ + DirectoryEntry( + name=b"subdir1", + type="dir", + target=directory3.id, + perms=from_disk.DentryPerms.directory, + ) + ], ), -} - -directories = (dir, dir2, dir3, dir4) - +) minus_offset = datetime.timezone(datetime.timedelta(minutes=-120)) plus_offset = datetime.timezone(datetime.timedelta(minutes=120)) @@ -538,7 +547,7 @@ 
snapshot=parse_swhid(f"swh:1:snp:{hash_to_hex(snapshot['id'])}"), release=parse_swhid(f"swh:1:rel:{hash_to_hex(release['id'])}"), revision=parse_swhid(f"swh:1:rev:{hash_to_hex(revision['id'])}"), - directory=parse_swhid(f"swh:1:dir:{hash_to_hex(dir['id'])}"), + directory=parse_swhid(f"swh:1:dir:{hash_to_hex(directory.id)}"), path=b"/foo/bar", ) diff --git a/swh/storage/tests/test_pytest_plugin.py b/swh/storage/tests/test_pytest_plugin.py --- a/swh/storage/tests/test_pytest_plugin.py +++ b/swh/storage/tests/test_pytest_plugin.py @@ -32,6 +32,7 @@ if object_type in [ "content", "skipped_content", + "directory", "fetcher", "authority", "origin_metadata", diff --git a/swh/storage/tests/test_storage.py b/swh/storage/tests/test_storage.py --- a/swh/storage/tests/test_storage.py +++ b/swh/storage/tests/test_storage.py @@ -28,7 +28,6 @@ from swh.model.identifiers import SWHID from swh.model.model import ( Content, - Directory, MetadataTargetType, Origin, OriginVisit, @@ -668,7 +667,7 @@ assert actual_result == {"directory:add": 1} assert list(swh_storage.journal_writer.journal.objects) == [ - ("directory", Directory.from_dict(data.dir)) + ("directory", directory) ] actual_data = list(swh_storage.directory_ls(directory.id)) @@ -783,7 +782,7 @@ def test_directory_entry_get_by_path(self, swh_storage, sample_data_model): cont = sample_data_model["content"][0] - dir1, dir2, dir3, dir4 = sample_data_model["directory"][:4] + dir1, dir2, dir3, dir4, dir5 = sample_data_model["directory"][:5] # given init_missing = list(swh_storage.directory_missing([dir3.id])) @@ -821,7 +820,7 @@ "dir_id": dir3.id, "name": b"hello", "type": "file", - "target": b"12345678901234567890", + "target": dir5.id, "sha1": None, "sha1_git": None, "sha256": None, @@ -847,7 +846,7 @@ expected_entry["name"] = b"subdir1/" + expected_entry["name"] assert actual_entry == expected_entry - # when (nothing should be found here since data.dir is not persisted.) 
+ # when (nothing should be found here since `dir2` is not persisted.) for entry in dir2.entries: actual_entry = swh_storage.directory_entry_get_by_path( dir2.id, [entry.name]