diff --git a/swh/storage/pytest_plugin.py b/swh/storage/pytest_plugin.py index 5883f455..2bcdf672 100644 --- a/swh/storage/pytest_plugin.py +++ b/swh/storage/pytest_plugin.py @@ -1,212 +1,200 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import glob from os import path, environ -from typing import Dict, Tuple, Union +from typing import Union import pytest import swh.storage from pytest_postgresql import factories from pytest_postgresql.janitor import DatabaseJanitor, psycopg2, Version from swh.core.utils import numfile_sortkey as sortkey -from swh.model.model import BaseModel from swh.storage import get_storage -from swh.storage.tests.storage_data import data + +from swh.storage.tests.storage_data import StorageData SQL_DIR = path.join(path.dirname(swh.storage.__file__), "sql") environ["LC_ALL"] = "C.UTF-8" DUMP_FILES = path.join(SQL_DIR, "*.sql") @pytest.fixture def swh_storage_backend_config(postgresql_proc, swh_storage_postgresql): """Basic pg storage configuration with no journal collaborator (to avoid pulling optional dependency on clients of this fixture) """ yield { "cls": "local", "db": "postgresql://{user}@{host}:{port}/{dbname}".format( host=postgresql_proc.host, port=postgresql_proc.port, user="postgres", dbname="tests", ), "objstorage": {"cls": "memory", "args": {}}, } @pytest.fixture def swh_storage(swh_storage_backend_config): return get_storage(**swh_storage_backend_config) # the postgresql_fact factory fixture below is mostly a copy of the code # from pytest-postgresql. We need a custom version here to be able to # specify our version of the DBJanitor we use. def postgresql_fact(process_fixture_name, db_name=None, dump_files=DUMP_FILES): @pytest.fixture def postgresql_factory(request): """ Fixture factory for PostgreSQL. :param FixtureRequest request: fixture request object :rtype: psycopg2.connection :returns: postgresql client """ config = factories.get_config(request) if not psycopg2: raise ImportError("No module named psycopg2. Please install it.") proc_fixture = request.getfixturevalue(process_fixture_name) # _, config = try_import('psycopg2', request) pg_host = proc_fixture.host pg_port = proc_fixture.port pg_user = proc_fixture.user pg_options = proc_fixture.options pg_db = db_name or config["dbname"] with SwhDatabaseJanitor( pg_user, pg_host, pg_port, pg_db, proc_fixture.version, dump_files=dump_files, ): connection = psycopg2.connect( dbname=pg_db, user=pg_user, host=pg_host, port=pg_port, options=pg_options, ) yield connection connection.close() return postgresql_factory swh_storage_postgresql = postgresql_fact("postgresql_proc") # This version of the DatabaseJanitor implements a different setup/teardown # behavior than the stock one: instead of dropping, creating and # initializing the database for each test, it creates and initializes the db # only once, then it truncates the tables. This is needed to have acceptable # test performance.
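As a concrete illustration of what this buys, here is a minimal sketch (not part of the patch) of the isolation the truncate-based reset provides: the database is created and initialized once per session, yet every test starts from empty tables. It assumes the swh_storage_postgresql fixture defined above (which yields a psycopg2 connection) and an origin table with a url column, as in swh-storage's SQL schema.

def test_sees_empty_tables(swh_storage_postgresql):
    # db_reset() has truncated all public tables before this test runs
    with swh_storage_postgresql.cursor() as cur:
        cur.execute("SELECT count(*) FROM origin")
        assert cur.fetchone()[0] == 0
        cur.execute("INSERT INTO origin (url) VALUES ('https://example.com/repo')")
    swh_storage_postgresql.commit()

def test_still_sees_empty_tables(swh_storage_postgresql):
    # the row inserted by the previous test is gone, without paying the
    # cost of dropping and recreating the database
    with swh_storage_postgresql.cursor() as cur:
        cur.execute("SELECT count(*) FROM origin")
        assert cur.fetchone()[0] == 0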
class SwhDatabaseJanitor(DatabaseJanitor): def __init__( self, user: str, host: str, port: str, db_name: str, version: Union[str, float, Version], dump_files: str = DUMP_FILES, ) -> None: super().__init__(user, host, port, db_name, version) self.dump_files = sorted(glob.glob(dump_files), key=sortkey) def db_setup(self): with psycopg2.connect( dbname=self.db_name, user=self.user, host=self.host, port=self.port, ) as cnx: with cnx.cursor() as cur: for fname in self.dump_files: with open(fname) as fobj: sql = fobj.read().replace("concurrently", "").strip() if sql: cur.execute(sql) cnx.commit() def db_reset(self): with psycopg2.connect( dbname=self.db_name, user=self.user, host=self.host, port=self.port, ) as cnx: with cnx.cursor() as cur: cur.execute( "SELECT table_name FROM information_schema.tables " "WHERE table_schema = %s", ("public",), ) tables = set(table for (table,) in cur.fetchall()) for table in tables: cur.execute("truncate table %s cascade" % table) cur.execute( "SELECT sequence_name FROM information_schema.sequences " "WHERE sequence_schema = %s", ("public",), ) seqs = set(seq for (seq,) in cur.fetchall()) for seq in seqs: cur.execute("ALTER SEQUENCE %s RESTART;" % seq) cnx.commit() def init(self): with self.cursor() as cur: cur.execute( "SELECT COUNT(1) FROM pg_database WHERE datname=%s;", (self.db_name,) ) db_exists = cur.fetchone()[0] == 1 if db_exists: cur.execute( "UPDATE pg_database SET datallowconn=true " "WHERE datname = %s;", (self.db_name,), ) if db_exists: self.db_reset() else: with self.cursor() as cur: cur.execute('CREATE DATABASE "{}";'.format(self.db_name)) self.db_setup() def drop(self): pid_column = "pid" with self.cursor() as cur: cur.execute( "UPDATE pg_database SET datallowconn=false " "WHERE datname = %s;", (self.db_name,), ) cur.execute( "SELECT pg_terminate_backend(pg_stat_activity.{})" "FROM pg_stat_activity " "WHERE pg_stat_activity.datname = %s;".format(pid_column), (self.db_name,), ) @pytest.fixture -def sample_data() -> Dict[str, Tuple[BaseModel, ...]]: +def sample_data() -> StorageData: """Pre-defined sample storage object data to manipulate Returns: - Dict of data model objects (keys: content, directory, revision, release, person, - origin) + StorageData whose attributes are data model objects: either tuples of + objects (contents, directories, revisions, releases, ...) or single + objects (content, directory, revision, release, ...)
""" - return { - "content": data.contents, - "skipped_content": data.skipped_contents, - "directory": data.directories, - "revision": data.revisions, - "release": data.releases, - "snapshot": data.snapshots, - "origin": data.origins, - "origin_visit": data.origin_visits, - "fetcher": data.fetchers, - "authority": data.authorities, - "origin_metadata": data.origin_metadata, - "content_metadata": data.content_metadata, - } + return StorageData() diff --git a/swh/storage/tests/algos/test_origin.py b/swh/storage/tests/algos/test_origin.py index 3b4646e0..aedb0ed5 100644 --- a/swh/storage/tests/algos/test_origin.py +++ b/swh/storage/tests/algos/test_origin.py @@ -1,314 +1,321 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import pytest from unittest.mock import patch from swh.model.model import Origin, OriginVisit, OriginVisitStatus from swh.storage.algos.origin import iter_origins, origin_get_latest_visit_status from swh.storage.utils import now from swh.storage.tests.test_storage import round_to_milliseconds -from swh.storage.tests.storage_data import data def assert_list_eq(left, right, msg=None): assert list(left) == list(right), msg @pytest.fixture def swh_storage_backend_config(): yield { "cls": "memory", } def test_iter_origins(swh_storage): origins = [ Origin(url="bar"), Origin(url="qux"), Origin(url="quuz"), ] assert swh_storage.origin_add(origins) == {"origin:add": 3} assert_list_eq(iter_origins(swh_storage), origins) assert_list_eq(iter_origins(swh_storage, batch_size=1), origins) assert_list_eq(iter_origins(swh_storage, batch_size=2), origins) for i in range(1, 5): assert_list_eq(iter_origins(swh_storage, origin_from=i + 1), origins[i:], i) assert_list_eq( iter_origins(swh_storage, origin_from=i + 1, batch_size=1), origins[i:], i ) assert_list_eq( iter_origins(swh_storage, origin_from=i + 1, batch_size=2), origins[i:], i ) for j in range(i, 5): assert_list_eq( iter_origins(swh_storage, origin_from=i + 1, origin_to=j + 1), origins[i:j], (i, j), ) assert_list_eq( iter_origins( swh_storage, origin_from=i + 1, origin_to=j + 1, batch_size=1 ), origins[i:j], (i, j), ) assert_list_eq( iter_origins( swh_storage, origin_from=i + 1, origin_to=j + 1, batch_size=2 ), origins[i:j], (i, j), ) @patch("swh.storage.in_memory.InMemoryStorage.origin_get_range") def test_iter_origins_batch_size(mock_origin_get_range, swh_storage): mock_origin_get_range.return_value = [] list(iter_origins(swh_storage)) mock_origin_get_range.assert_called_with(origin_from=1, origin_count=10000) list(iter_origins(swh_storage, batch_size=42)) mock_origin_get_range.assert_called_with(origin_from=1, origin_count=42) def test_origin_get_latest_visit_status_none(swh_storage, sample_data): """Looking up unknown objects should return nothing """ # unknown origin so no result assert origin_get_latest_visit_status(swh_storage, "unknown-origin") is None # unknown type so no result - origin = sample_data["origin"][0] - origin_visit = sample_data["origin_visit"][0] + origin = sample_data.origin + origin_visit = sample_data.origin_visit assert origin_visit.origin == origin.url swh_storage.origin_add([origin]) swh_storage.origin_visit_add([origin_visit])[0] assert origin_visit.type != "unknown" actual_origin_visit = origin_get_latest_visit_status( swh_storage, origin.url, type="unknown" ) assert actual_origin_visit is None 
actual_origin_visit = origin_get_latest_visit_status( swh_storage, origin.url, require_snapshot=True ) assert actual_origin_visit is None actual_origin_visit = origin_get_latest_visit_status( swh_storage, origin.url, allowed_statuses=["unknown"] ) assert actual_origin_visit is None def init_storage_with_origin_visits(swh_storage, sample_data): """Initialize storage with origin/origin-visit/origin-visit-status """ - snapshot = sample_data["snapshot"][2] - origin1, origin2 = sample_data["origin"][:2] + snapshot = sample_data.snapshots[2] + origin1, origin2 = sample_data.origins[:2] swh_storage.origin_add([origin1, origin2]) ov1, ov2 = swh_storage.origin_visit_add( [ OriginVisit( - origin=origin1.url, date=data.date_visit1, type=data.type_visit1, + origin=origin1.url, + date=sample_data.date_visit1, + type=sample_data.type_visit1, ), OriginVisit( - origin=origin2.url, date=data.date_visit2, type=data.type_visit2, + origin=origin2.url, + date=sample_data.date_visit2, + type=sample_data.type_visit2, ), ] ) swh_storage.snapshot_add([snapshot]) date_now = now() date_now = round_to_milliseconds(date_now) - assert data.date_visit1 < data.date_visit2 - assert data.date_visit2 < date_now + assert sample_data.date_visit1 < sample_data.date_visit2 + assert sample_data.date_visit2 < date_now # origin visit status 1 for origin visit 1 ovs11 = OriginVisitStatus( origin=origin1.url, visit=ov1.visit, - date=data.date_visit1, + date=sample_data.date_visit1, status="partial", snapshot=None, ) # origin visit status 2 for origin visit 1 ovs12 = OriginVisitStatus( origin=origin1.url, visit=ov1.visit, - date=data.date_visit2, + date=sample_data.date_visit2, status="ongoing", snapshot=None, ) # origin visit status 1 for origin visit 2 ovs21 = OriginVisitStatus( origin=origin2.url, visit=ov2.visit, - date=data.date_visit2, + date=sample_data.date_visit2, status="ongoing", snapshot=None, ) # origin visit status 2 for origin visit 2 ovs22 = OriginVisitStatus( origin=origin2.url, visit=ov2.visit, date=date_now, status="full", snapshot=snapshot.id, metadata={"something": "wicked"}, ) swh_storage.origin_visit_status_add([ovs11, ovs12, ovs21, ovs22]) return { "origin": [origin1, origin2], "origin_visit": [ov1, ov2], "origin_visit_status": [ovs11, ovs12, ovs21, ovs22], } def test_origin_get_latest_visit_status_filter_type(swh_storage, sample_data): """Filtering origin visit per types should yield consistent results """ objects = init_storage_with_origin_visits(swh_storage, sample_data) origin1, origin2 = objects["origin"] ov1, ov2 = objects["origin_visit"] ovs11, ovs12, _, ovs22 = objects["origin_visit_status"] # no visit for origin1 url with type_visit2 assert ( - origin_get_latest_visit_status(swh_storage, origin1.url, type=data.type_visit2) + origin_get_latest_visit_status( + swh_storage, origin1.url, type=sample_data.type_visit2 + ) is None ) # no visit for origin2 url with type_visit1 assert ( - origin_get_latest_visit_status(swh_storage, origin2.url, type=data.type_visit1) + origin_get_latest_visit_status( + swh_storage, origin2.url, type=sample_data.type_visit1 + ) is None ) # Two visits, both with no snapshot, take the most recent actual_ov1, actual_ovs12 = origin_get_latest_visit_status( - swh_storage, origin1.url, type=data.type_visit1 + swh_storage, origin1.url, type=sample_data.type_visit1 ) assert isinstance(actual_ov1, OriginVisit) assert isinstance(actual_ovs12, OriginVisitStatus) assert actual_ov1.origin == ov1.origin assert actual_ov1.visit == ov1.visit - assert actual_ov1.type == data.type_visit1 + 
assert actual_ov1.type == sample_data.type_visit1 assert actual_ovs12 == ovs12 # take the most recent visit with type_visit2 actual_ov2, actual_ovs22 = origin_get_latest_visit_status( - swh_storage, origin2.url, type=data.type_visit2 + swh_storage, origin2.url, type=sample_data.type_visit2 ) assert isinstance(actual_ov2, OriginVisit) assert isinstance(actual_ovs22, OriginVisitStatus) assert actual_ov2.origin == ov2.origin assert actual_ov2.visit == ov2.visit - assert actual_ov2.type == data.type_visit2 + assert actual_ov2.type == sample_data.type_visit2 assert actual_ovs22 == ovs22 def test_origin_get_latest_visit_status_filter_status(swh_storage, sample_data): objects = init_storage_with_origin_visits(swh_storage, sample_data) origin1, origin2 = objects["origin"] ov1, ov2 = objects["origin_visit"] ovs11, ovs12, _, ovs22 = objects["origin_visit_status"] # no failed status for that visit assert ( origin_get_latest_visit_status( swh_storage, origin2.url, allowed_statuses=["failed"] ) is None ) # only 1 partial for that visit actual_ov1, actual_ovs11 = origin_get_latest_visit_status( swh_storage, origin1.url, allowed_statuses=["partial"] ) assert actual_ov1.origin == ov1.origin assert actual_ov1.visit == ov1.visit - assert actual_ov1.type == data.type_visit1 + assert actual_ov1.type == sample_data.type_visit1 assert actual_ovs11 == ovs11 # both status exist, take the latest one actual_ov1, actual_ovs12 = origin_get_latest_visit_status( swh_storage, origin1.url, allowed_statuses=["partial", "ongoing"] ) assert actual_ov1.origin == ov1.origin assert actual_ov1.visit == ov1.visit - assert actual_ov1.type == data.type_visit1 + assert actual_ov1.type == sample_data.type_visit1 assert actual_ovs12 == ovs12 assert isinstance(actual_ov1, OriginVisit) assert isinstance(actual_ovs12, OriginVisitStatus) assert actual_ov1.origin == ov1.origin assert actual_ov1.visit == ov1.visit - assert actual_ov1.type == data.type_visit1 + assert actual_ov1.type == sample_data.type_visit1 assert actual_ovs12 == ovs12 # take the most recent visit with type_visit2 actual_ov2, actual_ovs22 = origin_get_latest_visit_status( swh_storage, origin2.url, allowed_statuses=["full"] ) assert actual_ov2.origin == ov2.origin assert actual_ov2.visit == ov2.visit - assert actual_ov2.type == data.type_visit2 + assert actual_ov2.type == sample_data.type_visit2 assert actual_ovs22 == ovs22 def test_origin_get_latest_visit_status_filter_snapshot(swh_storage, sample_data): objects = init_storage_with_origin_visits(swh_storage, sample_data) origin1, origin2 = objects["origin"] _, ov2 = objects["origin_visit"] _, _, _, ovs22 = objects["origin_visit_status"] # there is no visit with snapshot yet for that visit assert ( origin_get_latest_visit_status(swh_storage, origin1.url, require_snapshot=True) is None ) # visit status with partial status visit elected actual_ov2, actual_ovs22 = origin_get_latest_visit_status( swh_storage, origin2.url, require_snapshot=True ) assert actual_ov2.origin == ov2.origin assert actual_ov2.visit == ov2.visit assert actual_ov2.type == ov2.type assert actual_ovs22 == ovs22 date_now = now() # Add another visit swh_storage.origin_visit_add( - [OriginVisit(origin=origin2.url, date=date_now, type=data.type_visit2,),] + [OriginVisit(origin=origin2.url, date=date_now, type=sample_data.type_visit2,),] ) # Requiring the latest visit with a snapshot, we still find the previous visit ov2, ovs22 = origin_get_latest_visit_status( swh_storage, origin2.url, require_snapshot=True ) assert actual_ov2.origin == ov2.origin assert 
actual_ov2.visit == ov2.visit assert actual_ov2.type == ov2.type assert actual_ovs22 == ovs22 diff --git a/swh/storage/tests/algos/test_snapshot.py b/swh/storage/tests/algos/test_snapshot.py index 9cd1103d..8db201b9 100644 --- a/swh/storage/tests/algos/test_snapshot.py +++ b/swh/storage/tests/algos/test_snapshot.py @@ -1,149 +1,149 @@ # Copyright (C) 2018-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from hypothesis import given import pytest from swh.model.collections import ImmutableDict from swh.model.hypothesis_strategies import snapshots, branch_names, branch_targets from swh.model.model import OriginVisit, OriginVisitStatus, Snapshot from swh.storage.algos.snapshot import snapshot_get_all_branches, snapshot_get_latest from swh.storage.utils import now @pytest.fixture def swh_storage_backend_config(): yield { "cls": "memory", "journal_writer": None, } @given(snapshot=snapshots(min_size=0, max_size=10, only_objects=False)) def test_snapshot_small(swh_storage, snapshot): # noqa swh_storage.snapshot_add([snapshot]) returned_snapshot = snapshot_get_all_branches(swh_storage, snapshot.id) assert snapshot.to_dict() == returned_snapshot @given(branch_name=branch_names(), branch_target=branch_targets(only_objects=True)) def test_snapshot_large(swh_storage, branch_name, branch_target): # noqa snapshot = Snapshot( branches=ImmutableDict( (b"%s%05d" % (branch_name, i), branch_target) for i in range(10000) ), ) swh_storage.snapshot_add([snapshot]) returned_snapshot = snapshot_get_all_branches(swh_storage, snapshot.id) assert snapshot.to_dict() == returned_snapshot def test_snapshot_get_latest_none(swh_storage, sample_data): """Retrieve latest snapshot on unknown origin or origin without snapshot should yield no result """ # unknown origin so None assert snapshot_get_latest(swh_storage, "unknown-origin") is None # no snapshot on origin visit so None - origin = sample_data["origin"][0] + origin = sample_data.origin swh_storage.origin_add([origin]) - origin_visit, origin_visit2 = sample_data["origin_visit"][:2] + origin_visit, origin_visit2 = sample_data.origin_visits[:2] assert origin_visit.origin == origin.url swh_storage.origin_visit_add([origin_visit]) assert snapshot_get_latest(swh_storage, origin.url) is None ov1 = swh_storage.origin_visit_get_latest(origin.url) assert ov1 is not None visit_id = ov1["visit"] # visit references a snapshot but the snapshot does not exist in backend for some # reason - complete_snapshot = sample_data["snapshot"][2] + complete_snapshot = sample_data.snapshots[2] swh_storage.origin_visit_status_add( [ OriginVisitStatus( origin=origin.url, visit=visit_id, date=origin_visit2.date, status="partial", snapshot=complete_snapshot.id, ) ] ) # so we do not find it assert snapshot_get_latest(swh_storage, origin.url) is None assert snapshot_get_latest(swh_storage, origin.url, branches_count=1) is None def test_snapshot_get_latest(swh_storage, sample_data): - origin = sample_data["origin"][0] + origin = sample_data.origin swh_storage.origin_add([origin]) - visit1, visit2 = sample_data["origin_visit"][:2] + visit1, visit2 = sample_data.origin_visits[:2] assert visit1.origin == origin.url swh_storage.origin_visit_add([visit1]) ov1 = swh_storage.origin_visit_get_latest(origin.url) visit_id = ov1["visit"] # Add snapshot to visit1, latest snapshot = visit 1 snapshot - complete_snapshot = 
sample_data["snapshot"][2] + complete_snapshot = sample_data.snapshots[2] swh_storage.snapshot_add([complete_snapshot]) swh_storage.origin_visit_status_add( [ OriginVisitStatus( origin=origin.url, visit=visit_id, date=visit2.date, status="partial", snapshot=None, ) ] ) assert visit1.date < visit2.date # no snapshot associated to the visit, so None actual_snapshot = snapshot_get_latest( swh_storage, origin.url, allowed_statuses=["partial"] ) assert actual_snapshot is None date_now = now() assert visit2.date < date_now swh_storage.origin_visit_status_add( [ OriginVisitStatus( origin=origin.url, visit=visit_id, date=date_now, status="full", snapshot=complete_snapshot.id, ) ] ) swh_storage.origin_visit_add( [OriginVisit(origin=origin.url, date=now(), type=visit1.type,)] ) actual_snapshot = snapshot_get_latest(swh_storage, origin.url) assert actual_snapshot is not None assert actual_snapshot == complete_snapshot actual_snapshot = snapshot_get_latest(swh_storage, origin.url, branches_count=1) assert actual_snapshot is not None assert actual_snapshot.id == complete_snapshot.id assert len(actual_snapshot.branches.values()) == 1 with pytest.raises(ValueError, match="branches_count must be a positive integer"): snapshot_get_latest(swh_storage, origin.url, branches_count="something-wrong") diff --git a/swh/storage/tests/storage_data.py b/swh/storage/tests/storage_data.py index f76fee4f..3040c771 100644 --- a/swh/storage/tests/storage_data.py +++ b/swh/storage/tests/storage_data.py @@ -1,566 +1,553 @@ # Copyright (C) 2015-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import attr +from typing import Tuple + from swh.model.hashutil import hash_to_bytes, hash_to_hex from swh.model import from_disk from swh.model.identifiers import parse_swhid from swh.model.model import ( Content, Directory, DirectoryEntry, MetadataAuthority, MetadataAuthorityType, MetadataFetcher, MetadataTargetType, ObjectType, Origin, OriginVisit, Person, RawExtrinsicMetadata, Release, Revision, RevisionType, SkippedContent, Snapshot, SnapshotBranch, TargetType, Timestamp, TimestampWithTimezone, ) class StorageData: - def __getattr__(self, key): - try: - v = globals()[key] - except KeyError as e: - raise AttributeError(e.args[0]) - if hasattr(v, "copy"): - return v.copy() - return v - - -data = StorageData() - - -content = Content( - data=b"42\n", - length=3, - sha1=hash_to_bytes("34973274ccef6ab4dfaaf86599792fa9c3fe4689"), - sha1_git=hash_to_bytes("d81cc0710eb6cf9efd5b920a8453e1e07157b6cd"), - sha256=hash_to_bytes( - "673650f936cb3b0a2f93ce09d81be10748b1b203c19e8176b4eefc1964a0cf3a" - ), - blake2s256=hash_to_bytes( - "d5fe1939576527e42cfd76a9455a2432fe7f56669564577dd93c4280e76d661d" - ), - status="visible", -) - -content2 = Content( - data=b"4242\n", - length=5, - sha1=hash_to_bytes("61c2b3a30496d329e21af70dd2d7e097046d07b7"), - sha1_git=hash_to_bytes("36fade77193cb6d2bd826161a0979d64c28ab4fa"), - sha256=hash_to_bytes( - "859f0b154fdb2d630f45e1ecae4a862915435e663248bb8461d914696fc047cd" - ), - blake2s256=hash_to_bytes( - "849c20fad132b7c2d62c15de310adfe87be94a379941bed295e8141c6219810d" - ), - status="visible", -) - -content3 = Content( - data=b"424242\n", - length=7, - sha1=hash_to_bytes("3e21cc4942a4234c9e5edd8a9cacd1670fe59f13"), - sha1_git=hash_to_bytes("c932c7649c6dfa4b82327d121215116909eb3bea"), - sha256=hash_to_bytes( - 
"92fb72daf8c6818288a35137b72155f507e5de8d892712ab96277aaed8cf8a36" - ), - blake2s256=hash_to_bytes( - "76d0346f44e5a27f6bafdd9c2befd304aff83780f93121d801ab6a1d4769db11" - ), - status="visible", - ctime=datetime.datetime(2019, 12, 1, tzinfo=datetime.timezone.utc), -) - -contents = (content, content2, content3) - - -skipped_content = SkippedContent( - length=1024 * 1024 * 200, - sha1_git=hash_to_bytes("33e45d56f88993aae6a0198013efa80716fd8920"), - sha1=hash_to_bytes("43e45d56f88993aae6a0198013efa80716fd8920"), - sha256=hash_to_bytes( - "7bbd052ab054ef222c1c87be60cd191addedd24cc882d1f5f7f7be61dc61bb3a" - ), - blake2s256=hash_to_bytes( - "ade18b1adecb33f891ca36664da676e12c772cc193778aac9a137b8dc5834b9b" - ), - reason="Content too long", - status="absent", - origin="file:///dev/zero", -) - -skipped_content2 = SkippedContent( - length=1024 * 1024 * 300, - sha1_git=hash_to_bytes("44e45d56f88993aae6a0198013efa80716fd8921"), - sha1=hash_to_bytes("54e45d56f88993aae6a0198013efa80716fd8920"), - sha256=hash_to_bytes( - "8cbd052ab054ef222c1c87be60cd191addedd24cc882d1f5f7f7be61dc61bb3a" - ), - blake2s256=hash_to_bytes( - "9ce18b1adecb33f891ca36664da676e12c772cc193778aac9a137b8dc5834b9b" - ), - reason="Content too long", - status="absent", -) + """Data model objects to use within tests. -skipped_contents = (skipped_content, skipped_content2) + """ -directory5 = Directory(entries=()) - -directory = Directory( - id=hash_to_bytes("34f335a750111ca0a8b64d8034faec9eedc396be"), - entries=tuple( - [ - DirectoryEntry( - name=b"foo", - type="file", - target=content.sha1_git, - perms=from_disk.DentryPerms.content, + content = Content( + data=b"42\n", + length=3, + sha1=hash_to_bytes("34973274ccef6ab4dfaaf86599792fa9c3fe4689"), + sha1_git=hash_to_bytes("d81cc0710eb6cf9efd5b920a8453e1e07157b6cd"), + sha256=hash_to_bytes( + "673650f936cb3b0a2f93ce09d81be10748b1b203c19e8176b4eefc1964a0cf3a" + ), + blake2s256=hash_to_bytes( + "d5fe1939576527e42cfd76a9455a2432fe7f56669564577dd93c4280e76d661d" + ), + status="visible", + ) + content2 = Content( + data=b"4242\n", + length=5, + sha1=hash_to_bytes("61c2b3a30496d329e21af70dd2d7e097046d07b7"), + sha1_git=hash_to_bytes("36fade77193cb6d2bd826161a0979d64c28ab4fa"), + sha256=hash_to_bytes( + "859f0b154fdb2d630f45e1ecae4a862915435e663248bb8461d914696fc047cd" + ), + blake2s256=hash_to_bytes( + "849c20fad132b7c2d62c15de310adfe87be94a379941bed295e8141c6219810d" + ), + status="visible", + ) + content3 = Content( + data=b"424242\n", + length=7, + sha1=hash_to_bytes("3e21cc4942a4234c9e5edd8a9cacd1670fe59f13"), + sha1_git=hash_to_bytes("c932c7649c6dfa4b82327d121215116909eb3bea"), + sha256=hash_to_bytes( + "92fb72daf8c6818288a35137b72155f507e5de8d892712ab96277aaed8cf8a36" + ), + blake2s256=hash_to_bytes( + "76d0346f44e5a27f6bafdd9c2befd304aff83780f93121d801ab6a1d4769db11" + ), + status="visible", + ctime=datetime.datetime(2019, 12, 1, tzinfo=datetime.timezone.utc), + ) + contents: Tuple[Content, ...] 
= (content, content2, content3) + + skipped_content = SkippedContent( + length=1024 * 1024 * 200, + sha1_git=hash_to_bytes("33e45d56f88993aae6a0198013efa80716fd8920"), + sha1=hash_to_bytes("43e45d56f88993aae6a0198013efa80716fd8920"), + sha256=hash_to_bytes( + "7bbd052ab054ef222c1c87be60cd191addedd24cc882d1f5f7f7be61dc61bb3a" + ), + blake2s256=hash_to_bytes( + "ade18b1adecb33f891ca36664da676e12c772cc193778aac9a137b8dc5834b9b" + ), + reason="Content too long", + status="absent", + origin="file:///dev/zero", + ) + skipped_content2 = SkippedContent( + length=1024 * 1024 * 300, + sha1_git=hash_to_bytes("44e45d56f88993aae6a0198013efa80716fd8921"), + sha1=hash_to_bytes("54e45d56f88993aae6a0198013efa80716fd8920"), + sha256=hash_to_bytes( + "8cbd052ab054ef222c1c87be60cd191addedd24cc882d1f5f7f7be61dc61bb3a" + ), + blake2s256=hash_to_bytes( + "9ce18b1adecb33f891ca36664da676e12c772cc193778aac9a137b8dc5834b9b" + ), + reason="Content too long", + status="absent", + ) + skipped_contents: Tuple[SkippedContent, ...] = (skipped_content, skipped_content2) + + directory5 = Directory(entries=()) + directory = Directory( + id=hash_to_bytes("34f335a750111ca0a8b64d8034faec9eedc396be"), + entries=tuple( + [ + DirectoryEntry( + name=b"foo", + type="file", + target=content.sha1_git, + perms=from_disk.DentryPerms.content, + ), + DirectoryEntry( + name=b"bar\xc3", + type="dir", + target=directory5.id, + perms=from_disk.DentryPerms.directory, + ), + ], + ), + ) + directory2 = Directory( + id=hash_to_bytes("8505808532953da7d2581741f01b29c04b1cb9ab"), + entries=tuple( + [ + DirectoryEntry( + name=b"oof", + type="file", + target=content2.sha1_git, + perms=from_disk.DentryPerms.content, + ) + ], + ), + ) + directory3 = Directory( + id=hash_to_bytes("4ea8c6b2f54445e5dd1a9d5bb2afd875d66f3150"), + entries=tuple( + [ + DirectoryEntry( + name=b"foo", + type="file", + target=content.sha1_git, + perms=from_disk.DentryPerms.content, + ), + DirectoryEntry( + name=b"subdir", + type="dir", + target=directory.id, + perms=from_disk.DentryPerms.directory, + ), + DirectoryEntry( + name=b"hello", + type="file", + target=directory5.id, + perms=from_disk.DentryPerms.content, + ), + ], + ), + ) + directory4 = Directory( + id=hash_to_bytes("377aa5fcd944fbabf502dbfda55cd14d33c8c3c6"), + entries=tuple( + [ + DirectoryEntry( + name=b"subdir1", + type="dir", + target=directory3.id, + perms=from_disk.DentryPerms.directory, + ) + ], + ), + ) + directories: Tuple[Directory, ...] 
= ( + directory2, + directory, + directory3, + directory4, + directory5, + ) + + revision = Revision( + id=hash_to_bytes("066b1b62dbfa033362092af468bf6cfabec230e7"), + message=b"hello", + author=Person( + name=b"Nicolas Dandrimont", + email=b"nicolas@example.com", + fullname=b"Nicolas Dandrimont ", + ), + date=TimestampWithTimezone( + timestamp=Timestamp(seconds=1234567890, microseconds=0), + offset=120, + negative_utc=False, + ), + committer=Person( + name=b"St\xc3fano Zacchiroli", + email=b"stefano@example.com", + fullname=b"St\xc3fano Zacchiroli ", + ), + committer_date=TimestampWithTimezone( + timestamp=Timestamp(seconds=1123456789, microseconds=0), + offset=120, + negative_utc=False, + ), + parents=(), + type=RevisionType.GIT, + directory=directory.id, + metadata={ + "checksums": {"sha1": "tarball-sha1", "sha256": "tarball-sha256",}, + "signed-off-by": "some-dude", + }, + extra_headers=( + (b"gpgsig", b"test123"), + (b"mergetag", b"foo\\bar"), + (b"mergetag", b"\x22\xaf\x89\x80\x01\x00"), + ), + synthetic=True, + ) + revision2 = Revision( + id=hash_to_bytes("df7a6f6a99671fb7f7343641aff983a314ef6161"), + message=b"hello again", + author=Person( + name=b"Roberto Dicosmo", + email=b"roberto@example.com", + fullname=b"Roberto Dicosmo ", + ), + date=TimestampWithTimezone( + timestamp=Timestamp(seconds=1234567843, microseconds=220000,), + offset=-720, + negative_utc=False, + ), + committer=Person( + name=b"tony", email=b"ar@dumont.fr", fullname=b"tony ", + ), + committer_date=TimestampWithTimezone( + timestamp=Timestamp(seconds=1123456789, microseconds=220000,), + offset=0, + negative_utc=False, + ), + parents=tuple([revision.id]), + type=RevisionType.GIT, + directory=directory2.id, + metadata=None, + extra_headers=(), + synthetic=False, + ) + revision3 = Revision( + id=hash_to_bytes("2cbd7bb22c653bbb23a29657852a50a01b591d46"), + message=b"a simple revision with no parents this time", + author=Person( + name=b"Roberto Dicosmo", + email=b"roberto@example.com", + fullname=b"Roberto Dicosmo ", + ), + date=TimestampWithTimezone( + timestamp=Timestamp(seconds=1234567843, microseconds=220000,), + offset=-720, + negative_utc=False, + ), + committer=Person( + name=b"tony", email=b"ar@dumont.fr", fullname=b"tony ", + ), + committer_date=TimestampWithTimezone( + timestamp=Timestamp(seconds=1127351742, microseconds=220000,), + offset=0, + negative_utc=False, + ), + parents=tuple([revision.id, revision2.id]), + type=RevisionType.GIT, + directory=directory2.id, + metadata=None, + extra_headers=(), + synthetic=True, + ) + revision4 = Revision( + id=hash_to_bytes("88cd5126fc958ed70089d5340441a1c2477bcc20"), + message=b"parent of self.revision2", + author=Person( + name=b"me", email=b"me@soft.heri", fullname=b"me ", + ), + date=TimestampWithTimezone( + timestamp=Timestamp(seconds=1234567843, microseconds=220000,), + offset=-720, + negative_utc=False, + ), + committer=Person( + name=b"committer-dude", + email=b"committer@dude.com", + fullname=b"committer-dude ", + ), + committer_date=TimestampWithTimezone( + timestamp=Timestamp(seconds=1244567843, microseconds=220000,), + offset=-720, + negative_utc=False, + ), + parents=tuple([revision3.id]), + type=RevisionType.GIT, + directory=directory.id, + metadata=None, + extra_headers=(), + synthetic=False, + ) + revisions: Tuple[Revision, ...] = (revision, revision2, revision3, revision4) + + origins: Tuple[Origin, ...] 
= ( + Origin(url="https://github.com/user1/repo1"), + Origin(url="https://github.com/user2/repo1"), + Origin(url="https://github.com/user3/repo1"), + Origin(url="https://gitlab.com/user1/repo1"), + Origin(url="https://gitlab.com/user2/repo1"), + Origin(url="https://forge.softwareheritage.org/source/repo1"), + ) + origin, origin2 = origins[:2] + + metadata_authority = MetadataAuthority( + type=MetadataAuthorityType.DEPOSIT_CLIENT, + url="http://hal.inria.example.com/", + metadata={"location": "France"}, + ) + metadata_authority2 = MetadataAuthority( + type=MetadataAuthorityType.REGISTRY, + url="http://wikidata.example.com/", + metadata={}, + ) + authorities: Tuple[MetadataAuthority, ...] = ( + metadata_authority, + metadata_authority2, + ) + + metadata_fetcher = MetadataFetcher( + name="swh-deposit", version="0.0.1", metadata={"sword_version": "2"}, + ) + metadata_fetcher2 = MetadataFetcher( + name="swh-example", version="0.0.1", metadata={}, + ) + fetchers: Tuple[MetadataFetcher, ...] = (metadata_fetcher, metadata_fetcher2) + + date_visit1 = datetime.datetime(2015, 1, 1, 23, 0, 0, tzinfo=datetime.timezone.utc) + date_visit2 = datetime.datetime(2017, 1, 1, 23, 0, 0, tzinfo=datetime.timezone.utc) + date_visit3 = datetime.datetime(2018, 1, 1, 23, 0, 0, tzinfo=datetime.timezone.utc) + + type_visit1 = "git" + type_visit2 = "hg" + type_visit3 = "deb" + + origin_visit = OriginVisit( + origin=origin.url, visit=1, date=date_visit1, type=type_visit1, + ) + origin_visit2 = OriginVisit( + origin=origin.url, visit=2, date=date_visit2, type=type_visit1, + ) + origin_visit3 = OriginVisit( + origin=origin2.url, visit=1, date=date_visit1, type=type_visit2, + ) + origin_visits: Tuple[OriginVisit, ...] = ( + origin_visit, + origin_visit2, + origin_visit3, + ) + + release = Release( + id=hash_to_bytes("a673e617fcc6234e29b2cad06b8245f96c415c61"), + name=b"v0.0.1", + author=Person( + name=b"olasd", email=b"nic@olasd.fr", fullname=b"olasd ", + ), + date=TimestampWithTimezone( + timestamp=Timestamp(seconds=1234567890, microseconds=0), + offset=42, + negative_utc=False, + ), + target=revision.id, + target_type=ObjectType.REVISION, + message=b"synthetic release", + synthetic=True, + ) + release2 = Release( + id=hash_to_bytes("6902bd4c82b7d19a421d224aedab2b74197e420d"), + name=b"v0.0.2", + author=Person( + name=b"tony", email=b"ar@dumont.fr", fullname=b"tony ", + ), + date=TimestampWithTimezone( + timestamp=Timestamp(seconds=1634366813, microseconds=0), + offset=-120, + negative_utc=False, + ), + target=revision2.id, + target_type=ObjectType.REVISION, + message=b"v0.0.2\nMisc performance improvements + bug fixes", + synthetic=False, + ) + release3 = Release( + id=hash_to_bytes("3e9050196aa288264f2a9d279d6abab8b158448b"), + name=b"v0.0.2", + author=Person( + name=b"tony", + email=b"tony@ardumont.fr", + fullname=b"tony ", + ), + date=TimestampWithTimezone( + timestamp=Timestamp(seconds=1634366813, microseconds=0), + offset=-120, + negative_utc=False, + ), + target=revision3.id, + target_type=ObjectType.REVISION, + message=b"yet another synthetic release", + synthetic=True, + ) + + releases: Tuple[Release, ...] 
= (release, release2, release3) + + snapshot = Snapshot( + id=hash_to_bytes("409ee1ff3f10d166714bc90581debfd0446dda57"), + branches={ + b"master": SnapshotBranch( + target=revision.id, target_type=TargetType.REVISION, ), - DirectoryEntry( - name=b"bar\xc3", - type="dir", - target=directory5.id, - perms=from_disk.DentryPerms.directory, + }, + ) + empty_snapshot = Snapshot( + id=hash_to_bytes("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"), branches={}, + ) + complete_snapshot = Snapshot( + id=hash_to_bytes("a56ce2d81c190023bb99a3a36279307522cb85f6"), + branches={ + b"directory": SnapshotBranch( + target=directory.id, target_type=TargetType.DIRECTORY, ), - ], - ), -) - -directory2 = Directory( - id=hash_to_bytes("8505808532953da7d2581741f01b29c04b1cb9ab"), - entries=tuple( - [ - DirectoryEntry( - name=b"oof", - type="file", - target=content2.sha1_git, - perms=from_disk.DentryPerms.content, - ) - ], - ), -) - -directory3 = Directory( - id=hash_to_bytes("4ea8c6b2f54445e5dd1a9d5bb2afd875d66f3150"), - entries=tuple( - [ - DirectoryEntry( - name=b"foo", - type="file", - target=content.sha1_git, - perms=from_disk.DentryPerms.content, + b"directory2": SnapshotBranch( + target=directory2.id, target_type=TargetType.DIRECTORY, ), - DirectoryEntry( - name=b"subdir", - type="dir", - target=directory.id, - perms=from_disk.DentryPerms.directory, + b"content": SnapshotBranch( + target=content.sha1_git, target_type=TargetType.CONTENT, ), - DirectoryEntry( - name=b"hello", - type="file", - target=directory5.id, - perms=from_disk.DentryPerms.content, + b"alias": SnapshotBranch(target=b"revision", target_type=TargetType.ALIAS,), + b"revision": SnapshotBranch( + target=revision.id, target_type=TargetType.REVISION, ), - ], - ), -) - -directory4 = Directory( - id=hash_to_bytes("377aa5fcd944fbabf502dbfda55cd14d33c8c3c6"), - entries=tuple( - [ - DirectoryEntry( - name=b"subdir1", - type="dir", - target=directory3.id, - perms=from_disk.DentryPerms.directory, - ) - ], - ), -) - -directories = (directory2, directory, directory3, directory4, directory5) - -minus_offset = datetime.timezone(datetime.timedelta(minutes=-120)) -plus_offset = datetime.timezone(datetime.timedelta(minutes=120)) - -revision = Revision( - id=hash_to_bytes("066b1b62dbfa033362092af468bf6cfabec230e7"), - message=b"hello", - author=Person( - name=b"Nicolas Dandrimont", - email=b"nicolas@example.com", - fullname=b"Nicolas Dandrimont ", - ), - date=TimestampWithTimezone( - timestamp=Timestamp(seconds=1234567890, microseconds=0), - offset=120, - negative_utc=False, - ), - committer=Person( - name=b"St\xc3fano Zacchiroli", - email=b"stefano@example.com", - fullname=b"St\xc3fano Zacchiroli ", - ), - committer_date=TimestampWithTimezone( - timestamp=Timestamp(seconds=1123456789, microseconds=0), - offset=120, - negative_utc=False, - ), - parents=(), - type=RevisionType.GIT, - directory=directory.id, - metadata={ - "checksums": {"sha1": "tarball-sha1", "sha256": "tarball-sha256",}, - "signed-off-by": "some-dude", - }, - extra_headers=( - (b"gpgsig", b"test123"), - (b"mergetag", b"foo\\bar"), - (b"mergetag", b"\x22\xaf\x89\x80\x01\x00"), - ), - synthetic=True, -) - -revision2 = Revision( - id=hash_to_bytes("df7a6f6a99671fb7f7343641aff983a314ef6161"), - message=b"hello again", - author=Person( - name=b"Roberto Dicosmo", - email=b"roberto@example.com", - fullname=b"Roberto Dicosmo ", - ), - date=TimestampWithTimezone( - timestamp=Timestamp(seconds=1234567843, microseconds=220000,), - offset=-720, - negative_utc=False, - ), - committer=Person( - name=b"tony", 
email=b"ar@dumont.fr", fullname=b"tony ", - ), - committer_date=TimestampWithTimezone( - timestamp=Timestamp(seconds=1123456789, microseconds=220000,), - offset=0, - negative_utc=False, - ), - parents=tuple([revision.id]), - type=RevisionType.GIT, - directory=directory2.id, - metadata=None, - extra_headers=(), - synthetic=False, -) - -revision3 = Revision( - id=hash_to_bytes("2cbd7bb22c653bbb23a29657852a50a01b591d46"), - message=b"a simple revision with no parents this time", - author=Person( - name=b"Roberto Dicosmo", - email=b"roberto@example.com", - fullname=b"Roberto Dicosmo ", - ), - date=TimestampWithTimezone( - timestamp=Timestamp(seconds=1234567843, microseconds=220000,), - offset=-720, - negative_utc=False, - ), - committer=Person( - name=b"tony", email=b"ar@dumont.fr", fullname=b"tony ", - ), - committer_date=TimestampWithTimezone( - timestamp=Timestamp(seconds=1127351742, microseconds=220000,), - offset=0, - negative_utc=False, - ), - parents=tuple([revision.id, revision2.id]), - type=RevisionType.GIT, - directory=directory2.id, - metadata=None, - extra_headers=(), - synthetic=True, -) - -revision4 = Revision( - id=hash_to_bytes("88cd5126fc958ed70089d5340441a1c2477bcc20"), - message=b"parent of self.revision2", - author=Person(name=b"me", email=b"me@soft.heri", fullname=b"me ",), - date=TimestampWithTimezone( - timestamp=Timestamp(seconds=1234567843, microseconds=220000,), - offset=-720, - negative_utc=False, - ), - committer=Person( - name=b"committer-dude", - email=b"committer@dude.com", - fullname=b"committer-dude ", - ), - committer_date=TimestampWithTimezone( - timestamp=Timestamp(seconds=1244567843, microseconds=220000,), - offset=-720, - negative_utc=False, - ), - parents=tuple([revision3.id]), - type=RevisionType.GIT, - directory=directory.id, - metadata=None, - extra_headers=(), - synthetic=False, -) - -revisions = (revision, revision2, revision3, revision4) - -origins = ( - Origin(url="https://github.com/user1/repo1"), - Origin(url="https://github.com/user2/repo1"), - Origin(url="https://github.com/user3/repo1"), - Origin(url="https://gitlab.com/user1/repo1"), - Origin(url="https://gitlab.com/user2/repo1"), - Origin(url="https://forge.softwareheritage.org/source/repo1"), -) - -origin, origin2 = origins[:2] - -metadata_authority = MetadataAuthority( - type=MetadataAuthorityType.DEPOSIT_CLIENT, - url="http://hal.inria.example.com/", - metadata={"location": "France"}, -) -metadata_authority2 = MetadataAuthority( - type=MetadataAuthorityType.REGISTRY, - url="http://wikidata.example.com/", - metadata={}, -) - -authorities = (metadata_authority, metadata_authority2) - -metadata_fetcher = MetadataFetcher( - name="swh-deposit", version="0.0.1", metadata={"sword_version": "2"}, -) -metadata_fetcher2 = MetadataFetcher(name="swh-example", version="0.0.1", metadata={},) - -fetchers = (metadata_fetcher, metadata_fetcher2) - -date_visit1 = datetime.datetime(2015, 1, 1, 23, 0, 0, tzinfo=datetime.timezone.utc) -type_visit1 = "git" - -date_visit2 = datetime.datetime(2017, 1, 1, 23, 0, 0, tzinfo=datetime.timezone.utc) -type_visit2 = "hg" - -date_visit3 = datetime.datetime(2018, 1, 1, 23, 0, 0, tzinfo=datetime.timezone.utc) -type_visit3 = "deb" - -origin_visit = OriginVisit( - origin=origin.url, visit=1, date=date_visit1, type=type_visit1, -) - -origin_visit2 = OriginVisit( - origin=origin.url, visit=2, date=date_visit2, type=type_visit1, -) - -origin_visit3 = OriginVisit( - origin=origin2.url, visit=1, date=date_visit1, type=type_visit2, -) - -origin_visits = (origin_visit, 
origin_visit2, origin_visit3) - -release = Release( - id=hash_to_bytes("a673e617fcc6234e29b2cad06b8245f96c415c61"), - name=b"v0.0.1", - author=Person( - name=b"olasd", email=b"nic@olasd.fr", fullname=b"olasd ", - ), - date=TimestampWithTimezone( - timestamp=Timestamp(seconds=1234567890, microseconds=0), - offset=42, - negative_utc=False, - ), - target=revision.id, - target_type=ObjectType.REVISION, - message=b"synthetic release", - synthetic=True, -) - -release2 = Release( - id=hash_to_bytes("6902bd4c82b7d19a421d224aedab2b74197e420d"), - name=b"v0.0.2", - author=Person( - name=b"tony", email=b"ar@dumont.fr", fullname=b"tony ", - ), - date=TimestampWithTimezone( - timestamp=Timestamp(seconds=1634366813, microseconds=0), - offset=-120, - negative_utc=False, - ), - target=revision2.id, - target_type=ObjectType.REVISION, - message=b"v0.0.2\nMisc performance improvements + bug fixes", - synthetic=False, -) - -release3 = Release( - id=hash_to_bytes("3e9050196aa288264f2a9d279d6abab8b158448b"), - name=b"v0.0.2", - author=Person( - name=b"tony", email=b"tony@ardumont.fr", fullname=b"tony ", - ), - date=TimestampWithTimezone( - timestamp=Timestamp(seconds=1634366813, microseconds=0), - offset=-120, - negative_utc=False, - ), - target=revision3.id, - target_type=ObjectType.REVISION, - message=b"yet another synthetic release", - synthetic=True, -) - -releases = (release, release2, release3) - -snapshot = Snapshot( - id=hash_to_bytes("409ee1ff3f10d166714bc90581debfd0446dda57"), - branches={ - b"master": SnapshotBranch(target=revision.id, target_type=TargetType.REVISION,), - }, -) - -empty_snapshot = Snapshot( - id=hash_to_bytes("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"), branches={}, -) - -complete_snapshot = Snapshot( - id=hash_to_bytes("a56ce2d81c190023bb99a3a36279307522cb85f6"), - branches={ - b"directory": SnapshotBranch( - target=directory.id, target_type=TargetType.DIRECTORY, + b"release": SnapshotBranch( + target=release.id, target_type=TargetType.RELEASE, + ), + b"snapshot": SnapshotBranch( + target=empty_snapshot.id, target_type=TargetType.SNAPSHOT, + ), + b"dangling": None, + }, + ) + + snapshots: Tuple[Snapshot, ...] 
= (snapshot, empty_snapshot, complete_snapshot) + + content_metadata1 = RawExtrinsicMetadata( + type=MetadataTargetType.CONTENT, + id=parse_swhid(f"swh:1:cnt:{hash_to_hex(content.sha1_git)}"), + origin=origin.url, + discovery_date=datetime.datetime( + 2015, 1, 1, 21, 0, 0, tzinfo=datetime.timezone.utc ), - b"directory2": SnapshotBranch( - target=directory2.id, target_type=TargetType.DIRECTORY, + authority=attr.evolve(metadata_authority, metadata=None), + fetcher=attr.evolve(metadata_fetcher, metadata=None), + format="json", + metadata=b'{"foo": "bar"}', + ) + content_metadata2 = RawExtrinsicMetadata( + type=MetadataTargetType.CONTENT, + id=parse_swhid(f"swh:1:cnt:{hash_to_hex(content.sha1_git)}"), + origin=origin2.url, + discovery_date=datetime.datetime( + 2017, 1, 1, 22, 0, 0, tzinfo=datetime.timezone.utc ), - b"content": SnapshotBranch( - target=content.sha1_git, target_type=TargetType.CONTENT, + authority=attr.evolve(metadata_authority, metadata=None), + fetcher=attr.evolve(metadata_fetcher, metadata=None), + format="yaml", + metadata=b"foo: bar", + ) + content_metadata3 = RawExtrinsicMetadata( + type=MetadataTargetType.CONTENT, + id=parse_swhid(f"swh:1:cnt:{hash_to_hex(content.sha1_git)}"), + discovery_date=datetime.datetime( + 2017, 1, 1, 22, 0, 0, tzinfo=datetime.timezone.utc ), - b"alias": SnapshotBranch(target=b"revision", target_type=TargetType.ALIAS,), - b"revision": SnapshotBranch( - target=revision.id, target_type=TargetType.REVISION, + authority=attr.evolve(metadata_authority2, metadata=None), + fetcher=attr.evolve(metadata_fetcher2, metadata=None), + format="yaml", + metadata=b"foo: bar", + origin=origin.url, + visit=42, + snapshot=parse_swhid(f"swh:1:snp:{hash_to_hex(snapshot.id)}"), + release=parse_swhid(f"swh:1:rel:{hash_to_hex(release.id)}"), + revision=parse_swhid(f"swh:1:rev:{hash_to_hex(revision.id)}"), + directory=parse_swhid(f"swh:1:dir:{hash_to_hex(directory.id)}"), + path=b"/foo/bar", + ) + + content_metadata: Tuple[RawExtrinsicMetadata, ...] 
= ( + content_metadata1, + content_metadata2, + content_metadata3, + ) + + origin_metadata1 = RawExtrinsicMetadata( + type=MetadataTargetType.ORIGIN, + id=origin.url, + discovery_date=datetime.datetime( + 2015, 1, 1, 21, 0, 0, tzinfo=datetime.timezone.utc ), - b"release": SnapshotBranch(target=release.id, target_type=TargetType.RELEASE,), - b"snapshot": SnapshotBranch( - target=empty_snapshot.id, target_type=TargetType.SNAPSHOT, + authority=attr.evolve(metadata_authority, metadata=None), + fetcher=attr.evolve(metadata_fetcher, metadata=None), + format="json", + metadata=b'{"foo": "bar"}', + ) + origin_metadata2 = RawExtrinsicMetadata( + type=MetadataTargetType.ORIGIN, + id=origin.url, + discovery_date=datetime.datetime( + 2017, 1, 1, 22, 0, 0, tzinfo=datetime.timezone.utc ), - b"dangling": None, - }, -) - -snapshots = (snapshot, empty_snapshot, complete_snapshot) - -content_metadata1 = RawExtrinsicMetadata( - type=MetadataTargetType.CONTENT, - id=parse_swhid(f"swh:1:cnt:{hash_to_hex(content.sha1_git)}"), - origin=origin.url, - discovery_date=datetime.datetime( - 2015, 1, 1, 21, 0, 0, tzinfo=datetime.timezone.utc - ), - authority=attr.evolve(metadata_authority, metadata=None), - fetcher=attr.evolve(metadata_fetcher, metadata=None), - format="json", - metadata=b'{"foo": "bar"}', -) -content_metadata2 = RawExtrinsicMetadata( - type=MetadataTargetType.CONTENT, - id=parse_swhid(f"swh:1:cnt:{hash_to_hex(content.sha1_git)}"), - origin=origin2.url, - discovery_date=datetime.datetime( - 2017, 1, 1, 22, 0, 0, tzinfo=datetime.timezone.utc - ), - authority=attr.evolve(metadata_authority, metadata=None), - fetcher=attr.evolve(metadata_fetcher, metadata=None), - format="yaml", - metadata=b"foo: bar", -) -content_metadata3 = RawExtrinsicMetadata( - type=MetadataTargetType.CONTENT, - id=parse_swhid(f"swh:1:cnt:{hash_to_hex(content.sha1_git)}"), - discovery_date=datetime.datetime( - 2017, 1, 1, 22, 0, 0, tzinfo=datetime.timezone.utc - ), - authority=attr.evolve(metadata_authority2, metadata=None), - fetcher=attr.evolve(metadata_fetcher2, metadata=None), - format="yaml", - metadata=b"foo: bar", - origin=origin.url, - visit=42, - snapshot=parse_swhid(f"swh:1:snp:{hash_to_hex(snapshot.id)}"), - release=parse_swhid(f"swh:1:rel:{hash_to_hex(release.id)}"), - revision=parse_swhid(f"swh:1:rev:{hash_to_hex(revision.id)}"), - directory=parse_swhid(f"swh:1:dir:{hash_to_hex(directory.id)}"), - path=b"/foo/bar", -) - -content_metadata = ( - content_metadata1, - content_metadata2, - content_metadata3, -) - -origin_metadata1 = RawExtrinsicMetadata( - type=MetadataTargetType.ORIGIN, - id=origin.url, - discovery_date=datetime.datetime( - 2015, 1, 1, 21, 0, 0, tzinfo=datetime.timezone.utc - ), - authority=attr.evolve(metadata_authority, metadata=None), - fetcher=attr.evolve(metadata_fetcher, metadata=None), - format="json", - metadata=b'{"foo": "bar"}', -) -origin_metadata2 = RawExtrinsicMetadata( - type=MetadataTargetType.ORIGIN, - id=origin.url, - discovery_date=datetime.datetime( - 2017, 1, 1, 22, 0, 0, tzinfo=datetime.timezone.utc - ), - authority=attr.evolve(metadata_authority, metadata=None), - fetcher=attr.evolve(metadata_fetcher, metadata=None), - format="yaml", - metadata=b"foo: bar", -) -origin_metadata3 = RawExtrinsicMetadata( - type=MetadataTargetType.ORIGIN, - id=origin.url, - discovery_date=datetime.datetime( - 2017, 1, 1, 22, 0, 0, tzinfo=datetime.timezone.utc - ), - authority=attr.evolve(metadata_authority2, metadata=None), - fetcher=attr.evolve(metadata_fetcher2, metadata=None), - format="yaml", - 
metadata=b"foo: bar", -) - -origin_metadata = ( - origin_metadata1, - origin_metadata2, - origin_metadata3, -) + authority=attr.evolve(metadata_authority, metadata=None), + fetcher=attr.evolve(metadata_fetcher, metadata=None), + format="yaml", + metadata=b"foo: bar", + ) + origin_metadata3 = RawExtrinsicMetadata( + type=MetadataTargetType.ORIGIN, + id=origin.url, + discovery_date=datetime.datetime( + 2017, 1, 1, 22, 0, 0, tzinfo=datetime.timezone.utc + ), + authority=attr.evolve(metadata_authority2, metadata=None), + fetcher=attr.evolve(metadata_fetcher2, metadata=None), + format="yaml", + metadata=b"foo: bar", + ) + + origin_metadata: Tuple[RawExtrinsicMetadata, ...] = ( + origin_metadata1, + origin_metadata2, + origin_metadata3, + ) diff --git a/swh/storage/tests/test_buffer.py b/swh/storage/tests/test_buffer.py index e98eb768..9e3757f0 100644 --- a/swh/storage/tests/test_buffer.py +++ b/swh/storage/tests/test_buffer.py @@ -1,401 +1,399 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from swh.storage import get_storage def get_storage_with_buffer_config(**buffer_config): storage_config = { "cls": "pipeline", "steps": [{"cls": "buffer", **buffer_config}, {"cls": "memory"},], } return get_storage(**storage_config) def test_buffering_proxy_storage_content_threshold_not_hit(sample_data): - contents = sample_data["content"][:2] + contents = sample_data.contents[:2] contents_dict = [c.to_dict() for c in contents] storage = get_storage_with_buffer_config(min_batch_size={"content": 10,}) s = storage.content_add(contents) assert s == {} # contents have not been written to storage missing_contents = storage.content_missing(contents_dict) assert set(missing_contents) == set([contents[0].sha1, contents[1].sha1]) s = storage.flush() assert s == { "content:add": 1 + 1, "content:add:bytes": contents[0].length + contents[1].length, } missing_contents = storage.content_missing(contents_dict) assert list(missing_contents) == [] def test_buffering_proxy_storage_content_threshold_nb_hit(sample_data): - content = sample_data["content"][0] + content = sample_data.content content_dict = content.to_dict() storage = get_storage_with_buffer_config(min_batch_size={"content": 1,}) s = storage.content_add([content]) assert s == { "content:add": 1, "content:add:bytes": content.length, } missing_contents = storage.content_missing([content_dict]) assert list(missing_contents) == [] s = storage.flush() assert s == {} def test_buffering_proxy_storage_content_deduplicate(sample_data): - contents = sample_data["content"][:2] + contents = sample_data.contents[:2] storage = get_storage_with_buffer_config(min_batch_size={"content": 2,}) s = storage.content_add([contents[0], contents[0]]) assert s == {} s = storage.content_add([contents[0]]) assert s == {} s = storage.content_add([contents[1]]) assert s == { "content:add": 1 + 1, "content:add:bytes": contents[0].length + contents[1].length, } missing_contents = storage.content_missing([c.to_dict() for c in contents]) assert list(missing_contents) == [] s = storage.flush() assert s == {} def test_buffering_proxy_storage_content_threshold_bytes_hit(sample_data): - contents = sample_data["content"][:2] + contents = sample_data.contents[:2] content_bytes_min_batch_size = 2 storage = get_storage_with_buffer_config( min_batch_size={"content": 10, "content_bytes": 
content_bytes_min_batch_size,} ) assert contents[0].length > content_bytes_min_batch_size s = storage.content_add([contents[0]]) assert s == { "content:add": 1, "content:add:bytes": contents[0].length, } missing_contents = storage.content_missing([contents[0].to_dict()]) assert list(missing_contents) == [] s = storage.flush() assert s == {} def test_buffering_proxy_storage_skipped_content_threshold_not_hit(sample_data): - contents = sample_data["skipped_content"] + contents = sample_data.skipped_contents contents_dict = [c.to_dict() for c in contents] storage = get_storage_with_buffer_config(min_batch_size={"skipped_content": 10,}) s = storage.skipped_content_add([contents[0], contents[1]]) assert s == {} # contents have not been written to storage missing_contents = storage.skipped_content_missing(contents_dict) assert {c["sha1"] for c in missing_contents} == {c.sha1 for c in contents} s = storage.flush() assert s == {"skipped_content:add": 1 + 1} missing_contents = storage.skipped_content_missing(contents_dict) assert list(missing_contents) == [] def test_buffering_proxy_storage_skipped_content_threshold_nb_hit(sample_data): - contents = sample_data["skipped_content"] + contents = sample_data.skipped_contents storage = get_storage_with_buffer_config(min_batch_size={"skipped_content": 1,}) s = storage.skipped_content_add([contents[0]]) assert s == {"skipped_content:add": 1} missing_contents = storage.skipped_content_missing([contents[0].to_dict()]) assert list(missing_contents) == [] s = storage.flush() assert s == {} def test_buffering_proxy_storage_skipped_content_deduplicate(sample_data): - contents = sample_data["skipped_content"][:2] + contents = sample_data.skipped_contents[:2] storage = get_storage_with_buffer_config(min_batch_size={"skipped_content": 2,}) s = storage.skipped_content_add([contents[0], contents[0]]) assert s == {} s = storage.skipped_content_add([contents[0]]) assert s == {} s = storage.skipped_content_add([contents[1]]) assert s == { "skipped_content:add": 1 + 1, } missing_contents = storage.skipped_content_missing([c.to_dict() for c in contents]) assert list(missing_contents) == [] s = storage.flush() assert s == {} def test_buffering_proxy_storage_directory_threshold_not_hit(sample_data): - directories = sample_data["directory"] + directory = sample_data.directory storage = get_storage_with_buffer_config(min_batch_size={"directory": 10,}) - s = storage.directory_add([directories[0]]) + s = storage.directory_add([directory]) assert s == {} - directory_id = directories[0].id - missing_directories = storage.directory_missing([directory_id]) - assert list(missing_directories) == [directory_id] + missing_directories = storage.directory_missing([directory.id]) + assert list(missing_directories) == [directory.id] s = storage.flush() assert s == { "directory:add": 1, } - missing_directories = storage.directory_missing([directory_id]) + missing_directories = storage.directory_missing([directory.id]) assert list(missing_directories) == [] def test_buffering_proxy_storage_directory_threshold_hit(sample_data): - directories = sample_data["directory"] + directory = sample_data.directory storage = get_storage_with_buffer_config(min_batch_size={"directory": 1,}) - s = storage.directory_add([directories[0]]) + s = storage.directory_add([directory]) assert s == { "directory:add": 1, } - missing_directories = storage.directory_missing([directories[0].id]) + missing_directories = storage.directory_missing([directory.id]) assert list(missing_directories) == [] s = storage.flush() 
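To make the threshold semantics these tests exercise concrete, here is a hypothetical walkthrough (assuming only the get_storage_with_buffer_config helper defined at the top of this file and the sample_data fixture): adds below min_batch_size are buffered and report an empty summary, the add call that reaches the threshold flushes the buffer and reports the accumulated counts, and flush() drains whatever remains.

def test_buffer_threshold_walkthrough(sample_data):
    storage = get_storage_with_buffer_config(min_batch_size={"revision": 2})
    r1, r2, r3 = sample_data.revisions[:3]

    assert storage.revision_add([r1]) == {}  # 1 < 2: buffered only
    assert storage.revision_add([r2]) == {"revision:add": 2}  # threshold reached
    assert storage.revision_add([r3]) == {}  # buffered again
    assert storage.flush() == {"revision:add": 1}  # drain the remainder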
assert s == {} def test_buffering_proxy_storage_directory_deduplicate(sample_data): - directories = sample_data["directory"][:2] + directories = sample_data.directories[:2] storage = get_storage_with_buffer_config(min_batch_size={"directory": 2,}) s = storage.directory_add([directories[0], directories[0]]) assert s == {} s = storage.directory_add([directories[0]]) assert s == {} s = storage.directory_add([directories[1]]) assert s == { "directory:add": 1 + 1, } missing_directories = storage.directory_missing([d.id for d in directories]) assert list(missing_directories) == [] s = storage.flush() assert s == {} def test_buffering_proxy_storage_revision_threshold_not_hit(sample_data): - revisions = sample_data["revision"] + revision = sample_data.revision storage = get_storage_with_buffer_config(min_batch_size={"revision": 10,}) - s = storage.revision_add([revisions[0]]) + s = storage.revision_add([revision]) assert s == {} - revision_id = revisions[0].id - missing_revisions = storage.revision_missing([revision_id]) - assert list(missing_revisions) == [revision_id] + missing_revisions = storage.revision_missing([revision.id]) + assert list(missing_revisions) == [revision.id] s = storage.flush() assert s == { "revision:add": 1, } - missing_revisions = storage.revision_missing([revision_id]) + missing_revisions = storage.revision_missing([revision.id]) assert list(missing_revisions) == [] def test_buffering_proxy_storage_revision_threshold_hit(sample_data): - revisions = sample_data["revision"] + revision = sample_data.revision storage = get_storage_with_buffer_config(min_batch_size={"revision": 1,}) - s = storage.revision_add([revisions[0]]) + s = storage.revision_add([revision]) assert s == { "revision:add": 1, } - missing_revisions = storage.revision_missing([revisions[0].id]) + missing_revisions = storage.revision_missing([revision.id]) assert list(missing_revisions) == [] s = storage.flush() assert s == {} def test_buffering_proxy_storage_revision_deduplicate(sample_data): - revisions = sample_data["revision"][:2] + revisions = sample_data.revisions[:2] storage = get_storage_with_buffer_config(min_batch_size={"revision": 2,}) s = storage.revision_add([revisions[0], revisions[0]]) assert s == {} s = storage.revision_add([revisions[0]]) assert s == {} s = storage.revision_add([revisions[1]]) assert s == { "revision:add": 1 + 1, } missing_revisions = storage.revision_missing([r.id for r in revisions]) assert list(missing_revisions) == [] s = storage.flush() assert s == {} def test_buffering_proxy_storage_release_threshold_not_hit(sample_data): - releases = sample_data["release"] + releases = sample_data.releases threshold = 10 assert len(releases) < threshold storage = get_storage_with_buffer_config( min_batch_size={"release": threshold,} # configuration set ) s = storage.release_add(releases) assert s == {} release_ids = [r.id for r in releases] missing_releases = storage.release_missing(release_ids) assert list(missing_releases) == release_ids s = storage.flush() assert s == { "release:add": len(releases), } missing_releases = storage.release_missing(release_ids) assert list(missing_releases) == [] def test_buffering_proxy_storage_release_threshold_hit(sample_data): - releases = sample_data["release"] + releases = sample_data.releases threshold = 2 assert len(releases) > threshold storage = get_storage_with_buffer_config( min_batch_size={"release": threshold,} # configuration set ) s = storage.release_add(releases) assert s == { "release:add": len(releases), } release_ids = [r.id for r in 
releases] missing_releases = storage.release_missing(release_ids) assert list(missing_releases) == [] s = storage.flush() assert s == {} def test_buffering_proxy_storage_release_deduplicate(sample_data): - releases = sample_data["release"][:2] + releases = sample_data.releases[:2] storage = get_storage_with_buffer_config(min_batch_size={"release": 2,}) s = storage.release_add([releases[0], releases[0]]) assert s == {} s = storage.release_add([releases[0]]) assert s == {} s = storage.release_add([releases[1]]) assert s == { "release:add": 1 + 1, } missing_releases = storage.release_missing([r.id for r in releases]) assert list(missing_releases) == [] s = storage.flush() assert s == {} def test_buffering_proxy_storage_clear(sample_data): """Clear operation on buffer """ threshold = 10 - contents = sample_data["content"] + contents = sample_data.contents assert 0 < len(contents) < threshold - skipped_contents = sample_data["skipped_content"] + skipped_contents = sample_data.skipped_contents assert 0 < len(skipped_contents) < threshold - directories = sample_data["directory"] + directories = sample_data.directories assert 0 < len(directories) < threshold - revisions = sample_data["revision"] + revisions = sample_data.revisions assert 0 < len(revisions) < threshold - releases = sample_data["release"] + releases = sample_data.releases assert 0 < len(releases) < threshold storage = get_storage_with_buffer_config( min_batch_size={ "content": threshold, "skipped_content": threshold, "directory": threshold, "revision": threshold, "release": threshold, } ) s = storage.content_add(contents) assert s == {} s = storage.skipped_content_add(skipped_contents) assert s == {} s = storage.directory_add(directories) assert s == {} s = storage.revision_add(revisions) assert s == {} s = storage.release_add(releases) assert s == {} assert len(storage._objects["content"]) == len(contents) assert len(storage._objects["skipped_content"]) == len(skipped_contents) assert len(storage._objects["directory"]) == len(directories) assert len(storage._objects["revision"]) == len(revisions) assert len(storage._objects["release"]) == len(releases) # clear only content from the buffer s = storage.clear_buffers(["content"]) assert s is None # specific clear operation on specific object type content only touched # them assert len(storage._objects["content"]) == 0 assert len(storage._objects["skipped_content"]) == len(skipped_contents) assert len(storage._objects["directory"]) == len(directories) assert len(storage._objects["revision"]) == len(revisions) assert len(storage._objects["release"]) == len(releases) # clear current buffer from all object types s = storage.clear_buffers() assert s is None assert len(storage._objects["content"]) == 0 assert len(storage._objects["skipped_content"]) == 0 assert len(storage._objects["directory"]) == 0 assert len(storage._objects["revision"]) == 0 assert len(storage._objects["release"]) == 0 diff --git a/swh/storage/tests/test_cassandra.py b/swh/storage/tests/test_cassandra.py index c1e4d4f9..870ee59c 100644 --- a/swh/storage/tests/test_cassandra.py +++ b/swh/storage/tests/test_cassandra.py @@ -1,396 +1,396 @@ # Copyright (C) 2018-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import attr import os import signal import socket import subprocess import time from collections import namedtuple import pytest from 
swh.storage import get_storage from swh.storage.cassandra import create_keyspace from swh.storage.cassandra.schema import TABLES, HASH_ALGORITHMS from swh.storage.utils import now from swh.storage.tests.test_storage import TestStorage as _TestStorage from swh.storage.tests.test_storage import ( TestStorageGeneratedData as _TestStorageGeneratedData, ) CONFIG_TEMPLATE = """ data_file_directories: - {data_dir}/data commitlog_directory: {data_dir}/commitlog hints_directory: {data_dir}/hints saved_caches_directory: {data_dir}/saved_caches commitlog_sync: periodic commitlog_sync_period_in_ms: 1000000 partitioner: org.apache.cassandra.dht.Murmur3Partitioner endpoint_snitch: SimpleSnitch seed_provider: - class_name: org.apache.cassandra.locator.SimpleSeedProvider parameters: - seeds: "127.0.0.1" storage_port: {storage_port} native_transport_port: {native_transport_port} start_native_transport: true listen_address: 127.0.0.1 enable_user_defined_functions: true # speed-up by disabling period saving to disk key_cache_save_period: 0 row_cache_save_period: 0 trickle_fsync: false commitlog_sync_period_in_ms: 100000 """ def free_port(): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.bind(("127.0.0.1", 0)) port = sock.getsockname()[1] sock.close() return port def wait_for_peer(addr, port): wait_until = time.time() + 20 while time.time() < wait_until: try: sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.connect((addr, port)) except ConnectionRefusedError: time.sleep(0.1) else: sock.close() return True return False @pytest.fixture(scope="session") def cassandra_cluster(tmpdir_factory): cassandra_conf = tmpdir_factory.mktemp("cassandra_conf") cassandra_data = tmpdir_factory.mktemp("cassandra_data") cassandra_log = tmpdir_factory.mktemp("cassandra_log") native_transport_port = free_port() storage_port = free_port() jmx_port = free_port() with open(str(cassandra_conf.join("cassandra.yaml")), "w") as fd: fd.write( CONFIG_TEMPLATE.format( data_dir=str(cassandra_data), storage_port=storage_port, native_transport_port=native_transport_port, ) ) if os.environ.get("SWH_CASSANDRA_LOG"): stdout = stderr = None else: stdout = stderr = subprocess.DEVNULL cassandra_bin = os.environ.get("SWH_CASSANDRA_BIN", "/usr/sbin/cassandra") proc = subprocess.Popen( [ cassandra_bin, "-Dcassandra.config=file://%s/cassandra.yaml" % cassandra_conf, "-Dcassandra.logdir=%s" % cassandra_log, "-Dcassandra.jmx.local.port=%d" % jmx_port, "-Dcassandra-foreground=yes", ], start_new_session=True, env={ "MAX_HEAP_SIZE": "300M", "HEAP_NEWSIZE": "50M", "JVM_OPTS": "-Xlog:gc=error:file=%s/gc.log" % cassandra_log, }, stdout=stdout, stderr=stderr, ) running = wait_for_peer("127.0.0.1", native_transport_port) if running: yield (["127.0.0.1"], native_transport_port) if not running or os.environ.get("SWH_CASSANDRA_LOG"): debug_log_path = str(cassandra_log.join("debug.log")) if os.path.exists(debug_log_path): with open(debug_log_path) as fd: print(fd.read()) if not running: raise Exception("cassandra process stopped unexpectedly.") pgrp = os.getpgid(proc.pid) os.killpg(pgrp, signal.SIGKILL) class RequestHandler: def on_request(self, rf): if hasattr(rf.message, "query"): print() print(rf.message.query) @pytest.fixture(scope="session") def keyspace(cassandra_cluster): (hosts, port) = cassandra_cluster keyspace = os.urandom(10).hex() create_keyspace(hosts, keyspace, port) return keyspace # tests are executed using imported classes (TestStorage and # TestStorageGeneratedData) using overloaded swh_storage fixture # below 
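# The fixtures below keep the Cassandra tests fast: the schema is created once
# per session under a randomly named keyspace, and tables are only truncated
# between tests. A generic sketch of that yield-then-clean pattern, assuming
# nothing about the real backend: TABLES and execute_cql are placeholders for
# the actual schema list and driver session.
import pytest

TABLES = ["content", "directory", "revision"]  # placeholder table list


def execute_cql(statement: str) -> None:
    print("CQL>", statement)  # placeholder for a real Cassandra session


@pytest.fixture
def storage_config_sketch():
    config = {"cls": "cassandra", "keyspace": "tests"}
    yield config  # the test body runs here
    for table in TABLES:
        # truncating is far cheaper than dropping and re-creating the keyspace
        execute_cql('TRUNCATE TABLE "%s"' % table)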
@pytest.fixture def swh_storage_backend_config(cassandra_cluster, keyspace): (hosts, port) = cassandra_cluster storage_config = dict( cls="cassandra", hosts=hosts, port=port, keyspace=keyspace, journal_writer={"cls": "memory",}, objstorage={"cls": "memory", "args": {},}, ) yield storage_config storage = get_storage(**storage_config) for table in TABLES: storage._cql_runner._session.execute('TRUNCATE TABLE "%s"' % table) storage._cql_runner._cluster.shutdown() @pytest.mark.cassandra class TestCassandraStorage(_TestStorage): def test_content_add_murmur3_collision(self, swh_storage, mocker, sample_data): """The Murmur3 token is used as link from index tables to the main table; and non-matching contents with colliding murmur3-hash are filtered-out when reading the main table. This test checks the content methods do filter out these collision. """ called = 0 - cont, cont2 = sample_data["content"][:2] + cont, cont2 = sample_data.contents[:2] # always return a token def mock_cgtfsh(algo, hash_): nonlocal called called += 1 assert algo in ("sha1", "sha1_git") return [123456] mocker.patch.object( swh_storage._cql_runner, "content_get_tokens_from_single_hash", mock_cgtfsh, ) # For all tokens, always return cont Row = namedtuple("Row", HASH_ALGORITHMS) def mock_cgft(token): nonlocal called called += 1 return [Row(**{algo: getattr(cont, algo) for algo in HASH_ALGORITHMS})] mocker.patch.object( swh_storage._cql_runner, "content_get_from_token", mock_cgft ) actual_result = swh_storage.content_add([cont2]) assert called == 4 assert actual_result == { "content:add": 1, "content:add:bytes": cont2.length, } def test_content_get_metadata_murmur3_collision( self, swh_storage, mocker, sample_data ): """The Murmur3 token is used as link from index tables to the main table; and non-matching contents with colliding murmur3-hash are filtered-out when reading the main table. This test checks the content methods do filter out these collisions. """ called = 0 - cont, cont2 = [attr.evolve(c, ctime=now()) for c in sample_data["content"][:2]] + cont, cont2 = [attr.evolve(c, ctime=now()) for c in sample_data.contents[:2]] # always return a token def mock_cgtfsh(algo, hash_): nonlocal called called += 1 assert algo in ("sha1", "sha1_git") return [123456] mocker.patch.object( swh_storage._cql_runner, "content_get_tokens_from_single_hash", mock_cgtfsh, ) # For all tokens, always return cont and cont2 cols = list(set(cont.to_dict()) - {"data"}) Row = namedtuple("Row", cols) def mock_cgft(token): nonlocal called called += 1 return [ Row(**{col: getattr(cont, col) for col in cols}) for cont in [cont, cont2] ] mocker.patch.object( swh_storage._cql_runner, "content_get_from_token", mock_cgft ) actual_result = swh_storage.content_get_metadata([cont.sha1]) assert called == 2 # dropping extra column not returned expected_cont = attr.evolve(cont, data=None, ctime=None).to_dict() del expected_cont["ctime"] # forced to pop it as to_dict does not # but cont2 should be filtered out assert actual_result == {cont.sha1: [expected_cont]} def test_content_find_murmur3_collision(self, swh_storage, mocker, sample_data): """The Murmur3 token is used as link from index tables to the main table; and non-matching contents with colliding murmur3-hash are filtered-out when reading the main table. This test checks the content methods do filter out these collisions. 
""" called = 0 - cont, cont2 = [attr.evolve(c, ctime=now()) for c in sample_data["content"][:2]] + cont, cont2 = [attr.evolve(c, ctime=now()) for c in sample_data.contents[:2]] # always return a token def mock_cgtfsh(algo, hash_): nonlocal called called += 1 assert algo in ("sha1", "sha1_git") return [123456] mocker.patch.object( swh_storage._cql_runner, "content_get_tokens_from_single_hash", mock_cgtfsh, ) # For all tokens, always return cont and cont2 cols = list(set(cont.to_dict()) - {"data"}) Row = namedtuple("Row", cols) def mock_cgft(token): nonlocal called called += 1 return [ Row(**{col: getattr(cont, col) for col in cols}) for cont in [cont, cont2] ] mocker.patch.object( swh_storage._cql_runner, "content_get_from_token", mock_cgft ) expected_cont = attr.evolve(cont, data=None).to_dict() actual_result = swh_storage.content_find({"sha1": cont.sha1}) assert called == 2 # but cont2 should be filtered out assert actual_result == [expected_cont] @pytest.mark.skip("content_update is not yet implemented for Cassandra") def test_content_update(self): pass @pytest.mark.skip( 'The "person" table of the pgsql is a legacy thing, and not ' "supported by the cassandra backend." ) def test_person_fullname_unicity(self): pass @pytest.mark.skip( 'The "person" table of the pgsql is a legacy thing, and not ' "supported by the cassandra backend." ) def test_person_get(self): pass @pytest.mark.skip("Not supported by Cassandra") def test_origin_count(self): pass @pytest.mark.cassandra class TestCassandraStorageGeneratedData(_TestStorageGeneratedData): @pytest.mark.skip("Not supported by Cassandra") def test_origin_count(self): pass @pytest.mark.skip("Not supported by Cassandra") def test_origin_get_range(self): pass @pytest.mark.skip("Not supported by Cassandra") def test_origin_get_range_from_zero(self): pass @pytest.mark.skip("Not supported by Cassandra") def test_generate_content_get_range_limit(self): pass @pytest.mark.skip("Not supported by Cassandra") def test_generate_content_get_range_no_limit(self): pass @pytest.mark.skip("Not supported by Cassandra") def test_generate_content_get_range(self): pass @pytest.mark.skip("Not supported by Cassandra") def test_generate_content_get_range_empty(self): pass @pytest.mark.skip("Not supported by Cassandra") def test_generate_content_get_range_limit_none(self): pass @pytest.mark.skip("Not supported by Cassandra") def test_generate_content_get_range_full(self): pass @pytest.mark.skip("Not supported by Cassandra") def test_origin_count_with_visit_no_visits(self): pass @pytest.mark.skip("Not supported by Cassandra") def test_origin_count_with_visit_with_visits_and_snapshot(self): pass @pytest.mark.skip("Not supported by Cassandra") def test_origin_count_with_visit_with_visits_no_snapshot(self): pass diff --git a/swh/storage/tests/test_filter.py b/swh/storage/tests/test_filter.py index f01e23a9..558c39c9 100644 --- a/swh/storage/tests/test_filter.py +++ b/swh/storage/tests/test_filter.py @@ -1,131 +1,131 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import attr import pytest from swh.storage import get_storage @pytest.fixture def swh_storage(): storage_config = { "cls": "pipeline", "steps": [{"cls": "filter"}, {"cls": "memory"},], } return get_storage(**storage_config) def test_filtering_proxy_storage_content(swh_storage, sample_data): - sample_content = 
sample_data["content"][0] + sample_content = sample_data.content content = next(swh_storage.content_get([sample_content.sha1])) assert not content s = swh_storage.content_add([sample_content]) assert s == { "content:add": 1, "content:add:bytes": sample_content.length, } content = next(swh_storage.content_get([sample_content.sha1])) assert content is not None s = swh_storage.content_add([sample_content]) assert s == { "content:add": 0, "content:add:bytes": 0, } def test_filtering_proxy_storage_skipped_content(swh_storage, sample_data): - sample_content = sample_data["skipped_content"][0] + sample_content = sample_data.skipped_content sample_content_dict = sample_content.to_dict() content = next(swh_storage.skipped_content_missing([sample_content_dict])) assert content["sha1"] == sample_content.sha1 s = swh_storage.skipped_content_add([sample_content]) assert s == { "skipped_content:add": 1, } content = list(swh_storage.skipped_content_missing([sample_content_dict])) assert content == [] s = swh_storage.skipped_content_add([sample_content]) assert s == { "skipped_content:add": 0, } def test_filtering_proxy_storage_skipped_content_missing_sha1_git( swh_storage, sample_data ): sample_contents = [ - attr.evolve(c, sha1_git=None) for c in sample_data["skipped_content"] + attr.evolve(c, sha1_git=None) for c in sample_data.skipped_contents ] sample_content, sample_content2 = [c.to_dict() for c in sample_contents[:2]] content = next(swh_storage.skipped_content_missing([sample_content])) assert content["sha1"] == sample_content["sha1"] s = swh_storage.skipped_content_add([sample_contents[0]]) assert s == { "skipped_content:add": 1, } content = list(swh_storage.skipped_content_missing([sample_content])) assert content == [] s = swh_storage.skipped_content_add([sample_contents[1]]) assert s == { "skipped_content:add": 1, } content = list(swh_storage.skipped_content_missing([sample_content2])) assert content == [] def test_filtering_proxy_storage_revision(swh_storage, sample_data): - sample_revision = sample_data["revision"][0] + sample_revision = sample_data.revision revision = next(swh_storage.revision_get([sample_revision.id])) assert not revision s = swh_storage.revision_add([sample_revision]) assert s == { "revision:add": 1, } revision = next(swh_storage.revision_get([sample_revision.id])) assert revision is not None s = swh_storage.revision_add([sample_revision]) assert s == { "revision:add": 0, } def test_filtering_proxy_storage_directory(swh_storage, sample_data): - sample_directory = sample_data["directory"][0] + sample_directory = sample_data.directory directory = next(swh_storage.directory_missing([sample_directory.id])) assert directory s = swh_storage.directory_add([sample_directory]) assert s == { "directory:add": 1, } directory = list(swh_storage.directory_missing([sample_directory.id])) assert not directory s = swh_storage.directory_add([sample_directory]) assert s == { "directory:add": 0, } diff --git a/swh/storage/tests/test_pytest_plugin.py b/swh/storage/tests/test_pytest_plugin.py index 7c599844..5a59c5e9 100644 --- a/swh/storage/tests/test_pytest_plugin.py +++ b/swh/storage/tests/test_pytest_plugin.py @@ -1,38 +1,19 @@ # Copyright (C) 2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information - -from swh.model.model import BaseModel from swh.storage.interface import StorageInterface +from 
swh.storage.tests.storage_data import StorageData


def test_sample_data(sample_data):
-    assert set(sample_data.keys()) == set(
-        [
-            "content",
-            "skipped_content",
-            "directory",
-            "revision",
-            "release",
-            "snapshot",
-            "origin",
-            "origin_visit",
-            "fetcher",
-            "authority",
-            "origin_metadata",
-            "content_metadata",
-        ]
-    )
-    for object_type, objs in sample_data.items():
-        for obj in objs:
-            assert isinstance(obj, BaseModel)
+    assert isinstance(sample_data, StorageData)


def test_swh_storage(swh_storage: StorageInterface):
    assert isinstance(swh_storage, StorageInterface)


def test_swh_storage_backend_config(swh_storage_backend_config):
    assert isinstance(swh_storage_backend_config, dict)

diff --git a/swh/storage/tests/test_retry.py b/swh/storage/tests/test_retry.py
index 7b235304..782e8f35 100644
--- a/swh/storage/tests/test_retry.py
+++ b/swh/storage/tests/test_retry.py
@@ -1,836 +1,834 @@
# Copyright (C) 2020 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

import attr
from unittest.mock import call

import psycopg2
import pytest

-from swh.model.model import (
-    OriginVisit,
-    MetadataTargetType,
-)
+from swh.model.model import MetadataTargetType
from swh.storage.exc import HashCollision, StorageArgumentException

-from .storage_data import date_visit1
-

@pytest.fixture
def monkeypatch_sleep(monkeypatch, swh_storage):
    """In test context, we don't want to wait, so make tests faster
    """
    from swh.storage.retry import RetryingProxyStorage

    for method_name, method in RetryingProxyStorage.__dict__.items():
        if "_add" in method_name or "_update" in method_name:
            monkeypatch.setattr(method.retry, "sleep", lambda x: None)
    return monkeypatch


@pytest.fixture
def fake_hash_collision(sample_data):
    return HashCollision("sha1", "38762cf7f55934b34d179ae6a4c80cadccbb7f0a", [])


@pytest.fixture
def swh_storage_backend_config():
    yield {
        "cls": "pipeline",
        "steps": [{"cls": "retry"}, {"cls": "memory"},],
    }


def test_retrying_proxy_storage_content_add(swh_storage, sample_data):
    """Standard content_add works as before
    """
-    sample_content = sample_data["content"][0]
-
+    sample_content = sample_data.content
    content = next(swh_storage.content_get([sample_content.sha1]))
    assert not content

    s = swh_storage.content_add([sample_content])
    assert s == {
        "content:add": 1,
        "content:add:bytes": sample_content.length,
    }

    content = next(swh_storage.content_get([sample_content.sha1]))
    assert content["sha1"] == sample_content.sha1


def test_retrying_proxy_storage_content_add_with_retry(
    monkeypatch_sleep, swh_storage, sample_data, mocker, fake_hash_collision,
):
    """Multiple retries for hash collision and psycopg2 error but finally ok
    """
    mock_memory = mocker.patch("swh.storage.in_memory.InMemoryStorage.content_add")
    mock_memory.side_effect = [
        # first try goes ko
        fake_hash_collision,
        # second try goes ko
        psycopg2.IntegrityError("content already inserted"),
        # ok then!
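        # NB: side_effect consumes one item per call: exception instances are
        # raised, plain values are returned, so the retry proxy absorbs the
        # two failures above before this third call finally succeeds: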
{"content:add": 1}, ] - sample_content = sample_data["content"][0] + sample_content = sample_data.content content = next(swh_storage.content_get([sample_content.sha1])) assert not content s = swh_storage.content_add([sample_content]) assert s == {"content:add": 1} mock_memory.assert_has_calls( [call([sample_content]), call([sample_content]), call([sample_content]),] ) def test_retrying_proxy_swh_storage_content_add_failure( swh_storage, sample_data, mocker ): """Unfiltered errors are raising without retry """ mock_memory = mocker.patch("swh.storage.in_memory.InMemoryStorage.content_add") mock_memory.side_effect = StorageArgumentException("Refuse to add content always!") - sample_content = sample_data["content"][0] + sample_content = sample_data.content content = next(swh_storage.content_get([sample_content.sha1])) assert not content with pytest.raises(StorageArgumentException, match="Refuse to add"): swh_storage.content_add([sample_content]) assert mock_memory.call_count == 1 def test_retrying_proxy_storage_content_add_metadata(swh_storage, sample_data): """Standard content_add_metadata works as before """ - sample_content = sample_data["content"][0] + sample_content = sample_data.content content = attr.evolve(sample_content, data=None) pk = content.sha1 content_metadata = swh_storage.content_get_metadata([pk]) assert not content_metadata[pk] s = swh_storage.content_add_metadata([content]) assert s == { "content:add": 1, } content_metadata = swh_storage.content_get_metadata([pk]) assert len(content_metadata[pk]) == 1 assert content_metadata[pk][0]["sha1"] == pk def test_retrying_proxy_storage_content_add_metadata_with_retry( monkeypatch_sleep, swh_storage, sample_data, mocker, fake_hash_collision ): """Multiple retries for hash collision and psycopg2 error but finally ok """ mock_memory = mocker.patch( "swh.storage.in_memory.InMemoryStorage.content_add_metadata" ) mock_memory.side_effect = [ # first try goes ko fake_hash_collision, # second try goes ko psycopg2.IntegrityError("content_metadata already inserted"), # ok then! {"content:add": 1}, ] - sample_content = sample_data["content"][0] + sample_content = sample_data.content content = attr.evolve(sample_content, data=None) s = swh_storage.content_add_metadata([content]) assert s == {"content:add": 1} mock_memory.assert_has_calls( [call([content]), call([content]), call([content]),] ) def test_retrying_proxy_swh_storage_content_add_metadata_failure( swh_storage, sample_data, mocker ): """Unfiltered errors are raising without retry """ mock_memory = mocker.patch( "swh.storage.in_memory.InMemoryStorage.content_add_metadata" ) mock_memory.side_effect = StorageArgumentException( "Refuse to add content_metadata!" 
) - sample_content = sample_data["content"][0] + sample_content = sample_data.content content = attr.evolve(sample_content, data=None) pk = content.sha1 content_metadata = swh_storage.content_get_metadata([pk]) assert not content_metadata[pk] with pytest.raises(StorageArgumentException, match="Refuse to add"): swh_storage.content_add_metadata([content]) assert mock_memory.call_count == 1 def test_retrying_proxy_storage_skipped_content_add(swh_storage, sample_data): """Standard skipped_content_add works as before """ - sample_content = sample_data["skipped_content"][0] + sample_content = sample_data.skipped_content sample_content_dict = sample_content.to_dict() skipped_contents = list(swh_storage.skipped_content_missing([sample_content_dict])) assert len(skipped_contents) == 1 s = swh_storage.skipped_content_add([sample_content]) assert s == { "skipped_content:add": 1, } skipped_content = list(swh_storage.skipped_content_missing([sample_content_dict])) assert len(skipped_content) == 0 def test_retrying_proxy_storage_skipped_content_add_with_retry( monkeypatch_sleep, swh_storage, sample_data, mocker, fake_hash_collision ): """Multiple retries for hash collision and psycopg2 error but finally ok """ mock_memory = mocker.patch( "swh.storage.in_memory.InMemoryStorage.skipped_content_add" ) mock_memory.side_effect = [ # 1st & 2nd try goes ko fake_hash_collision, psycopg2.IntegrityError("skipped_content already inserted"), # ok then! {"skipped_content:add": 1}, ] - sample_content = sample_data["skipped_content"][0] + sample_content = sample_data.skipped_content s = swh_storage.skipped_content_add([sample_content]) assert s == {"skipped_content:add": 1} mock_memory.assert_has_calls( [call([sample_content]), call([sample_content]), call([sample_content]),] ) def test_retrying_proxy_swh_storage_skipped_content_add_failure( swh_storage, sample_data, mocker ): """Unfiltered errors are raising without retry """ mock_memory = mocker.patch( "swh.storage.in_memory.InMemoryStorage.skipped_content_add" ) mock_memory.side_effect = StorageArgumentException( "Refuse to add content_metadata!" 
) - sample_content = sample_data["skipped_content"][0] + sample_content = sample_data.skipped_content sample_content_dict = sample_content.to_dict() skipped_contents = list(swh_storage.skipped_content_missing([sample_content_dict])) assert len(skipped_contents) == 1 with pytest.raises(StorageArgumentException, match="Refuse to add"): swh_storage.skipped_content_add([sample_content]) skipped_contents = list(swh_storage.skipped_content_missing([sample_content_dict])) assert len(skipped_contents) == 1 assert mock_memory.call_count == 1 def test_retrying_proxy_swh_storage_origin_visit_add(swh_storage, sample_data): """Standard origin_visit_add works as before """ - origin = sample_data["origin"][0] + origin = sample_data.origin + visit = sample_data.origin_visit + assert visit.origin == origin.url swh_storage.origin_add([origin]) origins = list(swh_storage.origin_visit_get(origin.url)) assert not origins - visit = OriginVisit(origin=origin.url, date=date_visit1, type="hg") origin_visit = swh_storage.origin_visit_add([visit])[0] assert origin_visit.origin == origin.url assert isinstance(origin_visit.visit, int) origin_visit = next(swh_storage.origin_visit_get(origin.url)) assert origin_visit["origin"] == origin.url assert isinstance(origin_visit["visit"], int) def test_retrying_proxy_swh_storage_origin_visit_add_retry( monkeypatch_sleep, swh_storage, sample_data, mocker, fake_hash_collision ): """Multiple retries for hash collision and psycopg2 error but finally ok """ - origin = sample_data["origin"][1] + origin = sample_data.origin + visit = sample_data.origin_visit + assert visit.origin == origin.url + swh_storage.origin_add([origin]) mock_memory = mocker.patch("swh.storage.in_memory.InMemoryStorage.origin_visit_add") - visit = OriginVisit(origin=origin.url, date=date_visit1, type="git") mock_memory.side_effect = [ # first try goes ko fake_hash_collision, # second try goes ko psycopg2.IntegrityError("origin already inserted"), # ok then! 
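        # NB: origin_visit_add returns the list of added visits, so even
        # after two absorbed failures the caller still receives [visit]: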
[visit], ] origins = list(swh_storage.origin_visit_get(origin.url)) assert not origins r = swh_storage.origin_visit_add([visit]) assert r == [visit] mock_memory.assert_has_calls( [call([visit]), call([visit]), call([visit]),] ) def test_retrying_proxy_swh_storage_origin_visit_add_failure( swh_storage, sample_data, mocker ): """Unfiltered errors are raising without retry """ mock_memory = mocker.patch("swh.storage.in_memory.InMemoryStorage.origin_visit_add") mock_memory.side_effect = StorageArgumentException("Refuse to add origin always!") - origin = sample_data["origin"][0] + origin = sample_data.origin + visit = sample_data.origin_visit + assert visit.origin == origin.url origins = list(swh_storage.origin_visit_get(origin.url)) assert not origins with pytest.raises(StorageArgumentException, match="Refuse to add"): - visit = OriginVisit(origin=origin.url, date=date_visit1, type="svn",) swh_storage.origin_visit_add([visit]) mock_memory.assert_has_calls( [call([visit]),] ) def test_retrying_proxy_storage_metadata_fetcher_add(swh_storage, sample_data): """Standard metadata_fetcher_add works as before """ - fetcher = sample_data["fetcher"][0] + fetcher = sample_data.metadata_fetcher metadata_fetcher = swh_storage.metadata_fetcher_get(fetcher.name, fetcher.version) assert not metadata_fetcher swh_storage.metadata_fetcher_add([fetcher]) actual_fetcher = swh_storage.metadata_fetcher_get(fetcher.name, fetcher.version) assert actual_fetcher == fetcher def test_retrying_proxy_storage_metadata_fetcher_add_with_retry( monkeypatch_sleep, swh_storage, sample_data, mocker, fake_hash_collision, ): """Multiple retries for hash collision and psycopg2 error but finally ok """ - fetcher = sample_data["fetcher"][0] + fetcher = sample_data.metadata_fetcher mock_memory = mocker.patch( "swh.storage.in_memory.InMemoryStorage.metadata_fetcher_add" ) mock_memory.side_effect = [ # first try goes ko fake_hash_collision, # second try goes ko psycopg2.IntegrityError("metadata_fetcher already inserted"), # ok then! [fetcher], ] actual_fetcher = swh_storage.metadata_fetcher_get(fetcher.name, fetcher.version) assert not actual_fetcher swh_storage.metadata_fetcher_add([fetcher]) mock_memory.assert_has_calls( [call([fetcher]), call([fetcher]), call([fetcher]),] ) def test_retrying_proxy_swh_storage_metadata_fetcher_add_failure( swh_storage, sample_data, mocker ): """Unfiltered errors are raising without retry """ mock_memory = mocker.patch( "swh.storage.in_memory.InMemoryStorage.metadata_fetcher_add" ) mock_memory.side_effect = StorageArgumentException( "Refuse to add metadata_fetcher always!" 
) - fetcher = sample_data["fetcher"][0] + fetcher = sample_data.metadata_fetcher actual_fetcher = swh_storage.metadata_fetcher_get(fetcher.name, fetcher.version) assert not actual_fetcher with pytest.raises(StorageArgumentException, match="Refuse to add"): swh_storage.metadata_fetcher_add([fetcher]) assert mock_memory.call_count == 1 def test_retrying_proxy_storage_metadata_authority_add(swh_storage, sample_data): """Standard metadata_authority_add works as before """ - authority = sample_data["authority"][0] + authority = sample_data.metadata_authority assert not swh_storage.metadata_authority_get(authority.type, authority.url) swh_storage.metadata_authority_add([authority]) actual_authority = swh_storage.metadata_authority_get(authority.type, authority.url) assert actual_authority == authority def test_retrying_proxy_storage_metadata_authority_add_with_retry( monkeypatch_sleep, swh_storage, sample_data, mocker, fake_hash_collision, ): """Multiple retries for hash collision and psycopg2 error but finally ok """ - authority = sample_data["authority"][0] + authority = sample_data.metadata_authority mock_memory = mocker.patch( "swh.storage.in_memory.InMemoryStorage.metadata_authority_add" ) mock_memory.side_effect = [ # first try goes ko fake_hash_collision, # second try goes ko psycopg2.IntegrityError("foo bar"), # ok then! None, ] assert not swh_storage.metadata_authority_get(authority.type, authority.url) swh_storage.metadata_authority_add([authority]) mock_memory.assert_has_calls( [call([authority]), call([authority]), call([authority])] ) def test_retrying_proxy_swh_storage_metadata_authority_add_failure( swh_storage, sample_data, mocker ): """Unfiltered errors are raising without retry """ mock_memory = mocker.patch( "swh.storage.in_memory.InMemoryStorage.metadata_authority_add" ) mock_memory.side_effect = StorageArgumentException( "Refuse to add authority_id always!" 
) - authority = sample_data["authority"][0] + authority = sample_data.metadata_authority swh_storage.metadata_authority_get(authority.type, authority.url) with pytest.raises(StorageArgumentException, match="Refuse to add"): swh_storage.metadata_authority_add([authority]) assert mock_memory.call_count == 1 def test_retrying_proxy_storage_object_metadata_add(swh_storage, sample_data): """Standard object_metadata_add works as before """ - origin = sample_data["origin"][0] - ori_meta = sample_data["origin_metadata"][0] + origin = sample_data.origin + ori_meta = sample_data.origin_metadata1 assert origin.url == ori_meta.id swh_storage.origin_add([origin]) - swh_storage.metadata_authority_add([sample_data["authority"][0]]) - swh_storage.metadata_fetcher_add([sample_data["fetcher"][0]]) + swh_storage.metadata_authority_add([sample_data.metadata_authority]) + swh_storage.metadata_fetcher_add([sample_data.metadata_fetcher]) origin_metadata = swh_storage.object_metadata_get( MetadataTargetType.ORIGIN, ori_meta.id, ori_meta.authority ) assert origin_metadata["next_page_token"] is None assert not origin_metadata["results"] swh_storage.object_metadata_add([ori_meta]) origin_metadata = swh_storage.object_metadata_get( MetadataTargetType.ORIGIN, ori_meta.id, ori_meta.authority ) assert origin_metadata def test_retrying_proxy_storage_object_metadata_add_with_retry( monkeypatch_sleep, swh_storage, sample_data, mocker, fake_hash_collision, ): """Multiple retries for hash collision and psycopg2 error but finally ok """ - origin = sample_data["origin"][0] - ori_meta = sample_data["origin_metadata"][0] + origin = sample_data.origin + ori_meta = sample_data.origin_metadata1 assert origin.url == ori_meta.id swh_storage.origin_add([origin]) - swh_storage.metadata_authority_add([sample_data["authority"][0]]) - swh_storage.metadata_fetcher_add([sample_data["fetcher"][0]]) + swh_storage.metadata_authority_add([sample_data.metadata_authority]) + swh_storage.metadata_fetcher_add([sample_data.metadata_fetcher]) mock_memory = mocker.patch( "swh.storage.in_memory.InMemoryStorage.object_metadata_add" ) mock_memory.side_effect = [ # first try goes ko fake_hash_collision, # second try goes ko psycopg2.IntegrityError("foo bar"), # ok then! 
None, ] # No exception raised as insertion finally came through swh_storage.object_metadata_add([ori_meta]) mock_memory.assert_has_calls( [ # 3 calls, as long as error raised call([ori_meta]), call([ori_meta]), call([ori_meta]), ] ) def test_retrying_proxy_swh_storage_object_metadata_add_failure( swh_storage, sample_data, mocker ): """Unfiltered errors are raising without retry """ mock_memory = mocker.patch( "swh.storage.in_memory.InMemoryStorage.object_metadata_add" ) mock_memory.side_effect = StorageArgumentException("Refuse to add always!") - origin = sample_data["origin"][0] - ori_meta = sample_data["origin_metadata"][0] + origin = sample_data.origin + ori_meta = sample_data.origin_metadata1 assert origin.url == ori_meta.id swh_storage.origin_add([origin]) with pytest.raises(StorageArgumentException, match="Refuse to add"): swh_storage.object_metadata_add([ori_meta]) assert mock_memory.call_count == 1 def test_retrying_proxy_storage_directory_add(swh_storage, sample_data): """Standard directory_add works as before """ - sample_dir = sample_data["directory"][0] + sample_dir = sample_data.directory directory = swh_storage.directory_get_random() # no directory assert not directory s = swh_storage.directory_add([sample_dir]) assert s == { "directory:add": 1, } directory_id = swh_storage.directory_get_random() # only 1 assert directory_id == sample_dir.id def test_retrying_proxy_storage_directory_add_with_retry( monkeypatch_sleep, swh_storage, sample_data, mocker, fake_hash_collision ): """Multiple retries for hash collision and psycopg2 error but finally ok """ mock_memory = mocker.patch("swh.storage.in_memory.InMemoryStorage.directory_add") mock_memory.side_effect = [ # first try goes ko fake_hash_collision, # second try goes ko psycopg2.IntegrityError("directory already inserted"), # ok then! {"directory:add": 1}, ] - sample_dir = sample_data["directory"][1] + sample_dir = sample_data.directories[1] directory_id = swh_storage.directory_get_random() # no directory assert not directory_id s = swh_storage.directory_add([sample_dir]) assert s == { "directory:add": 1, } mock_memory.assert_has_calls( [call([sample_dir]), call([sample_dir]), call([sample_dir]),] ) def test_retrying_proxy_swh_storage_directory_add_failure( swh_storage, sample_data, mocker ): """Unfiltered errors are raising without retry """ mock_memory = mocker.patch("swh.storage.in_memory.InMemoryStorage.directory_add") mock_memory.side_effect = StorageArgumentException( "Refuse to add directory always!" 
) - sample_dir = sample_data["directory"][0] + sample_dir = sample_data.directory directory_id = swh_storage.directory_get_random() # no directory assert not directory_id with pytest.raises(StorageArgumentException, match="Refuse to add"): swh_storage.directory_add([sample_dir]) assert mock_memory.call_count == 1 def test_retrying_proxy_storage_revision_add(swh_storage, sample_data): """Standard revision_add works as before """ - sample_rev = sample_data["revision"][0] + sample_rev = sample_data.revision revision = next(swh_storage.revision_get([sample_rev.id])) assert not revision s = swh_storage.revision_add([sample_rev]) assert s == { "revision:add": 1, } revision = next(swh_storage.revision_get([sample_rev.id])) assert revision["id"] == sample_rev.id def test_retrying_proxy_storage_revision_add_with_retry( monkeypatch_sleep, swh_storage, sample_data, mocker, fake_hash_collision ): """Multiple retries for hash collision and psycopg2 error but finally ok """ mock_memory = mocker.patch("swh.storage.in_memory.InMemoryStorage.revision_add") mock_memory.side_effect = [ # first try goes ko fake_hash_collision, # second try goes ko psycopg2.IntegrityError("revision already inserted"), # ok then! {"revision:add": 1}, ] - sample_rev = sample_data["revision"][0] + sample_rev = sample_data.revision revision = next(swh_storage.revision_get([sample_rev.id])) assert not revision s = swh_storage.revision_add([sample_rev]) assert s == { "revision:add": 1, } mock_memory.assert_has_calls( [call([sample_rev]), call([sample_rev]), call([sample_rev]),] ) def test_retrying_proxy_swh_storage_revision_add_failure( swh_storage, sample_data, mocker ): """Unfiltered errors are raising without retry """ mock_memory = mocker.patch("swh.storage.in_memory.InMemoryStorage.revision_add") mock_memory.side_effect = StorageArgumentException("Refuse to add revision always!") - sample_rev = sample_data["revision"][0] + sample_rev = sample_data.revision revision = next(swh_storage.revision_get([sample_rev.id])) assert not revision with pytest.raises(StorageArgumentException, match="Refuse to add"): swh_storage.revision_add([sample_rev]) assert mock_memory.call_count == 1 def test_retrying_proxy_storage_release_add(swh_storage, sample_data): """Standard release_add works as before """ - sample_rel = sample_data["release"][0] + sample_rel = sample_data.release release = next(swh_storage.release_get([sample_rel.id])) assert not release s = swh_storage.release_add([sample_rel]) assert s == { "release:add": 1, } release = next(swh_storage.release_get([sample_rel.id])) assert release["id"] == sample_rel.id def test_retrying_proxy_storage_release_add_with_retry( monkeypatch_sleep, swh_storage, sample_data, mocker, fake_hash_collision ): """Multiple retries for hash collision and psycopg2 error but finally ok """ mock_memory = mocker.patch("swh.storage.in_memory.InMemoryStorage.release_add") mock_memory.side_effect = [ # first try goes ko fake_hash_collision, # second try goes ko psycopg2.IntegrityError("release already inserted"), # ok then! 
{"release:add": 1}, ] - sample_rel = sample_data["release"][0] + sample_rel = sample_data.release release = next(swh_storage.release_get([sample_rel.id])) assert not release s = swh_storage.release_add([sample_rel]) assert s == { "release:add": 1, } mock_memory.assert_has_calls( [call([sample_rel]), call([sample_rel]), call([sample_rel]),] ) def test_retrying_proxy_swh_storage_release_add_failure( swh_storage, sample_data, mocker ): """Unfiltered errors are raising without retry """ mock_memory = mocker.patch("swh.storage.in_memory.InMemoryStorage.release_add") mock_memory.side_effect = StorageArgumentException("Refuse to add release always!") - sample_rel = sample_data["release"][0] + sample_rel = sample_data.release release = next(swh_storage.release_get([sample_rel.id])) assert not release with pytest.raises(StorageArgumentException, match="Refuse to add"): swh_storage.release_add([sample_rel]) assert mock_memory.call_count == 1 def test_retrying_proxy_storage_snapshot_add(swh_storage, sample_data): """Standard snapshot_add works as before """ - sample_snap = sample_data["snapshot"][0] + sample_snap = sample_data.snapshot snapshot = swh_storage.snapshot_get(sample_snap.id) assert not snapshot s = swh_storage.snapshot_add([sample_snap]) assert s == { "snapshot:add": 1, } snapshot = swh_storage.snapshot_get(sample_snap.id) assert snapshot["id"] == sample_snap.id def test_retrying_proxy_storage_snapshot_add_with_retry( monkeypatch_sleep, swh_storage, sample_data, mocker, fake_hash_collision ): """Multiple retries for hash collision and psycopg2 error but finally ok """ mock_memory = mocker.patch("swh.storage.in_memory.InMemoryStorage.snapshot_add") mock_memory.side_effect = [ # first try goes ko fake_hash_collision, # second try goes ko psycopg2.IntegrityError("snapshot already inserted"), # ok then! 
{"snapshot:add": 1}, ] - sample_snap = sample_data["snapshot"][0] + sample_snap = sample_data.snapshot snapshot = swh_storage.snapshot_get(sample_snap.id) assert not snapshot s = swh_storage.snapshot_add([sample_snap]) assert s == { "snapshot:add": 1, } mock_memory.assert_has_calls( [call([sample_snap]), call([sample_snap]), call([sample_snap]),] ) def test_retrying_proxy_swh_storage_snapshot_add_failure( swh_storage, sample_data, mocker ): """Unfiltered errors are raising without retry """ mock_memory = mocker.patch("swh.storage.in_memory.InMemoryStorage.snapshot_add") mock_memory.side_effect = StorageArgumentException("Refuse to add snapshot always!") - sample_snap = sample_data["snapshot"][0] + sample_snap = sample_data.snapshot snapshot = swh_storage.snapshot_get(sample_snap.id) assert not snapshot with pytest.raises(StorageArgumentException, match="Refuse to add"): swh_storage.snapshot_add([sample_snap]) assert mock_memory.call_count == 1 diff --git a/swh/storage/tests/test_revision_bw_compat.py b/swh/storage/tests/test_revision_bw_compat.py index 3e0d131f..83216a34 100644 --- a/swh/storage/tests/test_revision_bw_compat.py +++ b/swh/storage/tests/test_revision_bw_compat.py @@ -1,47 +1,47 @@ # Copyright (C) 2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import attr from swh.core.utils import decode_with_escape from swh.model.model import Revision from swh.storage import get_storage from swh.storage.tests.test_storage import db_transaction def headers_to_db(git_headers): return [[key, decode_with_escape(value)] for key, value in git_headers] def test_revision_extra_header_in_metadata(swh_storage_backend_config, sample_data): storage = get_storage(**swh_storage_backend_config) - rev = sample_data["revision"][0] + rev = sample_data.revision md_w_extra = dict( rev.metadata.items(), extra_headers=headers_to_db( [ ["gpgsig", b"test123"], ["mergetag", b"foo\\bar"], ["mergetag", b"\x22\xaf\x89\x80\x01\x00"], ] ), ) bw_rev = attr.evolve(rev, extra_headers=()) object.__setattr__(bw_rev, "metadata", md_w_extra) assert bw_rev.extra_headers == () assert storage.revision_add([bw_rev]) == {"revision:add": 1} # check data in the db are old format with db_transaction(storage) as (_, cur): cur.execute("SELECT metadata, extra_headers FROM revision") metadata, extra_headers = cur.fetchone() assert extra_headers == [] assert metadata == bw_rev.metadata # check the Revision build from revision_get is the original, "new style", Revision assert [Revision.from_dict(x) for x in storage.revision_get([rev.id])] == [rev] diff --git a/swh/storage/tests/test_storage.py b/swh/storage/tests/test_storage.py index ad38ad1e..f806611a 100644 --- a/swh/storage/tests/test_storage.py +++ b/swh/storage/tests/test_storage.py @@ -1,4118 +1,4176 @@ # Copyright (C) 2015-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import inspect import itertools import math import queue import random import threading from collections import defaultdict from contextlib import contextmanager from datetime import timedelta from unittest.mock import Mock import attr import pytest from hypothesis import given, strategies, settings, HealthCheck from typing import ClassVar, 
Optional from swh.model import from_disk from swh.model.hashutil import hash_to_bytes from swh.model.identifiers import SWHID from swh.model.model import ( Content, MetadataTargetType, Origin, OriginVisit, OriginVisitStatus, Person, Release, Revision, Snapshot, ) from swh.model.hypothesis_strategies import objects from swh.storage import get_storage from swh.storage.converters import origin_url_to_sha1 as sha1 from swh.storage.exc import HashCollision, StorageArgumentException from swh.storage.interface import StorageInterface from swh.storage.utils import content_hex_hashes, now -from .storage_data import data - @contextmanager def db_transaction(storage): with storage.db() as db: with db.transaction() as cur: yield db, cur def transform_entries(dir_, *, prefix=b""): for ent in dir_.entries: yield { "dir_id": dir_.id, "type": ent.type, "target": ent.target, "name": prefix + ent.name, "perms": ent.perms, "status": None, "sha1": None, "sha1_git": None, "sha256": None, "length": None, } def cmpdir(directory): return (directory["type"], directory["dir_id"]) def assert_contents_ok( expected_contents, actual_contents, keys_to_check={"sha1", "data"} ): """Assert that a given list of contents matches on a given set of keys. """ for k in keys_to_check: expected_list = set([c.get(k) for c in expected_contents]) actual_list = set([c.get(k) for c in actual_contents]) assert actual_list == expected_list, k def round_to_milliseconds(date): """Round datetime to milliseconds before insertion, so equality doesn't fail after a round-trip through a DB (eg. Cassandra) """ return date.replace(microsecond=(date.microsecond // 1000) * 1000) def test_round_to_milliseconds(): date = now() for (ms, expected_ms) in [(0, 0), (1000, 1000), (555555, 555000), (999500, 999000)]: date = date.replace(microsecond=ms) actual_date = round_to_milliseconds(date) assert actual_date.microsecond == expected_ms class LazyContent(Content): def with_data(self): - raw_data = data.content.data - return Content.from_dict({**self.to_dict(), "data": raw_data}) + return Content.from_dict({**self.to_dict(), "data": b"42\n"}) class TestStorage: """Main class for Storage testing. This class is used as-is to test local storage (see TestLocalStorage below) and remote storage (see TestRemoteStorage in test_remote_storage.py. We need to have the two classes inherit from this base class separately to avoid nosetests running the tests from the base class twice. 
""" maxDiff = None # type: ClassVar[Optional[int]] def test_types(self, swh_storage_backend_config): """Checks all methods of StorageInterface are implemented by this backend, and that they have the same signature.""" # Create an instance of the protocol (which cannot be instantiated # directly, so this creates a subclass, then instantiates it) interface = type("_", (StorageInterface,), {})() storage = get_storage(**swh_storage_backend_config) assert "content_add" in dir(interface) missing_methods = [] for meth_name in dir(interface): if meth_name.startswith("_"): continue interface_meth = getattr(interface, meth_name) try: concrete_meth = getattr(storage, meth_name) except AttributeError: if not getattr(interface_meth, "deprecated_endpoint", False): # The backend is missing a (non-deprecated) endpoint missing_methods.append(meth_name) continue expected_signature = inspect.signature(interface_meth) actual_signature = inspect.signature(concrete_meth) assert expected_signature == actual_signature, meth_name assert missing_methods == [] def test_check_config(self, swh_storage): assert swh_storage.check_config(check_write=True) assert swh_storage.check_config(check_write=False) def test_content_add(self, swh_storage, sample_data): - cont = sample_data["content"][0] + cont = sample_data.content insertion_start_time = now() actual_result = swh_storage.content_add([cont]) insertion_end_time = now() assert actual_result == { "content:add": 1, "content:add:bytes": cont.length, } assert list(swh_storage.content_get([cont.sha1])) == [ {"sha1": cont.sha1, "data": cont.data} ] expected_cont = attr.evolve(cont, data=None) contents = [ obj for (obj_type, obj) in swh_storage.journal_writer.journal.objects if obj_type == "content" ] assert len(contents) == 1 for obj in contents: assert insertion_start_time <= obj.ctime assert obj.ctime <= insertion_end_time assert obj == expected_cont swh_storage.refresh_stat_counters() assert swh_storage.stat_counters()["content"] == 1 def test_content_add_from_generator(self, swh_storage, sample_data): - cont = sample_data["content"][0] + cont = sample_data.content def _cnt_gen(): yield cont actual_result = swh_storage.content_add(_cnt_gen()) assert actual_result == { "content:add": 1, "content:add:bytes": cont.length, } swh_storage.refresh_stat_counters() assert swh_storage.stat_counters()["content"] == 1 def test_content_add_from_lazy_content(self, swh_storage, sample_data): - cont = sample_data["content"][0] + cont = sample_data.content lazy_content = LazyContent.from_dict(cont.to_dict()) insertion_start_time = now() actual_result = swh_storage.content_add([lazy_content]) insertion_end_time = now() assert actual_result == { "content:add": 1, "content:add:bytes": cont.length, } # the fact that we retrieve the content object from the storage with # the correct 'data' field ensures it has been 'called' assert list(swh_storage.content_get([cont.sha1])) == [ {"sha1": cont.sha1, "data": cont.data} ] expected_cont = attr.evolve(lazy_content, data=None, ctime=None) contents = [ obj for (obj_type, obj) in swh_storage.journal_writer.journal.objects if obj_type == "content" ] assert len(contents) == 1 for obj in contents: assert insertion_start_time <= obj.ctime assert obj.ctime <= insertion_end_time assert attr.evolve(obj, ctime=None).to_dict() == expected_cont.to_dict() swh_storage.refresh_stat_counters() assert swh_storage.stat_counters()["content"] == 1 def test_content_get_missing(self, swh_storage, sample_data): - cont, cont2 = sample_data["content"][:2] + cont, cont2 = 
sample_data.contents[:2]
        swh_storage.content_add([cont])

        # Query a single missing content
        results = list(swh_storage.content_get([cont2.sha1]))
        assert results == [None]

        # Check content_get does not abort after finding a missing content
        results = list(swh_storage.content_get([cont.sha1, cont2.sha1]))
        assert results == [{"sha1": cont.sha1, "data": cont.data}, None]

        # Check content_get does not discard found content when it finds
        # a missing content.
        results = list(swh_storage.content_get([cont2.sha1, cont.sha1]))
        assert results == [None, {"sha1": cont.sha1, "data": cont.data}]

    def test_content_add_different_input(self, swh_storage, sample_data):
-        cont, cont2 = sample_data["content"][:2]
+        cont, cont2 = sample_data.contents[:2]

        actual_result = swh_storage.content_add([cont, cont2])
        assert actual_result == {
            "content:add": 2,
            "content:add:bytes": cont.length + cont2.length,
        }

    def test_content_add_twice(self, swh_storage, sample_data):
-        cont, cont2 = sample_data["content"][:2]
+        cont, cont2 = sample_data.contents[:2]

        actual_result = swh_storage.content_add([cont])
        assert actual_result == {
            "content:add": 1,
            "content:add:bytes": cont.length,
        }
        assert len(swh_storage.journal_writer.journal.objects) == 1

        actual_result = swh_storage.content_add([cont, cont2])
        assert actual_result == {
            "content:add": 1,
            "content:add:bytes": cont2.length,
        }
        assert 2 <= len(swh_storage.journal_writer.journal.objects) <= 3

        assert len(swh_storage.content_find(cont.to_dict())) == 1
        assert len(swh_storage.content_find(cont2.to_dict())) == 1

    def test_content_add_collision(self, swh_storage, sample_data):
-        cont1 = sample_data["content"][0]
+        cont1 = sample_data.content

        # create (corrupted) content with same sha1{,_git} but != sha256
        sha256_array = bytearray(cont1.sha256)
        sha256_array[0] += 1
        cont1b = attr.evolve(cont1, sha256=bytes(sha256_array))

        with pytest.raises(HashCollision) as cm:
            swh_storage.content_add([cont1, cont1b])

        exc = cm.value
        actual_algo = exc.algo
        assert actual_algo in ["sha1", "sha1_git", "blake2s256"]
        actual_id = exc.hash_id
        assert actual_id == getattr(cont1, actual_algo).hex()
        collisions = exc.args[2]
        assert len(collisions) == 2
        assert collisions == [
            content_hex_hashes(cont1.hashes()),
            content_hex_hashes(cont1b.hashes()),
        ]
        assert exc.colliding_content_hashes() == [
            cont1.hashes(),
            cont1b.hashes(),
        ]

    def test_content_add_duplicate(self, swh_storage, sample_data):
-        cont = sample_data["content"][0]
+        cont = sample_data.content
        swh_storage.content_add([cont, cont])

        assert list(swh_storage.content_get([cont.sha1])) == [
            {"sha1": cont.sha1, "data": cont.data}
        ]

    def test_content_update(self, swh_storage, sample_data):
-        cont1 = sample_data["content"][0]
+        cont1 = sample_data.content

        if hasattr(swh_storage, "journal_writer"):
            swh_storage.journal_writer.journal = None  # TODO, not supported

        swh_storage.content_add([cont1])

        # alter the sha1_git for example
        cont1b = attr.evolve(
            cont1, sha1_git=hash_to_bytes("3a60a5275d0333bf13468e8b3dcab90f4046e654")
        )

        swh_storage.content_update([cont1b.to_dict()], keys=["sha1_git"])

        results = swh_storage.content_get_metadata([cont1.sha1])
        expected_content = attr.evolve(cont1b, data=None).to_dict()
        del expected_content["ctime"]
        assert tuple(results[cont1.sha1]) == (expected_content,)

    def test_content_add_metadata(self, swh_storage, sample_data):
-        cont = attr.evolve(sample_data["content"][0], data=None, ctime=now())
+        cont = attr.evolve(sample_data.content, data=None, ctime=now())

        actual_result = swh_storage.content_add_metadata([cont])
        assert actual_result == {
"content:add": 1, } expected_cont = cont.to_dict() del expected_cont["ctime"] assert tuple(swh_storage.content_get_metadata([cont.sha1])[cont.sha1]) == ( expected_cont, ) contents = [ obj for (obj_type, obj) in swh_storage.journal_writer.journal.objects if obj_type == "content" ] assert len(contents) == 1 for obj in contents: obj = attr.evolve(obj, ctime=None) assert obj == cont def test_content_add_metadata_different_input(self, swh_storage, sample_data): - contents = sample_data["content"][:2] + contents = sample_data.contents[:2] cont = attr.evolve(contents[0], data=None, ctime=now()) cont2 = attr.evolve(contents[1], data=None, ctime=now()) actual_result = swh_storage.content_add_metadata([cont, cont2]) assert actual_result == { "content:add": 2, } def test_content_add_metadata_collision(self, swh_storage, sample_data): - cont1 = attr.evolve(sample_data["content"][0], data=None, ctime=now()) + cont1 = attr.evolve(sample_data.content, data=None, ctime=now()) # create (corrupted) content with same sha1{,_git} but != sha256 sha1_git_array = bytearray(cont1.sha256) sha1_git_array[0] += 1 cont1b = attr.evolve(cont1, sha256=bytes(sha1_git_array)) with pytest.raises(HashCollision) as cm: swh_storage.content_add_metadata([cont1, cont1b]) exc = cm.value actual_algo = exc.algo assert actual_algo in ["sha1", "sha1_git", "blake2s256"] actual_id = exc.hash_id assert actual_id == getattr(cont1, actual_algo).hex() collisions = exc.args[2] assert len(collisions) == 2 assert collisions == [ content_hex_hashes(cont1.hashes()), content_hex_hashes(cont1b.hashes()), ] assert exc.colliding_content_hashes() == [ cont1.hashes(), cont1b.hashes(), ] def test_skipped_content_add(self, swh_storage, sample_data): - contents = sample_data["skipped_content"][:2] + contents = sample_data.skipped_contents[:2] cont = contents[0] cont2 = attr.evolve(contents[1], blake2s256=None) contents_dict = [c.to_dict() for c in [cont, cont2]] missing = list(swh_storage.skipped_content_missing(contents_dict)) assert missing == [cont.hashes(), cont2.hashes()] actual_result = swh_storage.skipped_content_add([cont, cont, cont2]) assert 2 <= actual_result.pop("skipped_content:add") <= 3 assert actual_result == {} missing = list(swh_storage.skipped_content_missing(contents_dict)) assert missing == [] def test_skipped_content_add_missing_hashes(self, swh_storage, sample_data): cont, cont2 = [ - attr.evolve(c, sha1_git=None) for c in sample_data["skipped_content"][:2] + attr.evolve(c, sha1_git=None) for c in sample_data.skipped_contents[:2] ] contents_dict = [c.to_dict() for c in [cont, cont2]] missing = list(swh_storage.skipped_content_missing(contents_dict)) assert len(missing) == 2 actual_result = swh_storage.skipped_content_add([cont, cont, cont2]) assert 2 <= actual_result.pop("skipped_content:add") <= 3 assert actual_result == {} missing = list(swh_storage.skipped_content_missing(contents_dict)) assert missing == [] def test_skipped_content_missing_partial_hash(self, swh_storage, sample_data): - cont = sample_data["skipped_content"][0] + cont = sample_data.skipped_content cont2 = attr.evolve(cont, sha1_git=None) contents_dict = [c.to_dict() for c in [cont, cont2]] missing = list(swh_storage.skipped_content_missing(contents_dict)) assert len(missing) == 2 actual_result = swh_storage.skipped_content_add([cont]) assert actual_result.pop("skipped_content:add") == 1 assert actual_result == {} missing = list(swh_storage.skipped_content_missing(contents_dict)) assert missing == [cont2.hashes()] @pytest.mark.property_based 
@settings(deadline=None) # this test is very slow @given( strategies.sets( elements=strategies.sampled_from(["sha256", "sha1_git", "blake2s256"]), min_size=0, ) ) - def test_content_missing(self, swh_storage, algos): + def test_content_missing(self, swh_storage, sample_data, algos): algos |= {"sha1"} - content, missing_content = [data.content2, data.skipped_content] + content, missing_content = [sample_data.content2, sample_data.skipped_content] swh_storage.content_add([content]) test_contents = [content.to_dict()] missing_per_hash = defaultdict(list) for i in range(256): test_content = missing_content.to_dict() for hash in algos: test_content[hash] = bytes([i]) + test_content[hash][1:] missing_per_hash[hash].append(test_content[hash]) test_contents.append(test_content) assert set(swh_storage.content_missing(test_contents)) == set( missing_per_hash["sha1"] ) for hash in algos: assert set( swh_storage.content_missing(test_contents, key_hash=hash) ) == set(missing_per_hash[hash]) @pytest.mark.property_based @given( strategies.sets( elements=strategies.sampled_from(["sha256", "sha1_git", "blake2s256"]), min_size=0, ) ) - def test_content_missing_unknown_algo(self, swh_storage, algos): + def test_content_missing_unknown_algo(self, swh_storage, sample_data, algos): algos |= {"sha1"} - content, missing_content = [data.content2, data.skipped_content] + content, missing_content = [sample_data.content2, sample_data.skipped_content] swh_storage.content_add([content]) test_contents = [content.to_dict()] missing_per_hash = defaultdict(list) for i in range(16): test_content = missing_content.to_dict() for hash in algos: test_content[hash] = bytes([i]) + test_content[hash][1:] missing_per_hash[hash].append(test_content[hash]) test_content["nonexisting_algo"] = b"\x00" test_contents.append(test_content) assert set(swh_storage.content_missing(test_contents)) == set( missing_per_hash["sha1"] ) for hash in algos: assert set( swh_storage.content_missing(test_contents, key_hash=hash) ) == set(missing_per_hash[hash]) def test_content_missing_per_sha1(self, swh_storage, sample_data): # given - cont = sample_data["content"][0] - missing_cont = sample_data["skipped_content"][0] + cont = sample_data.content + missing_cont = sample_data.skipped_content swh_storage.content_add([cont]) # when gen = swh_storage.content_missing_per_sha1([cont.sha1, missing_cont.sha1]) # then assert list(gen) == [missing_cont.sha1] def test_content_missing_per_sha1_git(self, swh_storage, sample_data): - cont, cont2 = sample_data["content"][:2] - missing_cont = sample_data["skipped_content"][0] + cont, cont2 = sample_data.contents[:2] + missing_cont = sample_data.skipped_content swh_storage.content_add([cont, cont2]) contents = [cont.sha1_git, cont2.sha1_git, missing_cont.sha1_git] missing_contents = swh_storage.content_missing_per_sha1_git(contents) assert list(missing_contents) == [missing_cont.sha1_git] def test_content_get_partition(self, swh_storage, swh_contents): """content_get_partition paginates results if limit exceeded""" expected_contents = [c.to_dict() for c in swh_contents if c.status != "absent"] actual_contents = [] for i in range(16): actual_result = swh_storage.content_get_partition(i, 16) assert actual_result["next_page_token"] is None actual_contents.extend(actual_result["contents"]) assert_contents_ok(expected_contents, actual_contents, ["sha1"]) def test_content_get_partition_full(self, swh_storage, swh_contents): """content_get_partition for a single partition returns all available contents""" expected_contents = 
[c.to_dict() for c in swh_contents if c.status != "absent"] actual_result = swh_storage.content_get_partition(0, 1) assert actual_result["next_page_token"] is None actual_contents = actual_result["contents"] assert_contents_ok(expected_contents, actual_contents, ["sha1"]) def test_content_get_partition_empty(self, swh_storage, swh_contents): """content_get_partition when at least one of the partitions is empty""" expected_contents = { cont.sha1 for cont in swh_contents if cont.status != "absent" } # nb_partitions = smallest power of 2 such that at least one of # the partitions is empty nb_partitions = 1 << math.floor(math.log2(len(swh_contents)) + 1) seen_sha1s = [] for i in range(nb_partitions): actual_result = swh_storage.content_get_partition( i, nb_partitions, limit=len(swh_contents) + 1 ) for cont in actual_result["contents"]: seen_sha1s.append(cont["sha1"]) # Limit is higher than the max number of results assert actual_result["next_page_token"] is None assert set(seen_sha1s) == expected_contents def test_content_get_partition_limit_none(self, swh_storage): """content_get_partition call with wrong limit input should fail""" with pytest.raises(StorageArgumentException) as e: swh_storage.content_get_partition(1, 16, limit=None) assert e.value.args == ("limit should not be None",) def test_generate_content_get_partition_pagination(self, swh_storage, swh_contents): """content_get_partition returns contents within range provided""" expected_contents = [c.to_dict() for c in swh_contents if c.status != "absent"] # retrieve contents actual_contents = [] for i in range(4): page_token = None while True: actual_result = swh_storage.content_get_partition( i, 4, limit=3, page_token=page_token ) actual_contents.extend(actual_result["contents"]) page_token = actual_result["next_page_token"] if page_token is None: break assert_contents_ok(expected_contents, actual_contents, ["sha1"]) def test_content_get_metadata(self, swh_storage, sample_data): - cont1, cont2 = sample_data["content"][:2] + cont1, cont2 = sample_data.contents[:2] swh_storage.content_add([cont1, cont2]) actual_md = swh_storage.content_get_metadata([cont1.sha1, cont2.sha1]) # we only retrieve the metadata so no data nor ctime within expected_cont1, expected_cont2 = [ attr.evolve(c, data=None).to_dict() for c in [cont1, cont2] ] expected_cont1.pop("ctime") expected_cont2.pop("ctime") assert tuple(actual_md[cont1.sha1]) == (expected_cont1,) assert tuple(actual_md[cont2.sha1]) == (expected_cont2,) assert len(actual_md.keys()) == 2 def test_content_get_metadata_missing_sha1(self, swh_storage, sample_data): - cont1, cont2 = sample_data["content"][:2] - missing_cont = sample_data["skipped_content"][0] + cont1, cont2 = sample_data.contents[:2] + missing_cont = sample_data.skipped_content swh_storage.content_add([cont1, cont2]) actual_contents = swh_storage.content_get_metadata([missing_cont.sha1]) assert len(actual_contents) == 1 assert tuple(actual_contents[missing_cont.sha1]) == () def test_content_get_random(self, swh_storage, sample_data): - cont, cont2, cont3 = sample_data["content"][:3] + cont, cont2, cont3 = sample_data.contents[:3] swh_storage.content_add([cont, cont2, cont3]) assert swh_storage.content_get_random() in { cont.sha1_git, cont2.sha1_git, cont3.sha1_git, } def test_directory_add(self, swh_storage, sample_data): - directory = sample_data["directory"][1] + directory = sample_data.directories[1] init_missing = list(swh_storage.directory_missing([directory.id])) assert [directory.id] == init_missing actual_result = 
swh_storage.directory_add([directory]) assert actual_result == {"directory:add": 1} assert list(swh_storage.journal_writer.journal.objects) == [ ("directory", directory) ] actual_data = list(swh_storage.directory_ls(directory.id)) expected_data = list(transform_entries(directory)) assert sorted(expected_data, key=cmpdir) == sorted(actual_data, key=cmpdir) after_missing = list(swh_storage.directory_missing([directory.id])) assert after_missing == [] swh_storage.refresh_stat_counters() assert swh_storage.stat_counters()["directory"] == 1 def test_directory_add_from_generator(self, swh_storage, sample_data): - directory = sample_data["directory"][1] + directory = sample_data.directories[1] def _dir_gen(): yield directory actual_result = swh_storage.directory_add(directories=_dir_gen()) assert actual_result == {"directory:add": 1} assert list(swh_storage.journal_writer.journal.objects) == [ ("directory", directory) ] swh_storage.refresh_stat_counters() assert swh_storage.stat_counters()["directory"] == 1 def test_directory_add_twice(self, swh_storage, sample_data): - directory = sample_data["directory"][1] + directory = sample_data.directories[1] actual_result = swh_storage.directory_add([directory]) assert actual_result == {"directory:add": 1} assert list(swh_storage.journal_writer.journal.objects) == [ ("directory", directory) ] actual_result = swh_storage.directory_add([directory]) assert actual_result == {"directory:add": 0} assert list(swh_storage.journal_writer.journal.objects) == [ ("directory", directory) ] def test_directory_get_recursive(self, swh_storage, sample_data): - dir1, dir2, dir3 = sample_data["directory"][:3] + dir1, dir2, dir3 = sample_data.directories[:3] init_missing = list(swh_storage.directory_missing([dir1.id])) assert init_missing == [dir1.id] actual_result = swh_storage.directory_add([dir1, dir2, dir3]) assert actual_result == {"directory:add": 3} assert list(swh_storage.journal_writer.journal.objects) == [ ("directory", dir1), ("directory", dir2), ("directory", dir3), ] # List directory containing a file and an unknown subdirectory actual_data = list(swh_storage.directory_ls(dir1.id, recursive=True)) expected_data = list(transform_entries(dir1)) assert sorted(expected_data, key=cmpdir) == sorted(actual_data, key=cmpdir) # List directory containing a single file actual_data = list(swh_storage.directory_ls(dir2.id, recursive=True)) expected_data = list(transform_entries(dir2)) assert sorted(expected_data, key=cmpdir) == sorted(actual_data, key=cmpdir) # List directory containing a known subdirectory, entries should # be both those of the directory and of the subdir actual_data = list(swh_storage.directory_ls(dir3.id, recursive=True)) expected_data = list( itertools.chain( transform_entries(dir3), transform_entries(dir2, prefix=b"subdir/"), ) ) assert sorted(expected_data, key=cmpdir) == sorted(actual_data, key=cmpdir)
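# Note on the recursive case above: directory_ls(..., recursive=True) only
# descends into subdirectories that are themselves present in storage (dir2
# under dir3), prefixing each nested entry name with the subdirectory path;
# an unknown target (dir1's subdirectory) is listed as an entry but not
# expanded. A quick illustrative check of that prefixing (assuming the
# dir3/dir2 layout used by these tests):
#
#     entries = list(swh_storage.directory_ls(dir3.id, recursive=True))
#     assert any(e["name"].startswith(b"subdir/") for e in entries)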
def test_directory_get_non_recursive(self, swh_storage, sample_data): - dir1, dir2, dir3 = sample_data["directory"][:3] + dir1, dir2, dir3 = sample_data.directories[:3] init_missing = list(swh_storage.directory_missing([dir1.id])) assert init_missing == [dir1.id] actual_result = swh_storage.directory_add([dir1, dir2, dir3]) assert actual_result == {"directory:add": 3} assert list(swh_storage.journal_writer.journal.objects) == [ ("directory", dir1), ("directory", dir2), ("directory", dir3), ] # List directory containing a file and an unknown subdirectory actual_data = list(swh_storage.directory_ls(dir1.id)) expected_data = list(transform_entries(dir1)) assert sorted(expected_data, key=cmpdir) == sorted(actual_data, key=cmpdir) # List directory containing a single file actual_data = list(swh_storage.directory_ls(dir2.id)) expected_data = list(transform_entries(dir2)) assert sorted(expected_data, key=cmpdir) == sorted(actual_data, key=cmpdir) # List directory containing a known subdirectory, entries should # only be those of the parent directory, not of the subdir actual_data = list(swh_storage.directory_ls(dir3.id)) expected_data = list(transform_entries(dir3)) assert sorted(expected_data, key=cmpdir) == sorted(actual_data, key=cmpdir) def test_directory_entry_get_by_path(self, swh_storage, sample_data): - cont = sample_data["content"][0] - dir1, dir2, dir3, dir4, dir5 = sample_data["directory"][:5] + cont = sample_data.content + dir1, dir2, dir3, dir4, dir5 = sample_data.directories[:5] # given init_missing = list(swh_storage.directory_missing([dir3.id])) assert init_missing == [dir3.id] actual_result = swh_storage.directory_add([dir3, dir4]) assert actual_result == {"directory:add": 2} expected_entries = [ { "dir_id": dir3.id, "name": b"foo", "type": "file", "target": cont.sha1_git, "sha1": None, "sha1_git": None, "sha256": None, "status": None, "perms": from_disk.DentryPerms.content, "length": None, }, { "dir_id": dir3.id, "name": b"subdir", "type": "dir", "target": dir2.id, "sha1": None, "sha1_git": None, "sha256": None, "status": None, "perms": from_disk.DentryPerms.directory, "length": None, }, { "dir_id": dir3.id, "name": b"hello", "type": "file", "target": dir5.id, "sha1": None, "sha1_git": None, "sha256": None, "status": None, "perms": from_disk.DentryPerms.content, "length": None, }, ] # when (all must be found here) for entry, expected_entry in zip(dir3.entries, expected_entries): actual_entry = swh_storage.directory_entry_get_by_path( dir3.id, [entry.name] ) assert actual_entry == expected_entry # same, but deeper for entry, expected_entry in zip(dir3.entries, expected_entries): actual_entry = swh_storage.directory_entry_get_by_path( dir4.id, [b"subdir1", entry.name] ) expected_entry = expected_entry.copy() expected_entry["name"] = b"subdir1/" + expected_entry["name"] assert actual_entry == expected_entry # when (nothing should be found here since `dir2` is not persisted.)
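# (dir2 itself was deliberately left out of the directory_add([dir3, dir4])
# call above, so each per-entry lookup below is expected to come back None
# rather than raise: directory_entry_get_by_path treats an unknown root
# directory as simply having no matching entry.)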
for entry in dir2.entries: actual_entry = swh_storage.directory_entry_get_by_path( dir2.id, [entry.name] ) assert actual_entry is None def test_directory_get_random(self, swh_storage, sample_data): - dir1, dir2, dir3 = sample_data["directory"][:3] + dir1, dir2, dir3 = sample_data.directories[:3] swh_storage.directory_add([dir1, dir2, dir3]) assert swh_storage.directory_get_random() in { dir1.id, dir2.id, dir3.id, } def test_revision_add(self, swh_storage, sample_data): - revision = sample_data["revision"][0] + revision = sample_data.revision init_missing = swh_storage.revision_missing([revision.id]) assert list(init_missing) == [revision.id] actual_result = swh_storage.revision_add([revision]) assert actual_result == {"revision:add": 1} end_missing = swh_storage.revision_missing([revision.id]) assert list(end_missing) == [] assert list(swh_storage.journal_writer.journal.objects) == [ ("revision", revision) ] # already there so nothing added actual_result = swh_storage.revision_add([revision]) assert actual_result == {"revision:add": 0} swh_storage.refresh_stat_counters() assert swh_storage.stat_counters()["revision"] == 1 def test_revision_add_from_generator(self, swh_storage, sample_data): - revision = sample_data["revision"][0] + revision = sample_data.revision def _rev_gen(): yield revision actual_result = swh_storage.revision_add(_rev_gen()) assert actual_result == {"revision:add": 1} swh_storage.refresh_stat_counters() assert swh_storage.stat_counters()["revision"] == 1 def test_revision_add_twice(self, swh_storage, sample_data): - revision, revision2 = sample_data["revision"][:2] + revision, revision2 = sample_data.revisions[:2] actual_result = swh_storage.revision_add([revision]) assert actual_result == {"revision:add": 1} assert list(swh_storage.journal_writer.journal.objects) == [ ("revision", revision) ] actual_result = swh_storage.revision_add([revision, revision2]) assert actual_result == {"revision:add": 1} assert list(swh_storage.journal_writer.journal.objects) == [ ("revision", revision), ("revision", revision2), ] def test_revision_add_name_clash(self, swh_storage, sample_data): - revision, revision2 = sample_data["revision"][:2] + revision, revision2 = sample_data.revisions[:2] revision1 = attr.evolve( revision, author=Person( fullname=b"John Doe ", name=b"John Doe", email=b"john.doe@example.com", ), ) revision2 = attr.evolve( revision2, author=Person( fullname=b"John Doe ", name=b"John Doe ", email=b"john.doe@example.com ", ), ) actual_result = swh_storage.revision_add([revision1, revision2]) assert actual_result == {"revision:add": 2} def test_revision_get_order(self, swh_storage, sample_data): - revision, revision2 = sample_data["revision"][:2] + revision, revision2 = sample_data.revisions[:2] add_result = swh_storage.revision_add([revision, revision2]) assert add_result == {"revision:add": 2} # order 1 res1 = swh_storage.revision_get([revision.id, revision2.id]) assert [Revision.from_dict(r) for r in res1] == [revision, revision2] # order 2 res2 = swh_storage.revision_get([revision2.id, revision.id]) assert [Revision.from_dict(r) for r in res2] == [revision2, revision] def test_revision_log(self, swh_storage, sample_data): - revision1, revision2, revision3, revision4 = sample_data["revision"][:4] + revision1, revision2, revision3, revision4 = sample_data.revisions[:4] # rev4 -is-child-of-> rev3 -> rev1, (rev2 -> rev1) swh_storage.revision_add([revision1, revision2, revision3, revision4]) # when results = list(swh_storage.revision_log([revision4.id])) # for comparison 
purposes actual_results = [Revision.from_dict(r) for r in results] assert len(actual_results) == 4 # rev4 -child-> rev3 -> rev1, (rev2 -> rev1) assert actual_results == [revision4, revision3, revision1, revision2] def test_revision_log_with_limit(self, swh_storage, sample_data): - revision1, revision2, revision3, revision4 = sample_data["revision"][:4] + revision1, revision2, revision3, revision4 = sample_data.revisions[:4] # revision4 -is-child-of-> revision3 swh_storage.revision_add([revision3, revision4]) results = list(swh_storage.revision_log([revision4.id], 1)) actual_results = [Revision.from_dict(r) for r in results] assert len(actual_results) == 1 assert actual_results[0] == revision4 def test_revision_log_unknown_revision(self, swh_storage, sample_data): - revision = sample_data["revision"][0] + revision = sample_data.revision rev_log = list(swh_storage.revision_log([revision.id])) assert rev_log == [] def test_revision_shortlog(self, swh_storage, sample_data): - revision1, revision2, revision3, revision4 = sample_data["revision"][:4] + revision1, revision2, revision3, revision4 = sample_data.revisions[:4] # rev4 -is-child-of-> rev3 -> (rev1, rev2); rev2 -> rev1 swh_storage.revision_add([revision1, revision2, revision3, revision4]) results = list(swh_storage.revision_shortlog([revision4.id])) actual_results = [[id, tuple(parents)] for (id, parents) in results] assert len(actual_results) == 4 assert actual_results == [ [revision4.id, revision4.parents], [revision3.id, revision3.parents], [revision1.id, revision1.parents], [revision2.id, revision2.parents], ] def test_revision_shortlog_with_limit(self, swh_storage, sample_data): - revision1, revision2, revision3, revision4 = sample_data["revision"][:4] + revision1, revision2, revision3, revision4 = sample_data.revisions[:4] # revision4 -is-child-of-> revision3 swh_storage.revision_add([revision1, revision2, revision3, revision4]) results = list(swh_storage.revision_shortlog([revision4.id], 1)) actual_results = [[id, tuple(parents)] for (id, parents) in results] assert len(actual_results) == 1 assert list(actual_results[0]) == [revision4.id, revision4.parents] def test_revision_get(self, swh_storage, sample_data): - revision, revision2 = sample_data["revision"][:2] + revision, revision2 = sample_data.revisions[:2] swh_storage.revision_add([revision]) actual_revisions = list(swh_storage.revision_get([revision.id, revision2.id])) assert len(actual_revisions) == 2 assert Revision.from_dict(actual_revisions[0]) == revision assert actual_revisions[1] is None def test_revision_get_no_parents(self, swh_storage, sample_data): - revision = sample_data["revision"][0] + revision = sample_data.revision swh_storage.revision_add([revision]) get = list(swh_storage.revision_get([revision.id])) assert len(get) == 1 assert revision.parents == () assert tuple(get[0]["parents"]) == () # no parents on this one def test_revision_get_random(self, swh_storage, sample_data): - revision1, revision2, revision3 = sample_data["revision"][:3] + revision1, revision2, revision3 = sample_data.revisions[:3] swh_storage.revision_add([revision1, revision2, revision3]) assert swh_storage.revision_get_random() in { revision1.id, revision2.id, revision3.id, } def test_release_add(self, swh_storage, sample_data): - release, release2 = sample_data["release"][:2] + release, release2 = sample_data.releases[:2] init_missing = swh_storage.release_missing([release.id, release2.id]) assert list(init_missing) == [release.id, release2.id] actual_result = 
swh_storage.release_add([release, release2]) assert actual_result == {"release:add": 2} end_missing = swh_storage.release_missing([release.id, release2.id]) assert list(end_missing) == [] assert list(swh_storage.journal_writer.journal.objects) == [ ("release", release), ("release", release2), ] # already present so nothing added actual_result = swh_storage.release_add([release, release2]) assert actual_result == {"release:add": 0} swh_storage.refresh_stat_counters() assert swh_storage.stat_counters()["release"] == 2 def test_release_add_from_generator(self, swh_storage, sample_data): - release, release2 = sample_data["release"][:2] + release, release2 = sample_data.releases[:2] def _rel_gen(): yield release yield release2 actual_result = swh_storage.release_add(_rel_gen()) assert actual_result == {"release:add": 2} assert list(swh_storage.journal_writer.journal.objects) == [ ("release", release), ("release", release2), ] swh_storage.refresh_stat_counters() assert swh_storage.stat_counters()["release"] == 2 def test_release_add_no_author_date(self, swh_storage, sample_data): - full_release = sample_data["release"][0] + full_release = sample_data.release release = attr.evolve(full_release, author=None, date=None) actual_result = swh_storage.release_add([release]) assert actual_result == {"release:add": 1} end_missing = swh_storage.release_missing([release.id]) assert list(end_missing) == [] assert list(swh_storage.journal_writer.journal.objects) == [ ("release", release) ] def test_release_add_twice(self, swh_storage, sample_data): - release, release2 = sample_data["release"][:2] + release, release2 = sample_data.releases[:2] actual_result = swh_storage.release_add([release]) assert actual_result == {"release:add": 1} assert list(swh_storage.journal_writer.journal.objects) == [ ("release", release) ] actual_result = swh_storage.release_add([release, release2, release, release2]) assert actual_result == {"release:add": 1} assert set(swh_storage.journal_writer.journal.objects) == set( [("release", release), ("release", release2),] ) def test_release_add_name_clash(self, swh_storage, sample_data): release, release2 = [ attr.evolve( c, author=Person( fullname=b"John Doe ", name=b"John Doe", email=b"john.doe@example.com", ), ) - for c in sample_data["release"][:2] + for c in sample_data.releases[:2] ] actual_result = swh_storage.release_add([release, release2]) assert actual_result == {"release:add": 2} def test_release_get(self, swh_storage, sample_data): - release, release2, release3 = sample_data["release"][:3] + release, release2, release3 = sample_data.releases[:3] # given swh_storage.release_add([release, release2]) # when releases = list(swh_storage.release_get([release.id, release2.id])) actual_releases = [Release.from_dict(r) for r in releases] # then assert actual_releases == [release, release2] unknown_releases = list(swh_storage.release_get([release3.id])) assert unknown_releases[0] is None def test_release_get_order(self, swh_storage, sample_data): - release, release2 = sample_data["release"][:2] + release, release2 = sample_data.releases[:2] add_result = swh_storage.release_add([release, release2]) assert add_result == {"release:add": 2} # order 1 res1 = swh_storage.release_get([release.id, release2.id]) assert list(res1) == [release.to_dict(), release2.to_dict()] # order 2 res2 = swh_storage.release_get([release2.id, release.id]) assert list(res2) == [release2.to_dict(), release.to_dict()] def test_release_get_random(self, swh_storage, sample_data): - release, release2, release3 = 
sample_data["release"][:3] + release, release2, release3 = sample_data.releases[:3] swh_storage.release_add([release, release2, release3]) assert swh_storage.release_get_random() in { release.id, release2.id, release3.id, } def test_origin_add(self, swh_storage, sample_data): - origin, origin2 = sample_data["origin"][:2] + origin, origin2 = sample_data.origins[:2] origin_dict, origin2_dict = [o.to_dict() for o in [origin, origin2]] assert swh_storage.origin_get([origin_dict])[0] is None stats = swh_storage.origin_add([origin, origin2]) assert stats == {"origin:add": 2} actual_origin = swh_storage.origin_get([origin_dict])[0] assert actual_origin["url"] == origin.url actual_origin2 = swh_storage.origin_get([origin2_dict])[0] assert actual_origin2["url"] == origin2.url assert set(swh_storage.journal_writer.journal.objects) == set( [("origin", origin), ("origin", origin2),] ) swh_storage.refresh_stat_counters() assert swh_storage.stat_counters()["origin"] == 2 def test_origin_add_from_generator(self, swh_storage, sample_data): - origin, origin2 = sample_data["origin"][:2] + origin, origin2 = sample_data.origins[:2] origin_dict, origin2_dict = [o.to_dict() for o in [origin, origin2]] def _ori_gen(): yield origin yield origin2 stats = swh_storage.origin_add(_ori_gen()) assert stats == {"origin:add": 2} actual_origin = swh_storage.origin_get([origin_dict])[0] assert actual_origin["url"] == origin.url actual_origin2 = swh_storage.origin_get([origin2_dict])[0] assert actual_origin2["url"] == origin2.url assert set(swh_storage.journal_writer.journal.objects) == set( [("origin", origin), ("origin", origin2),] ) swh_storage.refresh_stat_counters() assert swh_storage.stat_counters()["origin"] == 2 def test_origin_add_twice(self, swh_storage, sample_data): - origin, origin2 = sample_data["origin"][:2] + origin, origin2 = sample_data.origins[:2] origin_dict, origin2_dict = [o.to_dict() for o in [origin, origin2]] add1 = swh_storage.origin_add([origin, origin2]) assert set(swh_storage.journal_writer.journal.objects) == set( [("origin", origin), ("origin", origin2),] ) assert add1 == {"origin:add": 2} add2 = swh_storage.origin_add([origin, origin2]) assert set(swh_storage.journal_writer.journal.objects) == set( [("origin", origin), ("origin", origin2),] ) assert add2 == {"origin:add": 0} def test_origin_get_legacy(self, swh_storage, sample_data): - origin, origin2 = sample_data["origin"][:2] + origin, origin2 = sample_data.origins[:2] origin_dict, origin2_dict = [o.to_dict() for o in [origin, origin2]] assert swh_storage.origin_get(origin_dict) is None swh_storage.origin_add([origin]) actual_origin0 = swh_storage.origin_get(origin_dict) assert actual_origin0["url"] == origin.url def test_origin_get(self, swh_storage, sample_data): - origin, origin2 = sample_data["origin"][:2] + origin, origin2 = sample_data.origins[:2] origin_dict, origin2_dict = [o.to_dict() for o in [origin, origin2]] assert swh_storage.origin_get(origin_dict) is None assert swh_storage.origin_get([origin_dict]) == [None] swh_storage.origin_add([origin]) actual_origins = swh_storage.origin_get([origin_dict]) assert len(actual_origins) == 1 actual_origin0 = swh_storage.origin_get(origin_dict) assert actual_origin0 == actual_origins[0] assert actual_origin0["url"] == origin.url actual_origins = swh_storage.origin_get([origin_dict, {"url": "not://exists"}]) assert actual_origins == [origin_dict, None] def _generate_random_visits(self, nb_visits=100, start=0, end=7): """Generate random visits within the last 2 months (to avoid computations) 
""" visits = [] today = now() for weeks in range(nb_visits, 0, -1): hours = random.randint(0, 24) minutes = random.randint(0, 60) seconds = random.randint(0, 60) days = random.randint(0, 28) weeks = random.randint(start, end) date_visit = today - timedelta( weeks=weeks, hours=hours, minutes=minutes, seconds=seconds, days=days ) visits.append(date_visit) return visits def test_origin_visit_get_all(self, swh_storage, sample_data): - origin = sample_data["origin"][0] + origin = sample_data.origin swh_storage.origin_add([origin]) visits = swh_storage.origin_visit_add( [ OriginVisit( - origin=origin.url, date=data.date_visit1, type=data.type_visit1, + origin=origin.url, + date=sample_data.date_visit1, + type=sample_data.type_visit1, ), OriginVisit( - origin=origin.url, date=data.date_visit2, type=data.type_visit2, + origin=origin.url, + date=sample_data.date_visit2, + type=sample_data.type_visit2, ), OriginVisit( - origin=origin.url, date=data.date_visit2, type=data.type_visit2, + origin=origin.url, + date=sample_data.date_visit2, + type=sample_data.type_visit2, ), ] ) ov1, ov2, ov3 = [ {**v.to_dict(), "status": "created", "snapshot": None, "metadata": None,} for v in visits ] # order asc, no pagination, no limit all_visits = list(swh_storage.origin_visit_get(origin.url)) assert all_visits == [ov1, ov2, ov3] # order asc, no pagination, limit all_visits2 = list(swh_storage.origin_visit_get(origin.url, limit=2)) assert all_visits2 == [ov1, ov2] # order asc, pagination, no limit all_visits3 = list( swh_storage.origin_visit_get(origin.url, last_visit=ov1["visit"]) ) assert all_visits3 == [ov2, ov3] # order asc, pagination, limit all_visits4 = list( swh_storage.origin_visit_get(origin.url, last_visit=ov2["visit"], limit=1) ) assert all_visits4 == [ov3] # order desc, no pagination, no limit all_visits5 = list(swh_storage.origin_visit_get(origin.url, order="desc")) assert all_visits5 == [ov3, ov2, ov1] # order desc, no pagination, limit all_visits6 = list( swh_storage.origin_visit_get(origin.url, limit=2, order="desc") ) assert all_visits6 == [ov3, ov2] # order desc, pagination, no limit all_visits7 = list( swh_storage.origin_visit_get( origin.url, last_visit=ov3["visit"], order="desc" ) ) assert all_visits7 == [ov2, ov1] # order desc, pagination, limit all_visits8 = list( swh_storage.origin_visit_get( origin.url, last_visit=ov3["visit"], order="desc", limit=1 ) ) assert all_visits8 == [ov2] def test_origin_visit_get__unknown_origin(self, swh_storage): assert [] == list(swh_storage.origin_visit_get("foo")) def test_origin_visit_get_random(self, swh_storage, sample_data): - origins = sample_data["origin"][:2] + origins = sample_data.origins[:2] swh_storage.origin_add(origins) # Add some random visits within the selection range visits = self._generate_random_visits() visit_type = "git" # Add visits to those origins for origin in origins: for date_visit in visits: visit = swh_storage.origin_visit_add( [OriginVisit(origin=origin.url, date=date_visit, type=visit_type,)] )[0] swh_storage.origin_visit_status_add( [ OriginVisitStatus( origin=origin.url, visit=visit.visit, date=now(), status="full", snapshot=None, ) ] ) swh_storage.refresh_stat_counters() stats = swh_storage.stat_counters() assert stats["origin"] == len(origins) assert stats["origin_visit"] == len(origins) * len(visits) random_origin_visit = swh_storage.origin_visit_get_random(visit_type) assert random_origin_visit assert random_origin_visit["origin"] is not None assert random_origin_visit["origin"] in [o.url for o in origins] def 
test_origin_visit_get_random_nothing_found(self, swh_storage, sample_data): - origins = sample_data["origin"] + origins = sample_data.origins swh_storage.origin_add(origins) visit_type = "hg" # Add some visits outside of the random generation selection so nothing # will be found by the random selection visits = self._generate_random_visits(nb_visits=3, start=13, end=24) for origin in origins: for date_visit in visits: visit = swh_storage.origin_visit_add( [OriginVisit(origin=origin.url, date=date_visit, type=visit_type,)] )[0] swh_storage.origin_visit_status_add( [ OriginVisitStatus( origin=origin.url, visit=visit.visit, date=now(), status="full", snapshot=None, ) ] ) random_origin_visit = swh_storage.origin_visit_get_random(visit_type) assert random_origin_visit is None def test_origin_get_by_sha1(self, swh_storage, sample_data): - origin = sample_data["origin"][0] + origin = sample_data.origin assert swh_storage.origin_get(origin.to_dict()) is None swh_storage.origin_add([origin]) origins = list(swh_storage.origin_get_by_sha1([sha1(origin.url)])) assert len(origins) == 1 assert origins[0]["url"] == origin.url def test_origin_get_by_sha1_not_found(self, swh_storage, sample_data): - origin = sample_data["origin"][0] + origin = sample_data.origin assert swh_storage.origin_get(origin.to_dict()) is None origins = list(swh_storage.origin_get_by_sha1([sha1(origin.url)])) assert len(origins) == 1 assert origins[0] is None def test_origin_search_single_result(self, swh_storage, sample_data): - origin, origin2 = sample_data["origin"][:2] + origin, origin2 = sample_data.origins[:2] found_origins = list(swh_storage.origin_search(origin.url)) assert len(found_origins) == 0 found_origins = list(swh_storage.origin_search(origin.url, regexp=True)) assert len(found_origins) == 0 swh_storage.origin_add([origin]) origin_data = origin.to_dict() found_origins = list(swh_storage.origin_search(origin.url)) assert len(found_origins) == 1 assert found_origins[0] == origin_data found_origins = list( swh_storage.origin_search(f".{origin.url[1:-1]}.", regexp=True) ) assert len(found_origins) == 1 assert found_origins[0] == origin_data swh_storage.origin_add([origin2]) origin2_data = origin2.to_dict() found_origins = list(swh_storage.origin_search(origin2.url)) assert len(found_origins) == 1 assert found_origins[0] == origin2_data found_origins = list( swh_storage.origin_search(f".{origin2.url[1:-1]}.", regexp=True) ) assert len(found_origins) == 1 assert found_origins[0] == origin2_data def test_origin_search_no_regexp(self, swh_storage, sample_data): - origin, origin2 = sample_data["origin"][:2] + origin, origin2 = sample_data.origins[:2] origin_dicts = [o.to_dict() for o in [origin, origin2]] swh_storage.origin_add([origin, origin2]) # no pagination found_origins = list(swh_storage.origin_search("/")) assert len(found_origins) == 2 # offset=0 found_origins0 = list(swh_storage.origin_search("/", offset=0, limit=1)) assert len(found_origins0) == 1 assert found_origins0[0] in origin_dicts # offset=1 found_origins1 = list(swh_storage.origin_search("/", offset=1, limit=1)) assert len(found_origins1) == 1 assert found_origins1[0] in origin_dicts # check both origins were returned assert found_origins0 != found_origins1 def test_origin_search_regexp_substring(self, swh_storage, sample_data): - origin, origin2 = sample_data["origin"][:2] + origin, origin2 = sample_data.origins[:2] origin_dicts = [o.to_dict() for o in [origin, origin2]] swh_storage.origin_add([origin, origin2]) # no pagination found_origins = 
list(swh_storage.origin_search("/", regexp=True)) assert len(found_origins) == 2 # offset=0 found_origins0 = list( swh_storage.origin_search("/", offset=0, limit=1, regexp=True) ) assert len(found_origins0) == 1 assert found_origins0[0] in origin_dicts # offset=1 found_origins1 = list( swh_storage.origin_search("/", offset=1, limit=1, regexp=True) ) assert len(found_origins1) == 1 assert found_origins1[0] in origin_dicts # check both origins were returned assert found_origins0 != found_origins1 def test_origin_search_regexp_fullstring(self, swh_storage, sample_data): - origin, origin2 = sample_data["origin"][:2] + origin, origin2 = sample_data.origins[:2] origin_dicts = [o.to_dict() for o in [origin, origin2]] swh_storage.origin_add([origin, origin2]) # no pagination found_origins = list(swh_storage.origin_search(".*/.*", regexp=True)) assert len(found_origins) == 2 # offset=0 found_origins0 = list( swh_storage.origin_search(".*/.*", offset=0, limit=1, regexp=True) ) assert len(found_origins0) == 1 assert found_origins0[0] in origin_dicts # offset=1 found_origins1 = list( swh_storage.origin_search(".*/.*", offset=1, limit=1, regexp=True) ) assert len(found_origins1) == 1 assert found_origins1[0] in origin_dicts # check both origins were returned assert found_origins0 != found_origins1 def test_origin_visit_add(self, swh_storage, sample_data): - origin1 = sample_data["origin"][1] + origin1 = sample_data.origins[1] swh_storage.origin_add([origin1]) date_visit = now() date_visit2 = date_visit + datetime.timedelta(minutes=1) date_visit = round_to_milliseconds(date_visit) date_visit2 = round_to_milliseconds(date_visit2) visit1 = OriginVisit( - origin=origin1.url, date=date_visit, type=data.type_visit1, + origin=origin1.url, date=date_visit, type=sample_data.type_visit1, ) visit2 = OriginVisit( - origin=origin1.url, date=date_visit2, type=data.type_visit2, + origin=origin1.url, date=date_visit2, type=sample_data.type_visit2, ) # add once ov1, ov2 = swh_storage.origin_visit_add([visit1, visit2]) # then again (will be ignored as they already exist) origin_visit1, origin_visit2 = swh_storage.origin_visit_add([ov1, ov2]) assert ov1 == origin_visit1 assert ov2 == origin_visit2 ovs1 = OriginVisitStatus( origin=origin1.url, visit=ov1.visit, date=date_visit, status="created", snapshot=None, ) ovs2 = OriginVisitStatus( origin=origin1.url, visit=ov2.visit, date=date_visit2, status="created", snapshot=None, ) actual_origin_visits = list(swh_storage.origin_visit_get(origin1.url)) expected_visits = [ {**ovs1.to_dict(), "type": ov1.type}, {**ovs2.to_dict(), "type": ov2.type}, ] assert len(expected_visits) == len(actual_origin_visits) for visit in expected_visits: assert visit in actual_origin_visits actual_objects = list(swh_storage.journal_writer.journal.objects) expected_objects = list( [("origin", origin1)] + [("origin_visit", visit) for visit in [ov1, ov2]] * 2 + [("origin_visit_status", ovs) for ovs in [ovs1, ovs2]] ) for obj in expected_objects: assert obj in actual_objects - def test_origin_visit_add_validation(self, swh_storage): + def test_origin_visit_add_validation(self, swh_storage, sample_data): """Unknown origin when adding visits should raise""" - visit = OriginVisit( - origin="something-unknown", date=now(), type=data.type_visit1, - ) + visit = attr.evolve(sample_data.origin_visit, origin="something-unknown") with pytest.raises(StorageArgumentException, match="Unknown origin"): swh_storage.origin_visit_add([visit]) objects = list(swh_storage.journal_writer.journal.objects) assert not objects
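# The attr.evolve pattern used by test_origin_visit_add_validation above
# generalizes to other negative tests: evolve a known-good sample object,
# override one field with a bad value, and assert the storage rejects it
# without writing anything to the journal. Illustrative sketch (the origin
# URL here is made up):
#
#     bad_visit = attr.evolve(sample_data.origin_visit, origin="not-added")
#     with pytest.raises(StorageArgumentException, match="Unknown origin"):
#         swh_storage.origin_visit_add([bad_visit])
#     assert not list(swh_storage.journal_writer.journal.objects)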
def test_origin_visit_status_add_validation(self, swh_storage): """Wrong origin_visit_status input should raise storage argument error""" date_visit = now() visit_status1 = OriginVisitStatus( origin="unknown-origin-url", visit=10, date=date_visit, status="full", snapshot=None, ) with pytest.raises(StorageArgumentException, match="Unknown origin"): swh_storage.origin_visit_status_add([visit_status1]) objects = list(swh_storage.journal_writer.journal.objects) assert not objects def test_origin_visit_status_add(self, swh_storage, sample_data): """Correct origin visit statuses should add a new visit status """ - snapshot = sample_data["snapshot"][0] - origin1 = sample_data["origin"][1] + snapshot = sample_data.snapshot + origin1 = sample_data.origins[1] origin2 = Origin(url="new-origin") swh_storage.origin_add([origin1, origin2]) ov1, ov2 = swh_storage.origin_visit_add( [ OriginVisit( - origin=origin1.url, date=data.date_visit1, type=data.type_visit1, + origin=origin1.url, + date=sample_data.date_visit1, + type=sample_data.type_visit1, ), OriginVisit( - origin=origin2.url, date=data.date_visit2, type=data.type_visit2, + origin=origin2.url, + date=sample_data.date_visit2, + type=sample_data.type_visit2, ), ] ) ovs1 = OriginVisitStatus( origin=origin1.url, visit=ov1.visit, - date=data.date_visit1, + date=sample_data.date_visit1, status="created", snapshot=None, ) ovs2 = OriginVisitStatus( origin=origin2.url, visit=ov2.visit, - date=data.date_visit2, + date=sample_data.date_visit2, status="created", snapshot=None, ) date_visit_now = now() visit_status1 = OriginVisitStatus( origin=ov1.origin, visit=ov1.visit, date=date_visit_now, status="full", snapshot=snapshot.id, ) date_visit_now = now() visit_status2 = OriginVisitStatus( origin=ov2.origin, visit=ov2.visit, date=date_visit_now, status="ongoing", snapshot=None, metadata={"intrinsic": "something"}, ) swh_storage.origin_visit_status_add([visit_status1, visit_status2]) origin_visit1 = swh_storage.origin_visit_get_latest( origin1.url, require_snapshot=True ) assert origin_visit1 assert origin_visit1["status"] == "full" assert origin_visit1["snapshot"] == snapshot.id origin_visit2 = swh_storage.origin_visit_get_latest( origin2.url, require_snapshot=False ) assert origin2.url != origin1.url assert origin_visit2 assert origin_visit2["status"] == "ongoing" assert origin_visit2["snapshot"] is None assert origin_visit2["metadata"] == {"intrinsic": "something"} actual_objects = list(swh_storage.journal_writer.journal.objects) expected_origins = [origin1, origin2] expected_visits = [ov1, ov2] expected_visit_statuses = [ovs1, ovs2, visit_status1, visit_status2] expected_objects = ( [("origin", o) for o in expected_origins] + [("origin_visit", v) for v in expected_visits] + [("origin_visit_status", ovs) for ovs in expected_visit_statuses] ) for obj in expected_objects: assert obj in actual_objects def test_origin_visit_status_add_twice(self, swh_storage, sample_data): """Correct origin visit statuses should add a new visit status """ - snapshot = sample_data["snapshot"][0] - origin1 = sample_data["origin"][1] + snapshot = sample_data.snapshot + origin1 = sample_data.origins[1] swh_storage.origin_add([origin1]) ov1 = swh_storage.origin_visit_add( [ OriginVisit( - origin=origin1.url, date=data.date_visit1, type=data.type_visit1, + origin=origin1.url, + date=sample_data.date_visit1, + type=sample_data.type_visit1, ), ] )[0] ovs1 = OriginVisitStatus( origin=origin1.url, visit=ov1.visit, - date=data.date_visit1, + date=sample_data.date_visit1, status="created", 
snapshot=None, ) date_visit_now = now() visit_status1 = OriginVisitStatus( origin=ov1.origin, visit=ov1.visit, date=date_visit_now, status="full", snapshot=snapshot.id, ) swh_storage.origin_visit_status_add([visit_status1]) # second call will ignore existing entries (will send to storage though) swh_storage.origin_visit_status_add([visit_status1]) origin_visits = list(swh_storage.origin_visit_get(ov1.origin)) assert len(origin_visits) == 1 origin_visit1 = origin_visits[0] assert origin_visit1 assert origin_visit1["status"] == "full" assert origin_visit1["snapshot"] == snapshot.id actual_objects = list(swh_storage.journal_writer.journal.objects) expected_origins = [origin1] expected_visits = [ov1] expected_visit_statuses = [ovs1, visit_status1, visit_status1] # write twice in the journal expected_objects = ( [("origin", o) for o in expected_origins] + [("origin_visit", v) for v in expected_visits] + [("origin_visit_status", ovs) for ovs in expected_visit_statuses] ) for obj in expected_objects: assert obj in actual_objects def test_origin_visit_find_by_date(self, swh_storage, sample_data): - origin = sample_data["origin"][0] + origin = sample_data.origin swh_storage.origin_add([origin]) visit1 = OriginVisit( - origin=origin.url, date=data.date_visit2, type=data.type_visit1, + origin=origin.url, + date=sample_data.date_visit2, + type=sample_data.type_visit1, ) visit2 = OriginVisit( - origin=origin.url, date=data.date_visit3, type=data.type_visit2, + origin=origin.url, + date=sample_data.date_visit3, + type=sample_data.type_visit2, ) visit3 = OriginVisit( - origin=origin.url, date=data.date_visit2, type=data.type_visit3, + origin=origin.url, + date=sample_data.date_visit2, + type=sample_data.type_visit3, ) ov1, ov2, ov3 = swh_storage.origin_visit_add([visit1, visit2, visit3]) ovs1 = OriginVisitStatus( origin=origin.url, visit=ov1.visit, - date=data.date_visit2, + date=sample_data.date_visit2, status="ongoing", snapshot=None, ) ovs2 = OriginVisitStatus( origin=origin.url, visit=ov2.visit, - date=data.date_visit3, + date=sample_data.date_visit3, status="ongoing", snapshot=None, ) ovs3 = OriginVisitStatus( origin=origin.url, visit=ov3.visit, - date=data.date_visit2, + date=sample_data.date_visit2, status="ongoing", snapshot=None, ) swh_storage.origin_visit_status_add([ovs1, ovs2, ovs3]) # Simple case - visit = swh_storage.origin_visit_find_by_date(origin.url, data.date_visit3) + visit = swh_storage.origin_visit_find_by_date( + origin.url, sample_data.date_visit3 + ) assert visit["visit"] == ov2.visit # There are two visits at the same date, the latest must be returned - visit = swh_storage.origin_visit_find_by_date(origin.url, data.date_visit2) + visit = swh_storage.origin_visit_find_by_date( + origin.url, sample_data.date_visit2 + ) assert visit["visit"] == ov3.visit - def test_origin_visit_find_by_date__unknown_origin(self, swh_storage): - swh_storage.origin_visit_find_by_date("foo", data.date_visit2) + def test_origin_visit_find_by_date__unknown_origin(self, swh_storage, sample_data): + swh_storage.origin_visit_find_by_date("foo", sample_data.date_visit2) def test_origin_visit_get_by(self, swh_storage, sample_data): - snapshot = sample_data["snapshot"][0] - origins = sample_data["origin"][:2] + snapshot = sample_data.snapshot + origins = sample_data.origins[:2] swh_storage.origin_add(origins) origin_url, origin_url2 = [o.url for o in origins] visit = OriginVisit( - origin=origin_url, date=data.date_visit2, type=data.type_visit2, + origin=origin_url, + date=sample_data.date_visit2, + 
type=sample_data.type_visit2, ) origin_visit1 = swh_storage.origin_visit_add([visit])[0] swh_storage.snapshot_add([snapshot]) swh_storage.origin_visit_status_add( [ OriginVisitStatus( origin=origin_url, visit=origin_visit1.visit, date=now(), status="ongoing", snapshot=snapshot.id, ) ] ) # Add some other {origin, visit} entries visit2 = OriginVisit( - origin=origin_url, date=data.date_visit3, type=data.type_visit3, + origin=origin_url, + date=sample_data.date_visit3, + type=sample_data.type_visit3, ) visit3 = OriginVisit( - origin=origin_url2, date=data.date_visit3, type=data.type_visit3, + origin=origin_url2, + date=sample_data.date_visit3, + type=sample_data.type_visit3, ) swh_storage.origin_visit_add([visit2, visit3]) # when visit1_metadata = { "contents": 42, "directories": 22, } swh_storage.origin_visit_status_add( [ OriginVisitStatus( origin=origin_url, visit=origin_visit1.visit, date=now(), status="full", snapshot=snapshot.id, metadata=visit1_metadata, ) ] ) expected_origin_visit = origin_visit1.to_dict() expected_origin_visit.update( { "origin": origin_url, "visit": origin_visit1.visit, - "date": data.date_visit2, - "type": data.type_visit2, + "date": sample_data.date_visit2, + "type": sample_data.type_visit2, "metadata": visit1_metadata, "status": "full", "snapshot": snapshot.id, } ) # when actual_origin_visit1 = swh_storage.origin_visit_get_by( origin_url, origin_visit1.visit ) # then assert actual_origin_visit1 == expected_origin_visit def test_origin_visit_get_by__unknown_origin(self, swh_storage): assert swh_storage.origin_visit_get_by("foo", 10) is None def test_origin_visit_get_by_no_result(self, swh_storage, sample_data): - origin = sample_data["origin"][0] + origin = sample_data.origin swh_storage.origin_add([origin]) actual_origin_visit = swh_storage.origin_visit_get_by(origin.url, 999) assert actual_origin_visit is None def test_origin_visit_get_latest_none(self, swh_storage, sample_data): """Origin visit get latest on unknown objects should return nothing """ # unknown origin so no result assert swh_storage.origin_visit_get_latest("unknown-origin") is None # unknown type - origin = sample_data["origin"][0] + origin = sample_data.origin swh_storage.origin_add([origin]) assert swh_storage.origin_visit_get_latest(origin.url, type="unknown") is None def test_origin_visit_get_latest_filter_type(self, swh_storage, sample_data): """Filtering origin visit get latest with filter type should be ok """ - origin = sample_data["origin"][0] + origin = sample_data.origin swh_storage.origin_add([origin]) visit1 = OriginVisit( - origin=origin.url, date=data.date_visit1, type=data.type_visit1, + origin=origin.url, + date=sample_data.date_visit1, + type=sample_data.type_visit1, ) visit2 = OriginVisit( - origin=origin.url, date=data.date_visit2, type=data.type_visit2, + origin=origin.url, + date=sample_data.date_visit2, + type=sample_data.type_visit2, ) # Add a visit with the same date as the previous one visit3 = OriginVisit( - origin=origin.url, date=data.date_visit2, type=data.type_visit2, + origin=origin.url, + date=sample_data.date_visit2, + type=sample_data.type_visit2, ) - assert data.type_visit1 != data.type_visit2 - assert data.date_visit1 < data.date_visit2 + assert sample_data.type_visit1 != sample_data.type_visit2 + assert sample_data.date_visit1 < sample_data.date_visit2 ov1, ov2, ov3 = swh_storage.origin_visit_add([visit1, visit2, visit3]) origin_visit1 = swh_storage.origin_visit_get_by(origin.url, ov1.visit) origin_visit3 = swh_storage.origin_visit_get_by(origin.url, 
ov3.visit) - assert data.type_visit1 != data.type_visit2 + assert sample_data.type_visit1 != sample_data.type_visit2 # Check type filter is ok actual_ov1 = swh_storage.origin_visit_get_latest( - origin.url, type=data.type_visit1, + origin.url, type=sample_data.type_visit1, ) assert actual_ov1 == origin_visit1 actual_ov3 = swh_storage.origin_visit_get_latest( - origin.url, type=data.type_visit2, + origin.url, type=sample_data.type_visit2, ) assert actual_ov3 == origin_visit3 new_type = "npm" - assert new_type not in [data.type_visit1, data.type_visit2] + assert new_type not in [sample_data.type_visit1, sample_data.type_visit2] assert ( swh_storage.origin_visit_get_latest( origin.url, type=new_type, # no visit matching that type ) is None ) def test_origin_visit_get_latest(self, swh_storage, sample_data): - empty_snapshot, complete_snapshot = sample_data["snapshot"][1:3] - origin = sample_data["origin"][0] + empty_snapshot, complete_snapshot = sample_data.snapshots[1:3] + origin = sample_data.origin swh_storage.origin_add([origin]) visit1 = OriginVisit( - origin=origin.url, date=data.date_visit1, type=data.type_visit1, + origin=origin.url, + date=sample_data.date_visit1, + type=sample_data.type_visit1, ) visit2 = OriginVisit( - origin=origin.url, date=data.date_visit2, type=data.type_visit2, + origin=origin.url, + date=sample_data.date_visit2, + type=sample_data.type_visit2, ) # Add a visit with the same date as the previous one visit3 = OriginVisit( - origin=origin.url, date=data.date_visit2, type=data.type_visit2, + origin=origin.url, + date=sample_data.date_visit2, + type=sample_data.type_visit2, ) ov1, ov2, ov3 = swh_storage.origin_visit_add([visit1, visit2, visit3]) origin_visit1 = swh_storage.origin_visit_get_by(origin.url, ov1.visit) origin_visit2 = swh_storage.origin_visit_get_by(origin.url, ov2.visit) origin_visit3 = swh_storage.origin_visit_get_by(origin.url, ov3.visit) # Three visits, none with a snapshot yet assert origin_visit3 == swh_storage.origin_visit_get_latest(origin.url) assert ( swh_storage.origin_visit_get_latest(origin.url, require_snapshot=True) is None ) # Add snapshot to visit1; require_snapshot=True makes it return # visit1 and require_snapshot=False still returns visit3 swh_storage.snapshot_add([complete_snapshot]) swh_storage.origin_visit_status_add( [ OriginVisitStatus( origin=origin.url, visit=ov1.visit, date=now(), status="ongoing", snapshot=complete_snapshot.id, ) ] ) actual_visit = swh_storage.origin_visit_get_latest( origin.url, require_snapshot=True ) assert actual_visit == { **origin_visit1, "snapshot": complete_snapshot.id, "status": "ongoing", # visit1 now has status ongoing } assert origin_visit3 == swh_storage.origin_visit_get_latest(origin.url) # Status filter: no visit has status=full yet, so no visit # is returned assert ( swh_storage.origin_visit_get_latest(origin.url, allowed_statuses=["full"]) is None ) # Mark the first visit as completed and check status filter again swh_storage.origin_visit_status_add( [ OriginVisitStatus( origin=origin.url, visit=ov1.visit, date=now(), status="full", snapshot=complete_snapshot.id, ) ] ) assert { **origin_visit1, "snapshot": complete_snapshot.id, "status": "full", } == swh_storage.origin_visit_get_latest(origin.url, allowed_statuses=["full"]) assert origin_visit3 == swh_storage.origin_visit_get_latest(origin.url) # Add snapshot to visit2 and check that the new snapshot is returned swh_storage.snapshot_add([empty_snapshot]) swh_storage.origin_visit_status_add( [ OriginVisitStatus( origin=origin.url,
visit=ov2.visit, date=now(), status="ongoing", snapshot=empty_snapshot.id, ) ] ) assert { **origin_visit2, "snapshot": empty_snapshot.id, "status": "ongoing", } == swh_storage.origin_visit_get_latest(origin.url, require_snapshot=True) assert origin_visit3 == swh_storage.origin_visit_get_latest(origin.url) # Check that the status filter is still working assert { **origin_visit1, "snapshot": complete_snapshot.id, "status": "full", } == swh_storage.origin_visit_get_latest(origin.url, allowed_statuses=["full"]) # Add snapshot to visit3 (same date as visit2) swh_storage.origin_visit_status_add( [ OriginVisitStatus( origin=origin.url, visit=ov3.visit, date=now(), status="ongoing", snapshot=complete_snapshot.id, ) ] ) assert { **origin_visit1, "snapshot": complete_snapshot.id, "status": "full", } == swh_storage.origin_visit_get_latest(origin.url, allowed_statuses=["full"]) assert { **origin_visit1, "snapshot": complete_snapshot.id, "status": "full", } == swh_storage.origin_visit_get_latest( origin.url, allowed_statuses=["full"], require_snapshot=True ) assert { **origin_visit3, "snapshot": complete_snapshot.id, "status": "ongoing", } == swh_storage.origin_visit_get_latest(origin.url) assert { **origin_visit3, "snapshot": complete_snapshot.id, "status": "ongoing", } == swh_storage.origin_visit_get_latest(origin.url, require_snapshot=True) def test_origin_visit_status_get_latest(self, swh_storage, sample_data): - snapshot = sample_data["snapshot"][2] - origin1 = sample_data["origin"][0] + snapshot = sample_data.snapshots[2] + origin1 = sample_data.origin swh_storage.origin_add([origin1]) # to have some reference visits ov1, ov2 = swh_storage.origin_visit_add( [ OriginVisit( - origin=origin1.url, date=data.date_visit1, type=data.type_visit1, + origin=origin1.url, + date=sample_data.date_visit1, + type=sample_data.type_visit1, ), OriginVisit( - origin=origin1.url, date=data.date_visit2, type=data.type_visit2, + origin=origin1.url, + date=sample_data.date_visit2, + type=sample_data.type_visit2, ), ] ) swh_storage.snapshot_add([snapshot]) date_now = now() date_now = round_to_milliseconds(date_now) - assert data.date_visit1 < data.date_visit2 - assert data.date_visit2 < date_now + assert sample_data.date_visit1 < sample_data.date_visit2 + assert sample_data.date_visit2 < date_now ovs1 = OriginVisitStatus( origin=origin1.url, visit=ov1.visit, - date=data.date_visit1, + date=sample_data.date_visit1, status="partial", snapshot=None, ) ovs2 = OriginVisitStatus( origin=origin1.url, visit=ov1.visit, - date=data.date_visit2, + date=sample_data.date_visit2, status="ongoing", snapshot=None, ) ovs3 = OriginVisitStatus( origin=origin1.url, visit=ov2.visit, - date=data.date_visit2 + datetime.timedelta(minutes=1), # to not be ignored + date=sample_data.date_visit2 + + datetime.timedelta(minutes=1), # to not be ignored status="ongoing", snapshot=None, ) ovs4 = OriginVisitStatus( origin=origin1.url, visit=ov2.visit, date=date_now, status="full", snapshot=snapshot.id, metadata={"something": "wicked"}, ) swh_storage.origin_visit_status_add([ovs1, ovs2, ovs3, ovs4]) # unknown origin so no result actual_origin_visit = swh_storage.origin_visit_status_get_latest( "unknown-origin", ov1.visit ) assert actual_origin_visit is None # unknown visit so no result actual_origin_visit = swh_storage.origin_visit_status_get_latest( ov1.origin, ov1.visit + 10 ) assert actual_origin_visit is None # Two visits, both with no snapshot, take the most recent actual_origin_visit2 = swh_storage.origin_visit_status_get_latest( origin1.url, 
ov1.visit ) assert isinstance(actual_origin_visit2, OriginVisitStatus) assert actual_origin_visit2 == ovs2 assert ovs2.origin == origin1.url assert ovs2.visit == ov1.visit actual_origin_visit = swh_storage.origin_visit_status_get_latest( origin1.url, ov1.visit, require_snapshot=True ) # there is no status with a snapshot yet for that visit assert actual_origin_visit is None actual_origin_visit2 = swh_storage.origin_visit_status_get_latest( origin1.url, ov1.visit, allowed_statuses=["partial", "ongoing"] ) # the most recent status among the allowed ones is elected (ovs2, ongoing) assert actual_origin_visit2 == ovs2 assert actual_origin_visit2.status == "ongoing" actual_origin_visit4 = swh_storage.origin_visit_status_get_latest( origin1.url, ov2.visit, require_snapshot=True ) assert actual_origin_visit4 == ovs4 assert actual_origin_visit4.snapshot == snapshot.id actual_origin_visit = swh_storage.origin_visit_status_get_latest( origin1.url, ov2.visit, require_snapshot=True, allowed_statuses=["ongoing"] ) # nothing matches, so no result assert actual_origin_visit is None # ovs4 (full) is filtered out, so the latest ongoing status is elected actual_origin_visit3 = swh_storage.origin_visit_status_get_latest( origin1.url, ov2.visit, allowed_statuses=["ongoing"] ) assert actual_origin_visit3 == ovs3 def test_person_fullname_unicity(self, swh_storage, sample_data): - revision, rev2 = sample_data["revision"][0:2] + revision, rev2 = sample_data.revisions[0:2] # create a revision with the same committer fullname but without name and email revision2 = attr.evolve( rev2, committer=Person( fullname=revision.committer.fullname, name=None, email=None ), ) swh_storage.revision_add([revision, revision2]) # when getting added revisions revisions = list(swh_storage.revision_get([revision.id, revision2.id])) # then check committers are the same assert revisions[0]["committer"] == revisions[1]["committer"] def test_snapshot_add_get_empty(self, swh_storage, sample_data): - empty_snapshot = sample_data["snapshot"][1] + empty_snapshot = sample_data.snapshots[1] empty_snapshot_dict = empty_snapshot.to_dict() - origin = sample_data["origin"][0] + origin = sample_data.origin swh_storage.origin_add([origin]) ov1 = swh_storage.origin_visit_add( [ OriginVisit( - origin=origin.url, date=data.date_visit1, type=data.type_visit1, + origin=origin.url, + date=sample_data.date_visit1, + type=sample_data.type_visit1, ) ] )[0] actual_result = swh_storage.snapshot_add([empty_snapshot]) assert actual_result == {"snapshot:add": 1} date_now = now() swh_storage.origin_visit_status_add( [ OriginVisitStatus( origin=origin.url, visit=ov1.visit, date=date_now, status="full", snapshot=empty_snapshot.id, ) ] ) by_id = swh_storage.snapshot_get(empty_snapshot.id) assert by_id == {**empty_snapshot_dict, "next_branch": None} by_ov = swh_storage.snapshot_get_by_origin_visit(origin.url, ov1.visit) assert by_ov == {**empty_snapshot_dict, "next_branch": None} ovs1 = OriginVisitStatus.from_dict( { "origin": origin.url, - "date": data.date_visit1, + "date": sample_data.date_visit1, "visit": ov1.visit, "status": "created", "snapshot": None, "metadata": None, } ) ovs2 = OriginVisitStatus.from_dict( { "origin": origin.url, "date": date_now, "visit": ov1.visit, "status": "full", "metadata": None, "snapshot": empty_snapshot.id, } ) actual_objects = list(swh_storage.journal_writer.journal.objects) expected_objects = [ ("origin", origin), ("origin_visit", ov1), ("origin_visit_status", ovs1,), ("snapshot", empty_snapshot), ("origin_visit_status", ovs2,), ] for obj in expected_objects: assert obj in actual_objects def
test_snapshot_add_get_complete(self, swh_storage, sample_data): - complete_snapshot = sample_data["snapshot"][2] + complete_snapshot = sample_data.snapshots[2] complete_snapshot_dict = complete_snapshot.to_dict() - origin = sample_data["origin"][0] + origin = sample_data.origin swh_storage.origin_add([origin]) visit = OriginVisit( - origin=origin.url, date=data.date_visit1, type=data.type_visit1, + origin=origin.url, + date=sample_data.date_visit1, + type=sample_data.type_visit1, ) origin_visit1 = swh_storage.origin_visit_add([visit])[0] visit_id = origin_visit1.visit actual_result = swh_storage.snapshot_add([complete_snapshot]) swh_storage.origin_visit_status_add( [ OriginVisitStatus( origin=origin.url, visit=origin_visit1.visit, date=now(), status="ongoing", snapshot=complete_snapshot.id, ) ] ) assert actual_result == {"snapshot:add": 1} by_id = swh_storage.snapshot_get(complete_snapshot.id) assert by_id == {**complete_snapshot_dict, "next_branch": None} by_ov = swh_storage.snapshot_get_by_origin_visit(origin.url, visit_id) assert by_ov == {**complete_snapshot_dict, "next_branch": None} def test_snapshot_add_many(self, swh_storage, sample_data): - snapshot, _, complete_snapshot = sample_data["snapshot"][:3] + snapshot, _, complete_snapshot = sample_data.snapshots[:3] actual_result = swh_storage.snapshot_add([snapshot, complete_snapshot]) assert actual_result == {"snapshot:add": 2} assert swh_storage.snapshot_get(complete_snapshot.id) == { **complete_snapshot.to_dict(), "next_branch": None, } assert swh_storage.snapshot_get(snapshot.id) == { **snapshot.to_dict(), "next_branch": None, } swh_storage.refresh_stat_counters() assert swh_storage.stat_counters()["snapshot"] == 2 def test_snapshot_add_many_from_generator(self, swh_storage, sample_data): - snapshot, _, complete_snapshot = sample_data["snapshot"][:3] + snapshot, _, complete_snapshot = sample_data.snapshots[:3] def _snp_gen(): yield from [snapshot, complete_snapshot] actual_result = swh_storage.snapshot_add(_snp_gen()) assert actual_result == {"snapshot:add": 2} swh_storage.refresh_stat_counters() assert swh_storage.stat_counters()["snapshot"] == 2 def test_snapshot_add_many_incremental(self, swh_storage, sample_data): - snapshot, _, complete_snapshot = sample_data["snapshot"][:3] + snapshot, _, complete_snapshot = sample_data.snapshots[:3] actual_result = swh_storage.snapshot_add([complete_snapshot]) assert actual_result == {"snapshot:add": 1} actual_result2 = swh_storage.snapshot_add([snapshot, complete_snapshot]) assert actual_result2 == {"snapshot:add": 1} assert swh_storage.snapshot_get(complete_snapshot.id) == { **complete_snapshot.to_dict(), "next_branch": None, } assert swh_storage.snapshot_get(snapshot.id) == { **snapshot.to_dict(), "next_branch": None, } def test_snapshot_add_twice(self, swh_storage, sample_data): - snapshot, empty_snapshot = sample_data["snapshot"][:2] + snapshot, empty_snapshot = sample_data.snapshots[:2] actual_result = swh_storage.snapshot_add([empty_snapshot]) assert actual_result == {"snapshot:add": 1} assert list(swh_storage.journal_writer.journal.objects) == [ ("snapshot", empty_snapshot) ] actual_result = swh_storage.snapshot_add([snapshot]) assert actual_result == {"snapshot:add": 1} assert list(swh_storage.journal_writer.journal.objects) == [ ("snapshot", empty_snapshot), ("snapshot", snapshot), ] def test_snapshot_add_count_branches(self, swh_storage, sample_data): - complete_snapshot = sample_data["snapshot"][2] + complete_snapshot = sample_data.snapshots[2] actual_result = 
swh_storage.snapshot_add([complete_snapshot]) assert actual_result == {"snapshot:add": 1} snp_size = swh_storage.snapshot_count_branches(complete_snapshot.id) expected_snp_size = { "alias": 1, "content": 1, "directory": 2, "release": 1, "revision": 1, "snapshot": 1, None: 1, } assert snp_size == expected_snp_size def test_snapshot_add_get_paginated(self, swh_storage, sample_data): - complete_snapshot = sample_data["snapshot"][2] + complete_snapshot = sample_data.snapshots[2] swh_storage.snapshot_add([complete_snapshot]) snp_id = complete_snapshot.id branches = complete_snapshot.to_dict()["branches"] branch_names = list(sorted(branches)) # Test branch_from snapshot = swh_storage.snapshot_get_branches(snp_id, branches_from=b"release") rel_idx = branch_names.index(b"release") expected_snapshot = { "id": snp_id, "branches": {name: branches[name] for name in branch_names[rel_idx:]}, "next_branch": None, } assert snapshot == expected_snapshot # Test branches_count snapshot = swh_storage.snapshot_get_branches(snp_id, branches_count=1) expected_snapshot = { "id": snp_id, "branches": {branch_names[0]: branches[branch_names[0]],}, "next_branch": b"content", } assert snapshot == expected_snapshot # test branch_from + branches_count snapshot = swh_storage.snapshot_get_branches( snp_id, branches_from=b"directory", branches_count=3 ) dir_idx = branch_names.index(b"directory") expected_snapshot = { "id": snp_id, "branches": { name: branches[name] for name in branch_names[dir_idx : dir_idx + 3] }, "next_branch": branch_names[dir_idx + 3], } assert snapshot == expected_snapshot def test_snapshot_add_get_filtered(self, swh_storage, sample_data): - origin = sample_data["origin"][0] - complete_snapshot = sample_data["snapshot"][2] + origin = sample_data.origin + complete_snapshot = sample_data.snapshots[2] swh_storage.origin_add([origin]) visit = OriginVisit( - origin=origin.url, date=data.date_visit1, type=data.type_visit1, + origin=origin.url, + date=sample_data.date_visit1, + type=sample_data.type_visit1, ) origin_visit1 = swh_storage.origin_visit_add([visit])[0] swh_storage.snapshot_add([complete_snapshot]) swh_storage.origin_visit_status_add( [ OriginVisitStatus( origin=origin.url, visit=origin_visit1.visit, date=now(), status="ongoing", snapshot=complete_snapshot.id, ) ] ) snp_id = complete_snapshot.id branches = complete_snapshot.to_dict()["branches"] snapshot = swh_storage.snapshot_get_branches( snp_id, target_types=["release", "revision"] ) expected_snapshot = { "id": snp_id, "branches": { name: tgt for name, tgt in branches.items() if tgt and tgt["target_type"] in ["release", "revision"] }, "next_branch": None, } assert snapshot == expected_snapshot snapshot = swh_storage.snapshot_get_branches(snp_id, target_types=["alias"]) expected_snapshot = { "id": snp_id, "branches": { name: tgt for name, tgt in branches.items() if tgt and tgt["target_type"] == "alias" }, "next_branch": None, } assert snapshot == expected_snapshot def test_snapshot_add_get_filtered_and_paginated(self, swh_storage, sample_data): - complete_snapshot = sample_data["snapshot"][2] + complete_snapshot = sample_data.snapshots[2] swh_storage.snapshot_add([complete_snapshot]) snp_id = complete_snapshot.id branches = complete_snapshot.to_dict()["branches"] branch_names = list(sorted(branches)) # Test branch_from snapshot = swh_storage.snapshot_get_branches( snp_id, target_types=["directory", "release"], branches_from=b"directory2" ) expected_snapshot = { "id": snp_id, "branches": {name: branches[name] for name in (b"directory2", 
b"release")}, "next_branch": None, } assert snapshot == expected_snapshot # Test branches_count snapshot = swh_storage.snapshot_get_branches( snp_id, target_types=["directory", "release"], branches_count=1 ) expected_snapshot = { "id": snp_id, "branches": {b"directory": branches[b"directory"]}, "next_branch": b"directory2", } assert snapshot == expected_snapshot # Test branches_count snapshot = swh_storage.snapshot_get_branches( snp_id, target_types=["directory", "release"], branches_count=2 ) expected_snapshot = { "id": snp_id, "branches": { name: branches[name] for name in (b"directory", b"directory2") }, "next_branch": b"release", } assert snapshot == expected_snapshot # test branch_from + branches_count snapshot = swh_storage.snapshot_get_branches( snp_id, target_types=["directory", "release"], branches_from=b"directory2", branches_count=1, ) dir_idx = branch_names.index(b"directory2") expected_snapshot = { "id": snp_id, "branches": {branch_names[dir_idx]: branches[branch_names[dir_idx]],}, "next_branch": b"release", } assert snapshot == expected_snapshot def test_snapshot_add_get_branch_by_type(self, swh_storage, sample_data): - complete_snapshot = sample_data["snapshot"][2] + complete_snapshot = sample_data.snapshots[2] snapshot = complete_snapshot.to_dict() alias1 = b"alias1" alias2 = b"alias2" target1 = random.choice(list(snapshot["branches"].keys())) target2 = random.choice(list(snapshot["branches"].keys())) snapshot["branches"][alias2] = { "target": target2, "target_type": "alias", } snapshot["branches"][alias1] = { "target": target1, "target_type": "alias", } new_snapshot = Snapshot.from_dict(snapshot) swh_storage.snapshot_add([new_snapshot]) branches = swh_storage.snapshot_get_branches( new_snapshot.id, target_types=["alias"], branches_from=alias1, branches_count=1, )["branches"] assert len(branches) == 1 assert alias1 in branches def test_snapshot_add_get(self, swh_storage, sample_data): - snapshot = sample_data["snapshot"][0] - origin = sample_data["origin"][0] + snapshot = sample_data.snapshot + origin = sample_data.origin swh_storage.origin_add([origin]) visit = OriginVisit( - origin=origin.url, date=data.date_visit1, type=data.type_visit1, + origin=origin.url, + date=sample_data.date_visit1, + type=sample_data.type_visit1, ) origin_visit1 = swh_storage.origin_visit_add([visit])[0] visit_id = origin_visit1.visit swh_storage.snapshot_add([snapshot]) swh_storage.origin_visit_status_add( [ OriginVisitStatus( origin=origin.url, visit=origin_visit1.visit, date=now(), status="ongoing", snapshot=snapshot.id, ) ] ) expected_snapshot = {**snapshot.to_dict(), "next_branch": None} by_id = swh_storage.snapshot_get(snapshot.id) assert by_id == expected_snapshot by_ov = swh_storage.snapshot_get_by_origin_visit(origin.url, visit_id) assert by_ov == expected_snapshot origin_visit_info = swh_storage.origin_visit_get_by(origin.url, visit_id) assert origin_visit_info["snapshot"] == snapshot.id def test_snapshot_add_twice__by_origin_visit(self, swh_storage, sample_data): - snapshot = sample_data["snapshot"][0] - origin = sample_data["origin"][0] + snapshot = sample_data.snapshot + origin = sample_data.origin swh_storage.origin_add([origin]) ov1 = swh_storage.origin_visit_add( [ OriginVisit( - origin=origin.url, date=data.date_visit1, type=data.type_visit1, + origin=origin.url, + date=sample_data.date_visit1, + type=sample_data.type_visit1, ) ] )[0] swh_storage.snapshot_add([snapshot]) date_now2 = now() swh_storage.origin_visit_status_add( [ OriginVisitStatus( origin=origin.url, 
visit=ov1.visit, date=date_now2, status="ongoing", snapshot=snapshot.id, ) ] ) expected_snapshot = {**snapshot.to_dict(), "next_branch": None} by_ov1 = swh_storage.snapshot_get_by_origin_visit(origin.url, ov1.visit) assert by_ov1 == expected_snapshot ov2 = swh_storage.origin_visit_add( [ OriginVisit( - origin=origin.url, date=data.date_visit2, type=data.type_visit2, + origin=origin.url, + date=sample_data.date_visit2, + type=sample_data.type_visit2, ) ] )[0] date_now4 = now() swh_storage.origin_visit_status_add( [ OriginVisitStatus( origin=origin.url, visit=ov2.visit, date=date_now4, status="ongoing", snapshot=snapshot.id, ) ] ) by_ov2 = swh_storage.snapshot_get_by_origin_visit(origin.url, ov2.visit) assert by_ov2 == expected_snapshot ovs1 = OriginVisitStatus.from_dict( { "origin": origin.url, - "date": data.date_visit1, + "date": sample_data.date_visit1, "visit": ov1.visit, "status": "created", "metadata": None, "snapshot": None, } ) ovs2 = OriginVisitStatus.from_dict( { "origin": origin.url, "date": date_now2, "visit": ov1.visit, "status": "ongoing", "metadata": None, "snapshot": snapshot.id, } ) ovs3 = OriginVisitStatus.from_dict( { "origin": origin.url, - "date": data.date_visit2, + "date": sample_data.date_visit2, "visit": ov2.visit, "status": "created", "metadata": None, "snapshot": None, } ) ovs4 = OriginVisitStatus.from_dict( { "origin": origin.url, "date": date_now4, "visit": ov2.visit, "status": "ongoing", "metadata": None, "snapshot": snapshot.id, } ) actual_objects = list(swh_storage.journal_writer.journal.objects) expected_objects = [ ("origin", origin), ("origin_visit", ov1), ("origin_visit_status", ovs1), ("snapshot", snapshot), ("origin_visit_status", ovs2), ("origin_visit", ov2), ("origin_visit_status", ovs3), ("origin_visit_status", ovs4), ] for obj in expected_objects: assert obj in actual_objects def test_snapshot_get_random(self, swh_storage, sample_data): - snapshot, empty_snapshot, complete_snapshot = sample_data["snapshot"][:3] + snapshot, empty_snapshot, complete_snapshot = sample_data.snapshots[:3] swh_storage.snapshot_add([snapshot, empty_snapshot, complete_snapshot]) assert swh_storage.snapshot_get_random() in { snapshot.id, empty_snapshot.id, complete_snapshot.id, } def test_snapshot_missing(self, swh_storage, sample_data): - snapshot, missing_snapshot = sample_data["snapshot"][:2] + snapshot, missing_snapshot = sample_data.snapshots[:2] snapshots = [snapshot.id, missing_snapshot.id] swh_storage.snapshot_add([snapshot]) missing_snapshots = swh_storage.snapshot_missing(snapshots) assert list(missing_snapshots) == [missing_snapshot.id] def test_stat_counters(self, swh_storage, sample_data): - origin = sample_data["origin"][0] - snapshot = sample_data["snapshot"][0] - revision = sample_data["revision"][0] - release = sample_data["release"][0] - directory = sample_data["directory"][0] - content = sample_data["content"][0] + origin = sample_data.origin + snapshot = sample_data.snapshot + revision = sample_data.revision + release = sample_data.release + directory = sample_data.directory + content = sample_data.content expected_keys = ["content", "directory", "origin", "revision"] # Initially, all counters are 0 swh_storage.refresh_stat_counters() counters = swh_storage.stat_counters() assert set(expected_keys) <= set(counters) for key in expected_keys: assert counters[key] == 0 # Add a content. Only the content counter should increase. 
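# (the counters are not live: the test calls refresh_stat_counters() after each batch of writes before re-reading them)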
swh_storage.content_add([content]) swh_storage.refresh_stat_counters() counters = swh_storage.stat_counters() assert set(expected_keys) <= set(counters) for key in expected_keys: if key != "content": assert counters[key] == 0 assert counters["content"] == 1 # Add other objects. Check their counter increased as well. swh_storage.origin_add([origin]) visit = OriginVisit( - origin=origin.url, date=data.date_visit2, type=data.type_visit2, + origin=origin.url, + date=sample_data.date_visit2, + type=sample_data.type_visit2, ) origin_visit1 = swh_storage.origin_visit_add([visit])[0] swh_storage.snapshot_add([snapshot]) swh_storage.origin_visit_status_add( [ OriginVisitStatus( origin=origin.url, visit=origin_visit1.visit, date=now(), status="ongoing", snapshot=snapshot.id, ) ] ) swh_storage.directory_add([directory]) swh_storage.revision_add([revision]) swh_storage.release_add([release]) swh_storage.refresh_stat_counters() counters = swh_storage.stat_counters() assert counters["content"] == 1 assert counters["directory"] == 1 assert counters["snapshot"] == 1 assert counters["origin"] == 1 assert counters["origin_visit"] == 1 assert counters["revision"] == 1 assert counters["release"] == 1 assert counters["snapshot"] == 1 if "person" in counters: assert counters["person"] == 3 def test_content_find_ctime(self, swh_storage, sample_data): - origin_content = sample_data["content"][0] + origin_content = sample_data.content ctime = round_to_milliseconds(now()) content = attr.evolve(origin_content, data=None, ctime=ctime) swh_storage.content_add_metadata([content]) actually_present = swh_storage.content_find({"sha1": content.sha1}) assert actually_present[0] == content.to_dict() def test_content_find_with_present_content(self, swh_storage, sample_data): - content = sample_data["content"][0] + content = sample_data.content expected_content = content.to_dict() del expected_content["data"] del expected_content["ctime"] # 1. with something to find swh_storage.content_add([content]) actually_present = swh_storage.content_find({"sha1": content.sha1}) assert 1 == len(actually_present) actually_present[0].pop("ctime") assert actually_present[0] == expected_content # 2. with something to find actually_present = swh_storage.content_find({"sha1_git": content.sha1_git}) assert 1 == len(actually_present) actually_present[0].pop("ctime") assert actually_present[0] == expected_content # 3. with something to find actually_present = swh_storage.content_find({"sha256": content.sha256}) assert 1 == len(actually_present) actually_present[0].pop("ctime") assert actually_present[0] == expected_content # 4. with something to find actually_present = swh_storage.content_find(content.hashes()) assert 1 == len(actually_present) actually_present[0].pop("ctime") assert actually_present[0] == expected_content def test_content_find_with_non_present_content(self, swh_storage, sample_data): - missing_content = sample_data["skipped_content"][0] + missing_content = sample_data.skipped_content # 1. with something that does not exist actually_present = swh_storage.content_find({"sha1": missing_content.sha1}) assert actually_present == [] # 2. with something that does not exist actually_present = swh_storage.content_find( {"sha1_git": missing_content.sha1_git} ) assert actually_present == [] # 3. 
with something that does not exist actually_present = swh_storage.content_find({"sha256": missing_content.sha256}) assert actually_present == [] def test_content_find_with_duplicate_input(self, swh_storage, sample_data): - content = sample_data["content"][0] + content = sample_data.content # Create fake data with colliding sha256 and blake2s256 sha1_array = bytearray(content.sha1) sha1_array[0] += 1 sha1git_array = bytearray(content.sha1_git) sha1git_array[0] += 1 duplicated_content = attr.evolve( content, sha1=bytes(sha1_array), sha1_git=bytes(sha1git_array) ) # Inject the data swh_storage.content_add([content, duplicated_content]) actual_result = list( swh_storage.content_find( { "blake2s256": duplicated_content.blake2s256, "sha256": duplicated_content.sha256, } ) ) expected_content = content.to_dict() expected_duplicated_content = duplicated_content.to_dict() for key in ["data", "ctime"]: # so we can compare for dict_ in [ expected_content, expected_duplicated_content, actual_result[0], actual_result[1], ]: dict_.pop(key, None) expected_result = [expected_content, expected_duplicated_content] for result in expected_result: assert result in actual_result def test_content_find_with_duplicate_sha256(self, swh_storage, sample_data): - content = sample_data["content"][0] + content = sample_data.content hashes = {} # Create fake data with colliding sha256 for hashalgo in ("sha1", "sha1_git", "blake2s256"): value = bytearray(getattr(content, hashalgo)) value[0] += 1 hashes[hashalgo] = bytes(value) duplicated_content = attr.evolve( content, sha1=hashes["sha1"], sha1_git=hashes["sha1_git"], blake2s256=hashes["blake2s256"], ) swh_storage.content_add([content, duplicated_content]) actual_result = list( swh_storage.content_find({"sha256": duplicated_content.sha256}) ) assert len(actual_result) == 2 expected_content = content.to_dict() expected_duplicated_content = duplicated_content.to_dict() for key in ["data", "ctime"]: # so we can compare for dict_ in [ expected_content, expected_duplicated_content, actual_result[0], actual_result[1], ]: dict_.pop(key, None) assert sorted(actual_result, key=lambda x: x["sha1"]) == [ expected_content, expected_duplicated_content, ] # Find with both sha256 and blake2s256 actual_result = list( swh_storage.content_find( { "sha256": duplicated_content.sha256, "blake2s256": duplicated_content.blake2s256, } ) ) assert len(actual_result) == 1 actual_result[0].pop("ctime") assert actual_result == [expected_duplicated_content] def test_content_find_with_duplicate_blake2s256(self, swh_storage, sample_data): - content = sample_data["content"][0] + content = sample_data.content # Create fake data with colliding sha256 and blake2s256 sha1_array = bytearray(content.sha1) sha1_array[0] += 1 sha1git_array = bytearray(content.sha1_git) sha1git_array[0] += 1 sha256_array = bytearray(content.sha256) sha256_array[0] += 1 duplicated_content = attr.evolve( content, sha1=bytes(sha1_array), sha1_git=bytes(sha1git_array), sha256=bytes(sha256_array), ) swh_storage.content_add([content, duplicated_content]) actual_result = list( swh_storage.content_find({"blake2s256": duplicated_content.blake2s256}) ) expected_content = content.to_dict() expected_duplicated_content = duplicated_content.to_dict() for key in ["data", "ctime"]: # so we can compare for dict_ in [ expected_content, expected_duplicated_content, actual_result[0], actual_result[1], ]: dict_.pop(key, None) expected_result = [expected_content, expected_duplicated_content] for result in expected_result: assert result in actual_result 
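# Adding a second hash to the query below disambiguates the collision: only the forged duplicate matches both blake2s256 and sha256, so a single result is expected.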
# Find with both sha256 and blake2s256 actual_result = list( swh_storage.content_find( { "sha256": duplicated_content.sha256, "blake2s256": duplicated_content.blake2s256, } ) ) actual_result[0].pop("ctime") assert actual_result == [expected_duplicated_content] def test_content_find_bad_input(self, swh_storage): # 1. with bad input with pytest.raises(StorageArgumentException): swh_storage.content_find({}) # empty is bad # 2. with bad input with pytest.raises(StorageArgumentException): swh_storage.content_find({"unknown-sha1": "something"}) # not the right key def test_object_find_by_sha1_git(self, swh_storage, sample_data): - content = sample_data["content"][0] - directory = sample_data["directory"][0] - revision = sample_data["revision"][0] - release = sample_data["release"][0] + content = sample_data.content + directory = sample_data.directory + revision = sample_data.revision + release = sample_data.release sha1_gits = [b"00000000000000000000"] expected = { b"00000000000000000000": [], } swh_storage.content_add([content]) sha1_gits.append(content.sha1_git) expected[content.sha1_git] = [ {"sha1_git": content.sha1_git, "type": "content",} ] swh_storage.directory_add([directory]) sha1_gits.append(directory.id) expected[directory.id] = [{"sha1_git": directory.id, "type": "directory",}] swh_storage.revision_add([revision]) sha1_gits.append(revision.id) expected[revision.id] = [{"sha1_git": revision.id, "type": "revision",}] swh_storage.release_add([release]) sha1_gits.append(release.id) expected[release.id] = [{"sha1_git": release.id, "type": "release",}] ret = swh_storage.object_find_by_sha1_git(sha1_gits) assert expected == ret def test_metadata_fetcher_add_get(self, swh_storage, sample_data): - fetcher = sample_data["fetcher"][0] + fetcher = sample_data.metadata_fetcher actual_fetcher = swh_storage.metadata_fetcher_get(fetcher.name, fetcher.version) assert actual_fetcher is None # does not exist swh_storage.metadata_fetcher_add([fetcher]) res = swh_storage.metadata_fetcher_get(fetcher.name, fetcher.version) assert res == fetcher def test_metadata_authority_add_get(self, swh_storage, sample_data): - authority = sample_data["authority"][0] + authority = sample_data.metadata_authority actual_authority = swh_storage.metadata_authority_get( authority.type, authority.url ) assert actual_authority is None # does not exist swh_storage.metadata_authority_add([authority]) res = swh_storage.metadata_authority_get(authority.type, authority.url) assert res == authority def test_content_metadata_add(self, swh_storage, sample_data): - content = sample_data["content"][0] - fetcher = sample_data["fetcher"][0] - authority = sample_data["authority"][0] - content_metadata = sample_data["content_metadata"][:2] + content = sample_data.content + fetcher = sample_data.metadata_fetcher + authority = sample_data.metadata_authority + content_metadata = sample_data.content_metadata[:2] content_swhid = SWHID( object_type="content", object_id=hash_to_bytes(content.sha1_git) ) swh_storage.metadata_fetcher_add([fetcher]) swh_storage.metadata_authority_add([authority]) swh_storage.object_metadata_add(content_metadata) result = swh_storage.object_metadata_get( MetadataTargetType.CONTENT, content_swhid, authority ) assert result["next_page_token"] is None assert list(sorted(result["results"], key=lambda x: x.discovery_date,)) == list( content_metadata ) def test_content_metadata_add_duplicate(self, swh_storage, sample_data): """Duplicates should be silently updated.""" - content = sample_data["content"][0] - fetcher = 
sample_data["fetcher"][0] - authority = sample_data["authority"][0] - content_metadata, content_metadata2 = sample_data["content_metadata"][:2] + content = sample_data.content + fetcher = sample_data.metadata_fetcher + authority = sample_data.metadata_authority + content_metadata, content_metadata2 = sample_data.content_metadata[:2] content_swhid = SWHID( object_type="content", object_id=hash_to_bytes(content.sha1_git) ) new_content_metadata2 = attr.evolve( content_metadata2, format="new-format", metadata=b"new-metadata", ) swh_storage.metadata_fetcher_add([fetcher]) swh_storage.metadata_authority_add([authority]) swh_storage.object_metadata_add([content_metadata, content_metadata2]) swh_storage.object_metadata_add([new_content_metadata2]) result = swh_storage.object_metadata_get( MetadataTargetType.CONTENT, content_swhid, authority ) assert result["next_page_token"] is None expected_results1 = (content_metadata, new_content_metadata2) expected_results2 = (content_metadata, content_metadata2) assert tuple(sorted(result["results"], key=lambda x: x.discovery_date,)) in ( expected_results1, # cassandra expected_results2, # postgresql ) def test_content_metadata_get(self, swh_storage, sample_data): - content, content2 = sample_data["content"][:2] - fetcher, fetcher2 = sample_data["fetcher"][:2] - authority, authority2 = sample_data["authority"][:2] - content1_metadata1, content1_metadata2, content1_metadata3 = sample_data[ - "content_metadata" - ][:3] + content, content2 = sample_data.contents[:2] + fetcher, fetcher2 = sample_data.fetchers[:2] + authority, authority2 = sample_data.authorities[:2] + ( + content1_metadata1, + content1_metadata2, + content1_metadata3, + ) = sample_data.content_metadata[:3] content1_swhid = SWHID(object_type="content", object_id=content.sha1_git) content2_swhid = SWHID(object_type="content", object_id=content2.sha1_git) content2_metadata = attr.evolve(content1_metadata2, id=content2_swhid) swh_storage.metadata_authority_add([authority, authority2]) swh_storage.metadata_fetcher_add([fetcher, fetcher2]) swh_storage.object_metadata_add( [ content1_metadata1, content1_metadata2, content1_metadata3, content2_metadata, ] ) result = swh_storage.object_metadata_get( MetadataTargetType.CONTENT, content1_swhid, authority ) assert result["next_page_token"] is None assert [content1_metadata1, content1_metadata2] == list( sorted(result["results"], key=lambda x: x.discovery_date,) ) result = swh_storage.object_metadata_get( MetadataTargetType.CONTENT, content1_swhid, authority2 ) assert result["next_page_token"] is None assert [content1_metadata3] == list( sorted(result["results"], key=lambda x: x.discovery_date,) ) result = swh_storage.object_metadata_get( MetadataTargetType.CONTENT, content2_swhid, authority ) assert result["next_page_token"] is None assert [content2_metadata] == list(result["results"],) def test_content_metadata_get_after(self, swh_storage, sample_data): - content = sample_data["content"][0] - fetcher = sample_data["fetcher"][0] - authority = sample_data["authority"][0] - content_metadata, content_metadata2 = sample_data["content_metadata"][:2] + content = sample_data.content + fetcher = sample_data.metadata_fetcher + authority = sample_data.metadata_authority + content_metadata, content_metadata2 = sample_data.content_metadata[:2] content_swhid = SWHID(object_type="content", object_id=content.sha1_git) swh_storage.metadata_fetcher_add([fetcher]) swh_storage.metadata_authority_add([authority]) swh_storage.object_metadata_add([content_metadata, 
content_metadata2]) result = swh_storage.object_metadata_get( MetadataTargetType.CONTENT, content_swhid, authority, after=content_metadata.discovery_date - timedelta(seconds=1), ) assert result["next_page_token"] is None assert [content_metadata, content_metadata2] == list( sorted(result["results"], key=lambda x: x.discovery_date,) ) result = swh_storage.object_metadata_get( MetadataTargetType.CONTENT, content_swhid, authority, after=content_metadata.discovery_date, ) assert result["next_page_token"] is None assert result["results"] == [content_metadata2] result = swh_storage.object_metadata_get( MetadataTargetType.CONTENT, content_swhid, authority, after=content_metadata2.discovery_date, ) assert result["next_page_token"] is None assert result["results"] == [] def test_content_metadata_get_paginate(self, swh_storage, sample_data): - content = sample_data["content"][0] - fetcher = sample_data["fetcher"][0] - authority = sample_data["authority"][0] - content_metadata, content_metadata2 = sample_data["content_metadata"][:2] + content = sample_data.content + fetcher = sample_data.metadata_fetcher + authority = sample_data.metadata_authority + content_metadata, content_metadata2 = sample_data.content_metadata[:2] content_swhid = SWHID(object_type="content", object_id=content.sha1_git) swh_storage.metadata_fetcher_add([fetcher]) swh_storage.metadata_authority_add([authority]) swh_storage.object_metadata_add([content_metadata, content_metadata2]) swh_storage.object_metadata_get( MetadataTargetType.CONTENT, content_swhid, authority ) result = swh_storage.object_metadata_get( MetadataTargetType.CONTENT, content_swhid, authority, limit=1 ) assert result["next_page_token"] is not None assert result["results"] == [content_metadata] result = swh_storage.object_metadata_get( MetadataTargetType.CONTENT, content_swhid, authority, limit=1, page_token=result["next_page_token"], ) assert result["next_page_token"] is None assert result["results"] == [content_metadata2] def test_content_metadata_get_paginate_same_date(self, swh_storage, sample_data): - content = sample_data["content"][0] - fetcher1, fetcher2 = sample_data["fetcher"][:2] - authority = sample_data["authority"][0] - content_metadata, content_metadata2 = sample_data["content_metadata"][:2] + content = sample_data.content + fetcher1, fetcher2 = sample_data.fetchers[:2] + authority = sample_data.metadata_authority + content_metadata, content_metadata2 = sample_data.content_metadata[:2] content_swhid = SWHID(object_type="content", object_id=content.sha1_git) swh_storage.metadata_fetcher_add([fetcher1, fetcher2]) swh_storage.metadata_authority_add([authority]) new_content_metadata2 = attr.evolve( content_metadata2, discovery_date=content_metadata2.discovery_date, fetcher=attr.evolve(fetcher2, metadata=None), ) swh_storage.object_metadata_add([content_metadata, new_content_metadata2]) result = swh_storage.object_metadata_get( MetadataTargetType.CONTENT, content_swhid, authority, limit=1 ) assert result["next_page_token"] is not None assert result["results"] == [content_metadata] result = swh_storage.object_metadata_get( MetadataTargetType.CONTENT, content_swhid, authority, limit=1, page_token=result["next_page_token"], ) assert result["next_page_token"] is None assert result["results"] == [new_content_metadata2] def test_content_metadata_get__invalid_id(self, swh_storage, sample_data): - origin = sample_data["origin"][0] - fetcher = sample_data["fetcher"][0] - authority = sample_data["authority"][0] - content_metadata, content_metadata2 = 
sample_data["content_metadata"][:2] + origin = sample_data.origin + fetcher = sample_data.metadata_fetcher + authority = sample_data.metadata_authority + content_metadata, content_metadata2 = sample_data.content_metadata[:2] swh_storage.metadata_fetcher_add([fetcher]) swh_storage.metadata_authority_add([authority]) swh_storage.object_metadata_add([content_metadata, content_metadata2]) with pytest.raises(StorageArgumentException, match="SWHID"): swh_storage.object_metadata_get( MetadataTargetType.CONTENT, origin.url, authority ) def test_origin_metadata_add(self, swh_storage, sample_data): - origin = sample_data["origin"][0] - fetcher = sample_data["fetcher"][0] - authority = sample_data["authority"][0] - origin_metadata, origin_metadata2 = sample_data["origin_metadata"][:2] + origin = sample_data.origin + fetcher = sample_data.metadata_fetcher + authority = sample_data.metadata_authority + origin_metadata, origin_metadata2 = sample_data.origin_metadata[:2] assert swh_storage.origin_add([origin]) == {"origin:add": 1} swh_storage.metadata_fetcher_add([fetcher]) swh_storage.metadata_authority_add([authority]) swh_storage.object_metadata_add([origin_metadata, origin_metadata2]) result = swh_storage.object_metadata_get( MetadataTargetType.ORIGIN, origin.url, authority ) assert result["next_page_token"] is None assert list(sorted(result["results"], key=lambda x: x.discovery_date)) == [ origin_metadata, origin_metadata2, ] def test_origin_metadata_add_duplicate(self, swh_storage, sample_data): """Duplicates should be silently updated.""" - origin = sample_data["origin"][0] - fetcher = sample_data["fetcher"][0] - authority = sample_data["authority"][0] - origin_metadata, origin_metadata2 = sample_data["origin_metadata"][:2] + origin = sample_data.origin + fetcher = sample_data.metadata_fetcher + authority = sample_data.metadata_authority + origin_metadata, origin_metadata2 = sample_data.origin_metadata[:2] assert swh_storage.origin_add([origin]) == {"origin:add": 1} new_origin_metadata2 = attr.evolve( origin_metadata2, format="new-format", metadata=b"new-metadata", ) swh_storage.metadata_fetcher_add([fetcher]) swh_storage.metadata_authority_add([authority]) swh_storage.object_metadata_add([origin_metadata, origin_metadata2]) swh_storage.object_metadata_add([new_origin_metadata2]) result = swh_storage.object_metadata_get( MetadataTargetType.ORIGIN, origin.url, authority ) assert result["next_page_token"] is None # which of the two behavior happens is backend-specific. 
expected_results1 = (origin_metadata, new_origin_metadata2) expected_results2 = (origin_metadata, origin_metadata2) assert tuple(sorted(result["results"], key=lambda x: x.discovery_date,)) in ( expected_results1, # cassandra expected_results2, # postgresql ) def test_origin_metadata_get(self, swh_storage, sample_data): - origin, origin2 = sample_data["origin"][:2] - fetcher, fetcher2 = sample_data["fetcher"][:2] - authority, authority2 = sample_data["authority"][:2] - origin1_metadata1, origin1_metadata2, origin1_metadata3 = sample_data[ - "origin_metadata" - ][:3] + origin, origin2 = sample_data.origins[:2] + fetcher, fetcher2 = sample_data.fetchers[:2] + authority, authority2 = sample_data.authorities[:2] + ( + origin1_metadata1, + origin1_metadata2, + origin1_metadata3, + ) = sample_data.origin_metadata[:3] assert swh_storage.origin_add([origin, origin2]) == {"origin:add": 2} origin2_metadata = attr.evolve(origin1_metadata2, id=origin2.url) swh_storage.metadata_authority_add([authority, authority2]) swh_storage.metadata_fetcher_add([fetcher, fetcher2]) swh_storage.object_metadata_add( [origin1_metadata1, origin1_metadata2, origin1_metadata3, origin2_metadata] ) result = swh_storage.object_metadata_get( MetadataTargetType.ORIGIN, origin.url, authority ) assert result["next_page_token"] is None assert [origin1_metadata1, origin1_metadata2] == list( sorted(result["results"], key=lambda x: x.discovery_date,) ) result = swh_storage.object_metadata_get( MetadataTargetType.ORIGIN, origin.url, authority2 ) assert result["next_page_token"] is None assert [origin1_metadata3] == list( sorted(result["results"], key=lambda x: x.discovery_date,) ) result = swh_storage.object_metadata_get( MetadataTargetType.ORIGIN, origin2.url, authority ) assert result["next_page_token"] is None assert [origin2_metadata] == list(result["results"],) def test_origin_metadata_get_after(self, swh_storage, sample_data): - origin = sample_data["origin"][0] - fetcher = sample_data["fetcher"][0] - authority = sample_data["authority"][0] - origin_metadata, origin_metadata2 = sample_data["origin_metadata"][:2] + origin = sample_data.origin + fetcher = sample_data.metadata_fetcher + authority = sample_data.metadata_authority + origin_metadata, origin_metadata2 = sample_data.origin_metadata[:2] assert swh_storage.origin_add([origin]) == {"origin:add": 1} swh_storage.metadata_fetcher_add([fetcher]) swh_storage.metadata_authority_add([authority]) swh_storage.object_metadata_add([origin_metadata, origin_metadata2]) result = swh_storage.object_metadata_get( MetadataTargetType.ORIGIN, origin.url, authority, after=origin_metadata.discovery_date - timedelta(seconds=1), ) assert result["next_page_token"] is None assert list(sorted(result["results"], key=lambda x: x.discovery_date,)) == [ origin_metadata, origin_metadata2, ] result = swh_storage.object_metadata_get( MetadataTargetType.ORIGIN, origin.url, authority, after=origin_metadata.discovery_date, ) assert result["next_page_token"] is None assert result["results"] == [origin_metadata2] result = swh_storage.object_metadata_get( MetadataTargetType.ORIGIN, origin.url, authority, after=origin_metadata2.discovery_date, ) assert result["next_page_token"] is None assert result["results"] == [] def test_origin_metadata_get_paginate(self, swh_storage, sample_data): - origin = sample_data["origin"][0] - fetcher = sample_data["fetcher"][0] - authority = sample_data["authority"][0] - origin_metadata, origin_metadata2 = sample_data["origin_metadata"][:2] + origin = sample_data.origin + fetcher 
= sample_data.metadata_fetcher + authority = sample_data.metadata_authority + origin_metadata, origin_metadata2 = sample_data.origin_metadata[:2] assert swh_storage.origin_add([origin]) == {"origin:add": 1} swh_storage.metadata_fetcher_add([fetcher]) swh_storage.metadata_authority_add([authority]) swh_storage.object_metadata_add([origin_metadata, origin_metadata2]) swh_storage.object_metadata_get( MetadataTargetType.ORIGIN, origin.url, authority ) result = swh_storage.object_metadata_get( MetadataTargetType.ORIGIN, origin.url, authority, limit=1 ) assert result["next_page_token"] is not None assert result["results"] == [origin_metadata] result = swh_storage.object_metadata_get( MetadataTargetType.ORIGIN, origin.url, authority, limit=1, page_token=result["next_page_token"], ) assert result["next_page_token"] is None assert result["results"] == [origin_metadata2] def test_origin_metadata_get_paginate_same_date(self, swh_storage, sample_data): - origin = sample_data["origin"][0] - fetcher1, fetcher2 = sample_data["fetcher"][:2] - authority = sample_data["authority"][0] - origin_metadata, origin_metadata2 = sample_data["origin_metadata"][:2] + origin = sample_data.origin + fetcher1, fetcher2 = sample_data.fetchers[:2] + authority = sample_data.metadata_authority + origin_metadata, origin_metadata2 = sample_data.origin_metadata[:2] assert swh_storage.origin_add([origin]) == {"origin:add": 1} swh_storage.metadata_fetcher_add([fetcher1, fetcher2]) swh_storage.metadata_authority_add([authority]) new_origin_metadata2 = attr.evolve( origin_metadata2, discovery_date=origin_metadata2.discovery_date, fetcher=attr.evolve(fetcher2, metadata=None), ) swh_storage.object_metadata_add([origin_metadata, new_origin_metadata2]) result = swh_storage.object_metadata_get( MetadataTargetType.ORIGIN, origin.url, authority, limit=1 ) assert result["next_page_token"] is not None assert result["results"] == [origin_metadata] result = swh_storage.object_metadata_get( MetadataTargetType.ORIGIN, origin.url, authority, limit=1, page_token=result["next_page_token"], ) assert result["next_page_token"] is None assert result["results"] == [new_origin_metadata2] def test_origin_metadata_add_missing_authority(self, swh_storage, sample_data): - origin = sample_data["origin"][0] - fetcher = sample_data["fetcher"][0] - origin_metadata, origin_metadata2 = sample_data["origin_metadata"][:2] + origin = sample_data.origin + fetcher = sample_data.metadata_fetcher + origin_metadata, origin_metadata2 = sample_data.origin_metadata[:2] assert swh_storage.origin_add([origin]) == {"origin:add": 1} swh_storage.metadata_fetcher_add([fetcher]) with pytest.raises(StorageArgumentException, match="authority"): swh_storage.object_metadata_add([origin_metadata, origin_metadata2]) def test_origin_metadata_add_missing_fetcher(self, swh_storage, sample_data): - origin = sample_data["origin"][0] - authority = sample_data["authority"][0] - origin_metadata, origin_metadata2 = sample_data["origin_metadata"][:2] + origin = sample_data.origin + authority = sample_data.metadata_authority + origin_metadata, origin_metadata2 = sample_data.origin_metadata[:2] assert swh_storage.origin_add([origin]) == {"origin:add": 1} swh_storage.metadata_authority_add([authority]) with pytest.raises(StorageArgumentException, match="fetcher"): swh_storage.object_metadata_add([origin_metadata, origin_metadata2]) def test_origin_metadata_get__invalid_id_type(self, swh_storage, sample_data): - origin = sample_data["origin"][0] - authority = sample_data["authority"][0] - fetcher = 
sample_data["fetcher"][0] - origin_metadata, origin_metadata2 = sample_data["origin_metadata"][:2] - content_metadata = sample_data["content_metadata"][0] + origin = sample_data.origin + authority = sample_data.metadata_authority + fetcher = sample_data.metadata_fetcher + origin_metadata, origin_metadata2 = sample_data.origin_metadata[:2] + content_metadata = sample_data.content_metadata[0] assert swh_storage.origin_add([origin]) == {"origin:add": 1} swh_storage.metadata_fetcher_add([fetcher]) swh_storage.metadata_authority_add([authority]) swh_storage.object_metadata_add([origin_metadata, origin_metadata2]) with pytest.raises(StorageArgumentException, match="SWHID"): swh_storage.object_metadata_get( MetadataTargetType.ORIGIN, content_metadata.id, authority, ) class TestStorageGeneratedData: def test_generate_content_get(self, swh_storage, swh_contents): contents_with_data = [c.to_dict() for c in swh_contents if c.status != "absent"] # input the list of sha1s we want from storage get_sha1s = [c["sha1"] for c in contents_with_data] # retrieve contents actual_contents = list(swh_storage.content_get(get_sha1s)) assert None not in actual_contents assert_contents_ok(contents_with_data, actual_contents) def test_generate_content_get_metadata(self, swh_storage, swh_contents): # input the list of sha1s we want from storage expected_contents = [c.to_dict() for c in swh_contents if c.status != "absent"] get_sha1s = [c["sha1"] for c in expected_contents] # retrieve contents meta_contents = swh_storage.content_get_metadata(get_sha1s) assert len(list(meta_contents)) == len(get_sha1s) actual_contents = [] for contents in meta_contents.values(): actual_contents.extend(contents) keys_to_check = {"length", "status", "sha1", "sha1_git", "sha256", "blake2s256"} assert_contents_ok( expected_contents, actual_contents, keys_to_check=keys_to_check ) def test_generate_content_get_range(self, swh_storage, swh_contents): """content_get_range returns complete range""" present_contents = [c.to_dict() for c in swh_contents if c.status != "absent"] get_sha1s = sorted([c.sha1 for c in swh_contents if c.status != "absent"]) start = get_sha1s[2] end = get_sha1s[-2] actual_result = swh_storage.content_get_range(start, end) assert actual_result["next"] is None actual_contents = actual_result["contents"] expected_contents = [c for c in present_contents if start <= c["sha1"] <= end] if expected_contents: assert_contents_ok(expected_contents, actual_contents, ["sha1"]) else: assert actual_contents == [] def test_generate_content_get_range_full(self, swh_storage, swh_contents): """content_get_range for a full range returns all available contents""" present_contents = [c.to_dict() for c in swh_contents if c.status != "absent"] start = b"0" * 40 end = b"f" * 40 actual_result = swh_storage.content_get_range(start, end) assert actual_result["next"] is None actual_contents = actual_result["contents"] expected_contents = [c for c in present_contents if start <= c["sha1"] <= end] if expected_contents: assert_contents_ok(expected_contents, actual_contents, ["sha1"]) else: assert actual_contents == [] def test_generate_content_get_range_empty(self, swh_storage, swh_contents): """content_get_range for an empty range returns nothing""" start = b"0" * 40 end = b"f" * 40 actual_result = swh_storage.content_get_range(end, start) assert actual_result["next"] is None assert len(actual_result["contents"]) == 0 def test_generate_content_get_range_limit_none(self, swh_storage): """content_get_range call with wrong limit input should fail""" with 
pytest.raises(StorageArgumentException) as e: swh_storage.content_get_range(start=None, end=None, limit=None) assert e.value.args == ("limit should not be None",) def test_generate_content_get_range_no_limit(self, swh_storage, swh_contents): """content_get_range returns contents within range provided""" # input the list of sha1s we want from storage get_sha1s = sorted([c.sha1 for c in swh_contents if c.status != "absent"]) start = get_sha1s[0] end = get_sha1s[-1] # retrieve contents actual_result = swh_storage.content_get_range(start, end) actual_contents = actual_result["contents"] assert actual_result["next"] is None assert len(actual_contents) == len(get_sha1s) expected_contents = [c.to_dict() for c in swh_contents if c.status != "absent"] assert_contents_ok(expected_contents, actual_contents, ["sha1"]) def test_generate_content_get_range_limit(self, swh_storage, swh_contents): """content_get_range paginates results if limit exceeded""" contents_map = {c.sha1: c.to_dict() for c in swh_contents} # input the list of sha1s we want from storage get_sha1s = sorted([c.sha1 for c in swh_contents if c.status != "absent"]) start = get_sha1s[0] end = get_sha1s[-1] # retrieve contents limited to n-1 results limited_results = len(get_sha1s) - 1 actual_result = swh_storage.content_get_range(start, end, limit=limited_results) actual_contents = actual_result["contents"] assert actual_result["next"] == get_sha1s[-1] assert len(actual_contents) == limited_results expected_contents = [contents_map[sha1] for sha1 in get_sha1s[:-1]] assert_contents_ok(expected_contents, actual_contents, ["sha1"]) # retrieve next part actual_results2 = swh_storage.content_get_range(start=end, end=end) assert actual_results2["next"] is None actual_contents2 = actual_results2["contents"] assert len(actual_contents2) == 1 assert_contents_ok([contents_map[get_sha1s[-1]]], actual_contents2, ["sha1"]) def test_origin_get_range_from_zero(self, swh_storage, swh_origins): actual_origins = list( swh_storage.origin_get_range(origin_from=0, origin_count=0) ) assert len(actual_origins) == 0 actual_origins = list( swh_storage.origin_get_range(origin_from=0, origin_count=1) ) assert len(actual_origins) == 1 assert actual_origins[0]["id"] == 1 assert actual_origins[0]["url"] == swh_origins[0].url @pytest.mark.parametrize( "origin_from,origin_count", [(1, 1), (1, 10), (1, 20), (1, 101), (11, 0), (11, 10), (91, 11)], ) def test_origin_get_range( self, swh_storage, swh_origins, origin_from, origin_count ): actual_origins = list( swh_storage.origin_get_range( origin_from=origin_from, origin_count=origin_count ) ) origins_with_id = list(enumerate(swh_origins, start=1)) expected_origins = [ {"url": origin.url, "id": origin_id,} for (origin_id, origin) in origins_with_id[ origin_from - 1 : origin_from + origin_count - 1 ] ] assert actual_origins == expected_origins @pytest.mark.parametrize("limit", [1, 7, 10, 100, 1000]) def test_origin_list(self, swh_storage, swh_origins, limit): returned_origins = [] page_token = None i = 0 while True: result = swh_storage.origin_list(page_token=page_token, limit=limit) assert len(result["origins"]) <= limit returned_origins.extend(origin["url"] for origin in result["origins"]) i += 1 page_token = result.get("next_page_token") if page_token is None: assert i * limit >= len(swh_origins) break else: assert len(result["origins"]) == limit expected_origins = [origin.url for origin in swh_origins] assert sorted(returned_origins) == sorted(expected_origins) def test_origin_count(self, swh_storage, sample_data): - 
swh_storage.origin_add(sample_data["origin"]) + swh_storage.origin_add(sample_data.origins) assert swh_storage.origin_count("github") == 3 assert swh_storage.origin_count("gitlab") == 2 assert swh_storage.origin_count(".*user.*", regexp=True) == 5 assert swh_storage.origin_count(".*user.*", regexp=False) == 0 assert swh_storage.origin_count(".*user1.*", regexp=True) == 2 assert swh_storage.origin_count(".*user1.*", regexp=False) == 0 def test_origin_count_with_visit_no_visits(self, swh_storage, sample_data): - swh_storage.origin_add(sample_data["origin"]) + swh_storage.origin_add(sample_data.origins) # none of them have visits, so with_visit=True => 0 assert swh_storage.origin_count("github", with_visit=True) == 0 assert swh_storage.origin_count("gitlab", with_visit=True) == 0 assert swh_storage.origin_count(".*user.*", regexp=True, with_visit=True) == 0 assert swh_storage.origin_count(".*user.*", regexp=False, with_visit=True) == 0 assert swh_storage.origin_count(".*user1.*", regexp=True, with_visit=True) == 0 assert swh_storage.origin_count(".*user1.*", regexp=False, with_visit=True) == 0 def test_origin_count_with_visit_with_visits_no_snapshot( self, swh_storage, sample_data ): - swh_storage.origin_add(sample_data["origin"]) + swh_storage.origin_add(sample_data.origins) origin_url = "https://github.com/user1/repo1" visit = OriginVisit(origin=origin_url, date=now(), type="git",) swh_storage.origin_visit_add([visit]) assert swh_storage.origin_count("github", with_visit=False) == 3 # it has a visit, but no snapshot, so with_visit=True => 0 assert swh_storage.origin_count("github", with_visit=True) == 0 assert swh_storage.origin_count("gitlab", with_visit=False) == 2 # these gitlab origins have no visit assert swh_storage.origin_count("gitlab", with_visit=True) == 0 assert ( swh_storage.origin_count("github.*user1", regexp=True, with_visit=False) == 1 ) assert ( swh_storage.origin_count("github.*user1", regexp=True, with_visit=True) == 0 ) assert swh_storage.origin_count("github", regexp=True, with_visit=True) == 0 def test_origin_count_with_visit_with_visits_and_snapshot( self, swh_storage, sample_data ): - snapshot = sample_data["snapshot"][0] - swh_storage.origin_add(sample_data["origin"]) + snapshot = sample_data.snapshot + swh_storage.origin_add(sample_data.origins) swh_storage.snapshot_add([snapshot]) origin_url = "https://github.com/user1/repo1" visit = OriginVisit(origin=origin_url, date=now(), type="git",) visit = swh_storage.origin_visit_add([visit])[0] swh_storage.origin_visit_status_add( [ OriginVisitStatus( origin=origin_url, visit=visit.visit, date=now(), status="ongoing", snapshot=snapshot.id, ) ] ) assert swh_storage.origin_count("github", with_visit=False) == 3 # github/user1 has a visit and a snapshot, so with_visit=True => 1 assert swh_storage.origin_count("github", with_visit=True) == 1 assert ( swh_storage.origin_count("github.*user1", regexp=True, with_visit=False) == 1 ) assert ( swh_storage.origin_count("github.*user1", regexp=True, with_visit=True) == 1 ) assert swh_storage.origin_count("github", regexp=True, with_visit=True) == 1 @settings(suppress_health_check=[HealthCheck.too_slow]) @given(strategies.lists(objects(split_content=True), max_size=2)) def test_add_arbitrary(self, swh_storage, objects): for (obj_type, obj) in objects: if obj.object_type == "origin_visit": swh_storage.origin_add([Origin(url=obj.origin)]) visit = OriginVisit(origin=obj.origin, date=obj.date, type=obj.type,) swh_storage.origin_visit_add([visit]) else: method = getattr(swh_storage, 
obj_type + "_add") try: method([obj]) except HashCollision: pass @pytest.mark.db class TestLocalStorage: """Test the local storage""" # This test is only relevant on the local storage, with an actual # objstorage raising an exception def test_content_add_objstorage_exception(self, swh_storage, sample_data): - content = sample_data["content"][0] + content = sample_data.content swh_storage.objstorage.content_add = Mock( side_effect=Exception("mocked broken objstorage") ) with pytest.raises(Exception, match="mocked broken"): swh_storage.content_add([content]) missing = list(swh_storage.content_missing([content.hashes()])) assert missing == [content.sha1] @pytest.mark.db class TestStorageRaceConditions: @pytest.mark.xfail def test_content_add_race(self, swh_storage, sample_data): - content = sample_data["content"][0] + content = sample_data.content results = queue.Queue() def thread(): try: with db_transaction(swh_storage) as (db, cur): ret = swh_storage.content_add([content], db=db, cur=cur) results.put((threading.get_ident(), "data", ret)) except Exception as e: results.put((threading.get_ident(), "exc", e)) t1 = threading.Thread(target=thread) t2 = threading.Thread(target=thread) t1.start() # this avoids the race condition # import time # time.sleep(1) t2.start() t1.join() t2.join() r1 = results.get(block=False) r2 = results.get(block=False) with pytest.raises(queue.Empty): results.get(block=False) assert r1[0] != r2[0] assert r1[1] == "data", "Got exception %r in Thread%s" % (r1[2], r1[0]) assert r2[1] == "data", "Got exception %r in Thread%s" % (r2[2], r2[0]) @pytest.mark.db class TestPgStorage: """This class is dedicated for the rare case where the schema needs to be altered dynamically. Otherwise, the tests could be blocking when ran altogether. 
""" def test_content_update_with_new_cols(self, swh_storage, sample_data): - content, content2 = sample_data["content"][:2] + content, content2 = sample_data.contents[:2] swh_storage.journal_writer.journal = None # TODO, not supported with db_transaction(swh_storage) as (_, cur): cur.execute( """alter table content add column test text default null, add column test2 text default null""" ) swh_storage.content_add([content]) cont = content.to_dict() cont["test"] = "value-1" cont["test2"] = "value-2" swh_storage.content_update([cont], keys=["test", "test2"]) with db_transaction(swh_storage) as (_, cur): cur.execute( """SELECT sha1, sha1_git, sha256, length, status, test, test2 FROM content WHERE sha1 = %s""", (cont["sha1"],), ) datum = cur.fetchone() assert datum == ( cont["sha1"], cont["sha1_git"], cont["sha256"], cont["length"], "visible", cont["test"], cont["test2"], ) with db_transaction(swh_storage) as (_, cur): cur.execute( """alter table content drop column test, drop column test2""" ) def test_content_add_db(self, swh_storage, sample_data): - content = sample_data["content"][0] + content = sample_data.content actual_result = swh_storage.content_add([content]) assert actual_result == { "content:add": 1, "content:add:bytes": content.length, } if hasattr(swh_storage, "objstorage"): assert content.sha1 in swh_storage.objstorage.objstorage with db_transaction(swh_storage) as (_, cur): cur.execute( "SELECT sha1, sha1_git, sha256, length, status" " FROM content WHERE sha1 = %s", (content.sha1,), ) datum = cur.fetchone() assert datum == ( content.sha1, content.sha1_git, content.sha256, content.length, "visible", ) contents = [ obj for (obj_type, obj) in swh_storage.journal_writer.journal.objects if obj_type == "content" ] assert len(contents) == 1 assert contents[0] == attr.evolve(content, data=None) def test_content_add_metadata_db(self, swh_storage, sample_data): - content = attr.evolve(sample_data["content"][0], data=None, ctime=now()) + content = attr.evolve(sample_data.content, data=None, ctime=now()) actual_result = swh_storage.content_add_metadata([content]) assert actual_result == { "content:add": 1, } if hasattr(swh_storage, "objstorage"): assert content.sha1 not in swh_storage.objstorage.objstorage with db_transaction(swh_storage) as (_, cur): cur.execute( "SELECT sha1, sha1_git, sha256, length, status" " FROM content WHERE sha1 = %s", (content.sha1,), ) datum = cur.fetchone() assert datum == ( content.sha1, content.sha1_git, content.sha256, content.length, "visible", ) contents = [ obj for (obj_type, obj) in swh_storage.journal_writer.journal.objects if obj_type == "content" ] assert len(contents) == 1 assert contents[0] == content def test_skipped_content_add_db(self, swh_storage, sample_data): - content, cont2 = sample_data["skipped_content"][:2] + content, cont2 = sample_data.skipped_contents[:2] content2 = attr.evolve(cont2, blake2s256=None) actual_result = swh_storage.skipped_content_add([content, content, content2]) assert 2 <= actual_result.pop("skipped_content:add") <= 3 assert actual_result == {} with db_transaction(swh_storage) as (_, cur): cur.execute( "SELECT sha1, sha1_git, sha256, blake2s256, " "length, status, reason " "FROM skipped_content ORDER BY sha1_git" ) dbdata = cur.fetchall() assert len(dbdata) == 2 assert dbdata[0] == ( content.sha1, content.sha1_git, content.sha256, content.blake2s256, content.length, "absent", "Content too long", ) assert dbdata[1] == ( content2.sha1, content2.sha1_git, content2.sha256, content2.blake2s256, content2.length, "absent", 
"Content too long", ) def test_clear_buffers(self, swh_storage): """Calling clear buffers on real storage does nothing """ assert swh_storage.clear_buffers() is None def test_flush(self, swh_storage): """Calling clear buffers on real storage does nothing """ assert swh_storage.flush() == {} diff --git a/swh/storage/tests/test_storage_data.py b/swh/storage/tests/test_storage_data.py new file mode 100644 index 00000000..be9c7bce --- /dev/null +++ b/swh/storage/tests/test_storage_data.py @@ -0,0 +1,29 @@ +# Copyright (C) 2020 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +from swh.model.model import BaseModel + +from swh.storage.tests.storage_data import StorageData + + +def test_storage_data(): + data = StorageData() + + for attribute_key in [ + "contents", + "skipped_contents", + "directories", + "revisions", + "releases", + "snapshots", + "origins", + "origin_visits", + "fetchers", + "authorities", + "origin_metadata", + "content_metadata", + ]: + for obj in getattr(data, attribute_key): + assert isinstance(obj, BaseModel)