Paste P1073

Cannot add revisions in swh.storage (at least when using the in-memory implementation)
Active · Public

Authored by KShivendu on Jun 18 2021, 4:19 PM.
(swh2) shivendu@swh-self-hosted:~/swh-environment/swh-search$ pytest -sv swh/search/tests/test_journal_client.py
================================================== test session starts ==================================================
platform linux -- Python 3.7.5, pytest-6.2.4, py-1.10.0, pluggy-0.13.1 -- /home/shivendu/.virtualenvs/swh2/bin/python3
cachedir: .pytest_cache
hypothesis profile 'default' -> database=DirectoryBasedExampleDatabase('/home/shivendu/swh-environment/swh-search/.hypothesis/examples')
rootdir: /home/shivendu/swh-environment/swh-search, configfile: pytest.ini
plugins: redis-2.1.0, xdist-2.2.1, mock-3.6.1, dash-1.20.0, requests-mock-1.9.2, hypothesis-6.13.4, forked-1.3.0, django-4.3.0, asyncio-0.15.1, postgresql-2.6.1, django-test-migrations-1.1.0, flask-1.2.0, swh.core-0.14.2.dev1+gc3c387d, swh.journal-0.7.2.dev8+g2972c7a
collected 4 items
swh/search/tests/test_journal_client.py::test_journal_client_origin_from_journal PASSED
swh/search/tests/test_journal_client.py::test_journal_client_origin_visit_from_journal PASSED
swh/search/tests/test_journal_client.py::test_journal_client_origin_visit_status_from_journal FAILED
swh/search/tests/test_journal_client.py::test_journal_client_origin_metadata_from_journal PASSED
======================================================= FAILURES ========================================================
_________________________________ test_journal_client_origin_visit_status_from_journal __________________________________
    def test_journal_client_origin_visit_status_from_journal():
        search_mock = MagicMock()
        storage = get_storage('memory')
        DATES = [
            TimestampWithTimezone(
                timestamp=Timestamp(seconds=1234567891, microseconds=0,),
                offset=120,
                negative_utc=False,
            ),
            TimestampWithTimezone(
                timestamp=Timestamp(seconds=1234567892, microseconds=0,),
                offset=120,
                negative_utc=False,
            ),
        ]
        COMMITTERS = [
            Person(fullname=b"foo", name=b"foo", email=b""),
            Person(fullname=b"bar", name=b"bar", email=b""),
        ]
        REVISIONS = [
            Release(
                id=hash_to_bytes("8059dc4e17fcd0e51ca3bcd6b80f4577d281fd08"),
                name=b"v0.0.1",
                date=TimestampWithTimezone(
                    timestamp=Timestamp(seconds=1234567890, microseconds=0,),
                    offset=120,
                    negative_utc=False,
                ),
                author=COMMITTERS[0],
                target_type=ObjectType.REVISION,
                target=b"\x04" * 20,
                message=b"foo",
                synthetic=False,
            ),
            Release(
                id=hash_to_bytes("ee4d20e80af850cc0f417d25dc5073792c5010d2"),
                name=b"this-is-a/tag/1.0",
                date=None,
                author=None,
                target_type=ObjectType.DIRECTORY,
                target=b"\x05" * 20,
                message=b"bar",
                synthetic=False,
            ),
        ]
        RELEASES = [
            Release(
                id=hash_to_bytes("8059dc4e17fcd0e51ca3bcd6b80f4577d281fd08"),
                name=b"v0.0.1",
                date=TimestampWithTimezone(
                    timestamp=Timestamp(seconds=1234567890, microseconds=0,),
                    offset=120,
                    negative_utc=False,
                ),
                author=COMMITTERS[0],
                target_type=ObjectType.REVISION,
                target=b"\x04" * 20,
                message=b"foo",
                synthetic=False,
            ),
            Release(
                id=hash_to_bytes("ee4d20e80af850cc0f417d25dc5073792c5010d2"),
                name=b"this-is-a/tag/1.0",
                date=None,
                author=None,
                target_type=ObjectType.DIRECTORY,
                target=b"\x05" * 20,
                message=b"bar",
                synthetic=False,
            ),
        ]
        SNAPSHOTS = [
            Snapshot(
                id=hash_to_bytes("0e7f84ede9a254f2cd55649ad5240783f557e65f"),
                branches={
                    b"target/revision1": SnapshotBranch(
                        target_type=TargetType.REVISION, target=REVISIONS[0].id,
                    ),
                    b"target/revision2": SnapshotBranch(
                        target_type=TargetType.REVISION, target=REVISIONS[0].id,
                    ),
                    b"target/revision3": SnapshotBranch(
                        target_type=TargetType.REVISION, target=REVISIONS[0].id,
                    ),
                    b"target/release1": SnapshotBranch(
                        target_type=TargetType.RELEASE, target=RELEASES[0].id
                    ),
                    b"target/release2": SnapshotBranch(
                        target_type=TargetType.RELEASE, target=RELEASES[0].id
                    ),
                    b"target/release3": SnapshotBranch(
                        target_type=TargetType.RELEASE, target=RELEASES[0].id
                    ),
                    b"target/alias": SnapshotBranch(
                        target_type=TargetType.ALIAS, target=b"target/revision"
                    ),
                },
            ),
        ]
>       storage.revision_add(REVISIONS)
swh/search/tests/test_journal_client.py:160:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../swh-storage/swh/storage/cassandra/storage.py:568: in revision_add
revobject = converters.revision_to_db(revision)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
revision = Release(name=b'v0.0.1', message=b'foo', target=b'\x04\x04\x04\x04\x04\x04\x04\x04\x04\x04\x04\x04\x04\x04\x04\x04\x04\...et=120, negative_utc=False), metadata=None, id=b'\x80Y\xdcN\x17\xfc\xd0\xe5\x1c\xa3\xbc\xd6\xb8\x0fEw\xd2\x81\xfd\x08')
    def revision_to_db(revision: Revision) -> RevisionRow:
        # we use a deepcopy of the dict because we do not want to recurse the
        # Model->dict conversion (to keep Timestamp & al. entities), BUT we do not
        # want to modify original metadata (embedded in the Model entity), so we
        # non-recursively convert it as a dict but make a deep copy.
        db_revision = deepcopy(attr.asdict(revision, recurse=False))
        metadata = revision.metadata
>       extra_headers = revision.extra_headers
E       AttributeError: 'Release' object has no attribute 'extra_headers'
../swh-storage/swh/storage/cassandra/converters.py:35: AttributeError
================================================ short test summary info ================================================
FAILED swh/search/tests/test_journal_client.py::test_journal_client_origin_visit_status_from_journal - AttributeError:...
============================================== 1 failed, 3 passed in 0.50s ==============================================
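
For comparison, below is a minimal sketch (not part of the original paste) of a revision_add call that the in-memory storage accepts: the traceback above shows a Release instance reaching converters.revision_to_db, whereas revision_add expects swh.model.model.Revision objects. The id and directory values in the sketch are arbitrary placeholders, and the constructor signatures assume the swh.model / swh.storage versions visible in this log (later swh.model releases changed the TimestampWithTimezone constructor).

from swh.model.hashutil import hash_to_bytes
from swh.model.model import (
    Person,
    Revision,
    RevisionType,
    Timestamp,
    TimestampWithTimezone,
)
from swh.storage import get_storage

storage = get_storage("memory")

# Placeholder identifiers for illustration only: a real loader would compute
# the revision id from its contents and point `directory` at a real directory.
revision = Revision(
    id=hash_to_bytes("aafb16d69fd30ff58afdd69036a26047f3aebdc6"),
    message=b"hello",
    author=Person(fullname=b"foo", name=b"foo", email=b""),
    committer=Person(fullname=b"foo", name=b"foo", email=b""),
    date=TimestampWithTimezone(
        timestamp=Timestamp(seconds=1234567890, microseconds=0),
        offset=120,
        negative_utc=False,
    ),
    committer_date=TimestampWithTimezone(
        timestamp=Timestamp(seconds=1234567890, microseconds=0),
        offset=120,
        negative_utc=False,
    ),
    type=RevisionType.GIT,
    directory=b"\x01" * 20,
    synthetic=False,
)

print(storage.revision_add([revision]))  # expected summary: {'revision:add': 1}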

Event Timeline

KShivendu created this object in space S1 Public.