Changeset View
Changeset View
Standalone View
Standalone View
swh/core/api/tests/test_serializers.py
# Copyright (C) 2015-2020 The Software Heritage developers | # Copyright (C) 2015-2020 The Software Heritage developers | ||||
# See the AUTHORS file at the top-level directory of this distribution | # See the AUTHORS file at the top-level directory of this distribution | ||||
# License: GNU General Public License version 3, or any later version | # License: GNU General Public License version 3, or any later version | ||||
# See top-level LICENSE file for more information | # See top-level LICENSE file for more information | ||||
import datetime | import datetime | ||||
import json | import json | ||||
from typing import Any, Callable, List, Tuple | from typing import Any, Callable, List, Tuple, Union | ||||
from arrow import Arrow | |||||
from uuid import UUID | from uuid import UUID | ||||
import pytest | import pytest | ||||
import arrow | import arrow | ||||
import requests | import requests | ||||
from swh.core.api.classes import PagedResult | |||||
from swh.core.api.serializers import ( | from swh.core.api.serializers import ( | ||||
SWHJSONDecoder, | SWHJSONDecoder, | ||||
SWHJSONEncoder, | SWHJSONEncoder, | ||||
msgpack_dumps, | msgpack_dumps, | ||||
msgpack_loads, | msgpack_loads, | ||||
decode_response, | decode_response, | ||||
) | ) | ||||
Show All 19 Lines | |||||
# Decoder table for the custom "extratype" tag: rebuilds an ExtraType
# instance from its encoded tuple (mirror of the encoder side).
extra_decoders = {
    "extratype": lambda args: ExtraType(*args),
}

# A deliberately unusual fixed offset (+01:58) so that encoded datetimes
# exercise a non-round, non-UTC timezone.
TZ = datetime.timezone(datetime.timedelta(hours=1, minutes=58))
# Sample values, one per supported type, each paired with its expected
# encoded form ({"swhtype": ..., "d": ...}).
DATA_BYTES = b"123456789\x99\xaf\xff\x00\x12"
ENCODED_DATA_BYTES = {"swhtype": "bytes", "d": "F)}kWH8wXmIhn8j01^"}

DATA_DATETIME = datetime.datetime(2015, 3, 4, 18, 25, 13, 1234, tzinfo=TZ)
ENCODED_DATA_DATETIME = {
    "swhtype": "datetime",
    "d": "2015-03-04T18:25:13.001234+01:58",
}

DATA_TIMEDELTA = datetime.timedelta(days=64)
ENCODED_DATA_TIMEDELTA = {
    "swhtype": "timedelta",
    "d": {"days": 64, "seconds": 0, "microseconds": 0},
}

DATA_ARROW = arrow.get("2018-04-25T16:17:53.533672+00:00")
ENCODED_DATA_ARROW = {"swhtype": "arrow", "d": "2018-04-25T16:17:53.533672+00:00"}

DATA_UUID = UUID("cdd8f804-9db6-40c3-93ab-5955d3836234")
ENCODED_DATA_UUID = {"swhtype": "uuid", "d": "cdd8f804-9db6-40c3-93ab-5955d3836234"}

# For test demonstration purposes: PagedResult specializations built from
# the sample values above, to check that nested encoding/decoding works.
TestPagedResultStr = PagedResult[
    Union[UUID, datetime.datetime, datetime.timedelta], str
]
DATA_PAGED_RESULT = TestPagedResultStr(
    results=[DATA_UUID, DATA_DATETIME, DATA_TIMEDELTA],
    next_page_token="10",
)
ENCODED_DATA_PAGED_RESULT = {
    "d": {
        "results": [
            ENCODED_DATA_UUID,
            ENCODED_DATA_DATETIME,
            ENCODED_DATA_TIMEDELTA,
        ],
        "next_page_token": "10",
    },
    "swhtype": "paged_result",
}

TestPagedResultTuple = PagedResult[Union[str, bytes, Arrow], List[Union[str, UUID]]]
DATA_PAGED_RESULT2 = TestPagedResultTuple(
    results=["data0", DATA_BYTES, DATA_ARROW],
    next_page_token=["10", DATA_UUID],
)
ENCODED_DATA_PAGED_RESULT2 = {
    "d": {
        "results": ["data0", ENCODED_DATA_BYTES, ENCODED_DATA_ARROW],
        "next_page_token": ["10", ENCODED_DATA_UUID],
    },
    "swhtype": "paged_result",
}
# One mapping that exercises every supported type at once.  The "swhtype",
# "swh_dict" and "random_dict" entries are plain data whose keys happen to
# collide with the encoding markers; they must round-trip untouched.
DATA = {
    "bytes": DATA_BYTES,
    "datetime_tz": DATA_DATETIME,
    "datetime_utc": datetime.datetime(
        2015, 3, 4, 18, 25, 13, 1234, tzinfo=datetime.timezone.utc
    ),
    "datetime_delta": DATA_TIMEDELTA,
    "arrow_date": DATA_ARROW,
    "swhtype": "fake",
    "swh_dict": {"swhtype": 42, "d": "test"},
    "random_dict": {"swhtype": 43},
    "uuid": DATA_UUID,
    "paged-result": DATA_PAGED_RESULT,
    "paged-result2": DATA_PAGED_RESULT2,
}
# Expected encoded counterpart of DATA, key for key.  Entries that are not
# custom types ("swhtype", "swh_dict", "random_dict") stay as-is.
ENCODED_DATA = {
    "bytes": ENCODED_DATA_BYTES,
    "datetime_tz": ENCODED_DATA_DATETIME,
    "datetime_utc": {
        "swhtype": "datetime",
        "d": "2015-03-04T18:25:13.001234+00:00",
    },
    "datetime_delta": ENCODED_DATA_TIMEDELTA,
    "arrow_date": ENCODED_DATA_ARROW,
    "swhtype": "fake",
    "swh_dict": {"swhtype": 42, "d": "test"},
    "random_dict": {"swhtype": 43},
    "uuid": ENCODED_DATA_UUID,
    "paged-result": ENCODED_DATA_PAGED_RESULT,
    "paged-result2": ENCODED_DATA_PAGED_RESULT2,
}
ardumont: Just to show that the nesting encoding/decoding actually works already.
Which is nice — nothing fancy to do to deal with our data model objects (for D3627, for example).
def test_serializers_round_trip_json():
    """Encoding DATA with SWHJSONEncoder and decoding the result with
    SWHJSONDecoder must reproduce DATA exactly, including nested custom
    types (datetimes, UUIDs, paged results, ...)."""
    encoded = json.dumps(DATA, cls=SWHJSONEncoder)
    round_tripped = json.loads(encoded, cls=SWHJSONDecoder)
    assert round_tripped == DATA
▲ Show 20 Lines • Show All 109 Lines • Show Last 20 Lines |
Just to show that the nesting encoding/decoding actually works already.
Which is nice — nothing fancy to do to deal with our data model objects (for D3627, for example).