Changeset View
Changeset View
Standalone View
Standalone View
swh/core/api/tests/test_serializers.py
# Copyright (C) 2015-2020  The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import datetime
import json
import unittest
from typing import Any, Callable, List, Tuple
from uuid import UUID

import arrow
import pytest
import requests
import requests_mock

from swh.core.api.serializers import (
    SWHJSONDecoder,
    SWHJSONEncoder,
    decode_response,
    msgpack_dumps,
    msgpack_loads,
)
# Registration hooks teaching the SWH codecs how to (de)serialize ExtraType:
# an encoder triple (type, tag, to-wire callable) and the matching decoder.
extra_encoders: List[Tuple[type, str, Callable[..., Any]]] = [
    (ExtraType, "extratype", lambda o: (o.arg1, o.arg2)),
]

extra_decoders = {
    "extratype": lambda o: ExtraType(*o),
}
# A fixed, non-UTC offset (+01:58) used by the datetime test value below.
TZ = datetime.timezone(datetime.timedelta(minutes=118))

# Reference payload covering every value kind the SWH serializers handle:
# raw bytes, tz-aware datetimes, timedelta, arrow date, UUID, plus dicts
# whose "swhtype" keys must NOT be mistaken for codec envelopes.
DATA = {
    "bytes": b"123456789\x99\xaf\xff\x00\x12",
    "datetime_tz": datetime.datetime(2015, 3, 4, 18, 25, 13, 1234, tzinfo=TZ),
    "datetime_utc": datetime.datetime(
        2015, 3, 4, 18, 25, 13, 1234, tzinfo=datetime.timezone.utc
    ),
    "datetime_delta": datetime.timedelta(64),
    "arrow_date": arrow.get("2018-04-25T16:17:53.533672+00:00"),
    "swhtype": "fake",
    "swh_dict": {"swhtype": 42, "d": "test"},
    "random_dict": {"swhtype": 43},
    "uuid": UUID("cdd8f804-9db6-40c3-93ab-5955d3836234"),
}
# Expected JSON wire form of DATA: non-JSON-native values are wrapped in a
# {"swhtype": <tag>, "d": <payload>} envelope; JSON-native values (strings,
# plain dicts) pass through unchanged.
ENCODED_DATA = {
    "bytes": {"swhtype": "bytes", "d": "F)}kWH8wXmIhn8j01^"},
    "datetime_tz": {
        "swhtype": "datetime",
        "d": "2015-03-04T18:25:13.001234+01:58",
    },
    "datetime_utc": {
        "swhtype": "datetime",
        "d": "2015-03-04T18:25:13.001234+00:00",
    },
    "datetime_delta": {
        "swhtype": "timedelta",
        "d": {"days": 64, "seconds": 0, "microseconds": 0},
    },
    "arrow_date": {"swhtype": "arrow", "d": "2018-04-25T16:17:53.533672+00:00"},
    "swhtype": "fake",
    "swh_dict": {"swhtype": 42, "d": "test"},
    "random_dict": {"swhtype": 43},
    "uuid": {"swhtype": "uuid", "d": "cdd8f804-9db6-40c3-93ab-5955d3836234"},
}
def test_serializers_round_trip_json():
    """Encoding then decoding with the SWH JSON codec is lossless."""
    encoded = json.dumps(DATA, cls=SWHJSONEncoder)
    decoded = json.loads(encoded, cls=SWHJSONDecoder)
    assert decoded == DATA
def test_serializers_round_trip_json_extra_types():
    """Values of types registered via extra encoders/decoders survive a
    JSON round trip unchanged."""
    original = [ExtraType("baz", DATA), "qux"]
    encoded = json.dumps(
        original, cls=SWHJSONEncoder, extra_encoders=extra_encoders
    )
    decoded = json.loads(encoded, cls=SWHJSONDecoder, extra_decoders=extra_decoders)
    assert decoded == original
def test_serializers_encode_swh_json():
    """SWHJSONEncoder emits exactly the documented wire form (ENCODED_DATA)."""
    encoded = json.dumps(DATA, cls=SWHJSONEncoder)
    assert json.loads(encoded) == ENCODED_DATA
def test_serializers_round_trip_msgpack():
    """msgpack round trip is lossless, including None dict keys and integers
    too large for 64 bits."""
    original = dict(
        DATA,
        none_dict_key={None: 42},
        long_int_is_loooong=10000000000000000000000000000000,
    )
    assert msgpack_loads(msgpack_dumps(original)) == original
def test_serializers_round_trip_msgpack_extra_types():
    """Values of types registered via extra encoders/decoders survive a
    msgpack round trip unchanged."""
    original = [ExtraType("baz", DATA), "qux"]
    encoded = msgpack_dumps(original, extra_encoders=extra_encoders)
    assert msgpack_loads(encoded, extra_decoders=extra_decoders) == original
def test_serializers_generator_json():
    """Generators are drained and encoded as JSON lists."""
    encoded = json.dumps((i for i in range(5)), cls=SWHJSONEncoder)
    # list(range(5)) replaces the redundant [i for i in range(5)].
    assert json.loads(encoded, cls=SWHJSONDecoder) == list(range(5))
def test_serializers_generator_msgpack():
    """Generators are drained and encoded as msgpack lists."""
    encoded = msgpack_dumps(i for i in range(5))
    # list(range(5)) replaces the redundant [i for i in range(5)].
    assert msgpack_loads(encoded) == list(range(5))
def test_serializers_decode_response_json(requests_mock):
    """decode_response() turns a JSON HTTP response back into native values
    (uses the pytest ``requests_mock`` fixture)."""
    url = "https://example.org/test/data"
    requests_mock.get(
        url,
        json=ENCODED_DATA,
        headers={"content-type": "application/json"},
    )
    assert decode_response(requests.get(url)) == DATA
def test_serializers_decode_legacy_msgpack():
    """Payloads produced by the legacy ``__type__``-marker msgpack scheme
    must still decode to the same values as DATA."""
    legacy_msgpack = {
        "bytes": b"\xc4\x0e123456789\x99\xaf\xff\x00\x12",
        "datetime_tz": (
            b"\x82\xc4\x0c__datetime__\xc3\xc4\x01s\xd9 "
            b"2015-03-04T18:25:13.001234+01:58"
        ),
        "datetime_utc": (
            b"\x82\xc4\x0c__datetime__\xc3\xc4\x01s\xd9 "
            b"2015-03-04T18:25:13.001234+00:00"
        ),
        "datetime_delta": (
            b"\x82\xc4\r__timedelta__\xc3\xc4\x01s\x83\xa4"
            b"days@\xa7seconds\x00\xacmicroseconds\x00"
        ),
        "arrow_date": (
            b"\x82\xc4\t__arrow__\xc3\xc4\x01s\xd9 2018-04-25T16:17:53.533672+00:00"
        ),
        "swhtype": b"\xa4fake",
        "swh_dict": b"\x82\xa7swhtype*\xa1d\xa4test",
        "random_dict": b"\x81\xa7swhtype+",
        "uuid": (
            b"\x82\xc4\x08__uuid__\xc3\xc4\x01s\xd9$"
            b"cdd8f804-9db6-40c3-93ab-5955d3836234"
        ),
    }
    for key, payload in legacy_msgpack.items():
        assert msgpack_loads(payload) == DATA[key]
def test_serializers_encode_native_datetime():
    """Serializing a naive (tz-less) datetime is refused with ValueError."""
    naive = datetime.datetime(2015, 1, 1, 12, 4, 42, 231455)
    with pytest.raises(ValueError, match="naive datetime"):
        msgpack_dumps(naive)
def test_serializers_decode_naive_datetime():
    """Naive datetimes decode correctly from both the current and the
    legacy msgpack encodings."""
    expected = datetime.datetime(2015, 1, 1, 12, 4, 42, 231455)

    # Current encoding ({"swhtype": "datetime", "d": ...} envelope).
    current = msgpack_loads(
        b"\x82\xc4\x07swhtype\xa8datetime\xc4\x01d\xba2015-01-01T12:04:42.231455"
    )
    assert current == expected

    # Legacy encoding (__datetime__ marker).
    legacy = msgpack_loads(
        b"\x82\xc4\x0c__datetime__\xc3\xc4\x01s\xba2015-01-01T12:04:42.231455"
    )
    assert legacy == expected
make it a constant