kafka_prefix = 'zngblioqkl.swh.journal.objects'
kafka_server = (<subprocess.Popen object at 0x7f57618ce438>, 24787)
consumer = <cimpl.Consumer object at 0x7f576196fd90>
large_directories = [Directory(entries=[DirectoryEntry(name=b'000000000', type='file', target=b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x...x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', perms=33188)], id=b').\x887-c?\x10\x9eRa\x93\t\x16\xeb_\x02\xbaRz'), ...]
    @pytest.mark.parametrize(
        "kafka_server_config_overrides", [{"message.max.bytes": str(SMALL_MESSAGE_SIZE)}]
    )
    def test_fail_write_large_objects(
        kafka_prefix: str,
        kafka_server: Tuple[Popen, int],
        consumer: Consumer,
        large_directories: List[Directory],
    ):
        kafka_prefix += ".swh.journal.objects"

        # Needed as there are no directories in OBJECT_TYPES
        consumer.subscribe([kafka_prefix + ".directory"])

        writer = KafkaJournalWriter(
            brokers=["localhost:%d" % kafka_server[1]],
            client_id="kafka_writer",
            prefix=kafka_prefix,
        )
        def dir_message_size(directory: Directory) -> int:
            return (
                1
                + 1  # top level fixmap overhead
                + 2
                + 2  # id header: len("id")
                + 20
                + 1  # bin8 for 20 byte directory id
                + 7
                + 1  # entries header: len("entries")
                + 4
                + len(directory.entries)  # array32 header, worst case scenario
                * (
                    1
                    + 1  # fixmap overhead
                    + 6
                    + 1  # target header
                    + 20
                    + 1  # bin8 for target value
                    + 4
                    + 1  # name header
                    + 9
                    + 1  # bin8 for name value
                    + 5
                    + 1  # perms header
                    + 4
                    + 1  # int32 perms
                    + 4
                    + 1  # type header
                    + 4  # str4 for type value
                )
            )
        expected_dirs = []

        for directory in large_directories:
            if dir_message_size(directory) < SMALL_MESSAGE_SIZE:
                # No error; write anyway, but continue
                writer.write_addition("directory", directory)
                expected_dirs.append(directory)
                continue

            with pytest.raises(KafkaDeliveryError) as exc:
>               writer.write_addition("directory", directory)
.tox/py3/lib/python3.7/site-packages/swh/journal/tests/test_kafka_writer.py:282:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/py3/lib/python3.7/site-packages/swh/journal/writer/kafka.py:280: in write_addition
    self.flush()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <swh.journal.writer.kafka.KafkaJournalWriter object at 0x7f57064bcba8>
    def flush(self):
        start = time.monotonic()
        self.producer.flush(self.flush_timeout)
        while self.deliveries_pending:
            if time.monotonic() - start > self.flush_timeout:
                break
            self.producer.poll(0.1)
        if self.deliveries_pending:
            # Delivery timeout
            raise self.delivery_error(
                "flush() exceeded timeout (%ss)" % self.flush_timeout,
            )
        elif self.delivery_failures:
            raise self.delivery_error(
>               "Failed deliveries after flush()", self.failed_deliveries(),
            )
E           AttributeError: 'KafkaJournalWriter' object has no attribute 'failed_deliveries'
.tox/py3/lib/python3.7/site-packages/swh/journal/writer/kafka.py:216: AttributeError
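
The AttributeError comes from the error-reporting path itself: the branch condition just above the raise checks self.delivery_failures, but the raise then calls self.failed_deliveries(), which the traceback shows is not defined on KafkaJournalWriter. As a result the test gets an AttributeError instead of the KafkaDeliveryError it is waiting for. A minimal sketch of what the raise site presumably intends, assuming delivery_failures already holds the per-message failure records and that delivery_error accepts them as an optional second argument (as the two call sites above suggest):

    # Hypothetical sketch only, not the project's confirmed fix: reuse the
    # attribute the surrounding code already checks instead of calling the
    # undefined failed_deliveries().
    elif self.delivery_failures:
        raise self.delivery_error(
            "Failed deliveries after flush()",
            self.delivery_failures,
        )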
TEST RESULT
- Run At: Apr 9 2020, 2:36 PM
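
For context on the size threshold the test relies on: dir_message_size above reduces to a fixed overhead of 39 bytes plus 65 bytes per directory entry (summing the constants exactly as they appear in the traceback), so a directory is expected to exceed the broker's message.max.bytes override once enough entries accumulate. A standalone sketch of that arithmetic; the concrete value of SMALL_MESSAGE_SIZE is not shown in this excerpt, so it is treated as a parameter:

    # Sanity check of the estimate used by the test; the constants are summed
    # from dir_message_size as quoted above, not taken from the swh.journal
    # sources.
    FIXED_OVERHEAD = 1 + 1 + 2 + 2 + 20 + 1 + 7 + 1 + 4  # 39 bytes per message
    PER_ENTRY = (
        1 + 1 + 6 + 1 + 20 + 1 + 4 + 1 + 9 + 1 + 5 + 1 + 4 + 1 + 4 + 1 + 4
    )  # 65 bytes per entry, worst case

    def estimated_size(n_entries: int) -> int:
        # Mirrors dir_message_size(): fixed overhead plus one worst-case
        # entry encoding per directory entry.
        return FIXED_OVERHEAD + n_entries * PER_ENTRY

Directories whose estimated_size(len(directory.entries)) reaches SMALL_MESSAGE_SIZE are the ones for which the test expects KafkaDeliveryError; the AttributeError above is raised before that error can even be constructed.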