# Imports needed to run these snippets; module paths assume aiokafka's
# test-suite layout (adjust if using kafka-python's kafka.record.* instead).
import pytest

from aiokafka.record.default_records import (
    DefaultRecordBatch, DefaultRecordBatchBuilder
)
from aiokafka.record.legacy_records import LegacyRecordBatchBuilder


def test_default_batch_size_limit():
    # First message can be added even if it's too big
    builder = DefaultRecordBatchBuilder(
        magic=2, compression_type=0, is_transactional=0,
        producer_id=-1, producer_epoch=-1, base_sequence=-1,
        batch_size=1024)
    meta = builder.append(
        0, timestamp=None, key=None, value=b"M" * 2000, headers=[])
    assert meta.size > 0
    assert meta.crc is None
    assert meta.offset == 0
    assert meta.timestamp is not None
    assert len(builder.build()) > 2000

    # Once the batch is full, further appends are rejected and return None
    builder = DefaultRecordBatchBuilder(
        magic=2, compression_type=0, is_transactional=0,
        producer_id=-1, producer_epoch=-1, base_sequence=-1,
        batch_size=1024)
    meta = builder.append(
        0, timestamp=None, key=None, value=b"M" * 700, headers=[])
    assert meta is not None
    meta = builder.append(
        1, timestamp=None, key=None, value=b"M" * 700, headers=[])
    assert meta is None
    meta = builder.append(
        2, timestamp=None, key=None, value=b"M" * 700, headers=[])
    assert meta is None
    assert len(builder.build()) < 1000
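

# The test above shows the append() contract: the first record is always
# accepted (even past batch_size), while later records that do not fit
# return None. A minimal producer-style sketch built on that contract;
# `fill_batches` and its `flush_batch` callback are hypothetical helpers,
# not part of the library API.
def fill_batches(values, flush_batch, batch_size=1024):
    builder = DefaultRecordBatchBuilder(
        magic=2, compression_type=0, is_transactional=0,
        producer_id=-1, producer_epoch=-1, base_sequence=-1,
        batch_size=batch_size)
    offset = 0
    for value in values:
        meta = builder.append(
            offset, timestamp=None, key=None, value=value, headers=[])
        if meta is None:
            # Batch is full: seal it and start a fresh one for this record.
            flush_batch(builder.build())
            builder = DefaultRecordBatchBuilder(
                magic=2, compression_type=0, is_transactional=0,
                producer_id=-1, producer_epoch=-1, base_sequence=-1,
                batch_size=batch_size)
            offset = 0
            meta = builder.append(
                offset, timestamp=None, key=None, value=value, headers=[])
        assert meta is not None  # first record in a fresh batch always fits
        offset += 1
    flush_batch(builder.build())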


def test_default_batch_builder_validates_arguments():
    builder = DefaultRecordBatchBuilder(
        magic=2, compression_type=0, is_transactional=0,
        producer_id=-1, producer_epoch=-1, base_sequence=-1,
        batch_size=999999)

    # Key should not be str
    with pytest.raises(TypeError):
        builder.append(
            0, timestamp=9999999, key="some string", value=None, headers=[])

    # Value should not be str
    with pytest.raises(TypeError):
        builder.append(
            0, timestamp=9999999, key=None, value="some string", headers=[])

    # Timestamp should be of proper type
    with pytest.raises(TypeError):
        builder.append(
            0, timestamp="1243812793", key=None, value=b"some string",
            headers=[])


def test_build_without_append():
    builder = DefaultRecordBatchBuilder(
        magic=2, compression_type=0, is_transactional=1,
        producer_id=123456, producer_epoch=123, base_sequence=9999,
        batch_size=999999)
    buffer = builder.build()
    reader = DefaultRecordBatch(bytes(buffer))
    msgs = list(reader)
    assert not msgs


def test_estimate_size_in_bytes_bigger_than_batch_v2():
    key = b"Super Key"
    value = b"1" * 100
    headers = [("header1", b"aaa"), ("header2", b"bbb")]
    estimate_size = DefaultRecordBatchBuilder.estimate_size_in_bytes(
        key, value, headers)

    builder = DefaultRecordBatchBuilder(
        magic=2, compression_type=0, is_transactional=0,
        producer_id=-1, producer_epoch=-1, base_sequence=-1,
        batch_size=999999)
    builder.append(
        0, timestamp=9999999, key=key, value=value, headers=headers)
    buf = builder.build()
    assert len(buf) <= estimate_size, \
        "Estimate should always be upper bound"


def test_default_correct_metadata_response():
    builder = DefaultRecordBatchBuilder(
        magic=2, compression_type=0, is_transactional=0,
        producer_id=-1, producer_epoch=-1, base_sequence=-1,
        batch_size=1024 * 1024)
    meta = builder.append(
        0, timestamp=9999999, key=b"test", value=b"Super", headers=[])

    assert meta.offset == 0
    assert meta.timestamp == 9999999
    assert meta.crc is None
    assert meta.size == 16
    assert repr(meta) == (
        "DefaultRecordMetadata(offset=0, size={}, timestamp={})"
        .format(meta.size, meta.timestamp)
    )


# NOTE: compression_type and crc are fixture parameters; in the full test
# suite they come from a @pytest.mark.parametrize decorator that this
# excerpt omits.
def test_read_write_serde_v2(compression_type, crc):
    builder = DefaultRecordBatchBuilder(
        magic=2, compression_type=compression_type, is_transactional=1,
        producer_id=123456, producer_epoch=123, base_sequence=9999,
        batch_size=999999)
    headers = [("header1", b"aaa"), ("header2", b"bbb")]
    for offset in range(10):
        builder.append(
            offset, timestamp=9999999 + offset, key=b"test", value=b"Super",
            headers=headers)
    buffer = builder.build()
    reader = DefaultRecordBatch(bytes(buffer))
    assert reader.validate_crc()
    msgs = list(reader)

    assert reader.is_transactional is True
    assert reader.is_control_batch is False
    assert reader.compression_type == compression_type
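

# Read side: DefaultRecordBatch parses a built buffer, exposes the batch
# header (is_transactional, is_control_batch, compression_type, producer
# fields) and iterates over the individual records. A small sketch; the
# per-record attributes are assumed to mirror the offset/timestamp fields
# of the append metadata plus key/value.
def dump_batch(buffer):
    reader = DefaultRecordBatch(bytes(buffer))
    assert reader.validate_crc()
    for record in reader:
        print(record.offset, record.timestamp, record.key, record.value)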


def test_written_bytes_equals_size_in_bytes_v2():
    key = b"test"
    value = b"Super"
    headers = [("header1", b"aaa"), ("header2", b"bbb"), ("xx", None)]
    builder = DefaultRecordBatchBuilder(
        magic=2, compression_type=0, is_transactional=0,
        producer_id=-1, producer_epoch=-1, base_sequence=-1,
        batch_size=999999)

    size_in_bytes = builder.size_in_bytes(
        0, timestamp=9999999, key=key, value=value, headers=headers)

    pos = builder.size()
    meta = builder.append(
        0, timestamp=9999999, key=key, value=value, headers=headers)

    assert builder.size() - pos == size_in_bytes
    assert meta.size == size_in_bytes


def test_set_producer_state():
    builder = DefaultRecordBatchBuilder(
        magic=2, compression_type=0, is_transactional=0,
        producer_id=-1, producer_epoch=-1, base_sequence=-1,
        batch_size=999999)
    builder.set_producer_state(
        producer_id=700,
        producer_epoch=5,
        base_sequence=17)
    assert builder.producer_id == 700

    buffer = builder.build()
    reader = DefaultRecordBatch(bytes(buffer))
    assert reader.producer_id == 700
    assert reader.producer_epoch == 5
    assert reader.base_sequence == 17


# The following methods belong to a batching wrapper class (aiokafka's
# MemoryRecordsBuilder); they are excerpted here without the rest of the
# class body.
def __init__(self, magic, batch_size, compression_type,
             *, is_transactional):
    if magic < 2:
        # Legacy (v0/v1) batches cannot carry transactional state.
        assert not is_transactional
        self._builder = LegacyRecordBatchBuilder(
            magic, compression_type, batch_size)
    else:
        self._builder = DefaultRecordBatchBuilder(
            magic, compression_type, is_transactional=is_transactional,
            producer_id=-1, producer_epoch=-1, base_sequence=0,
            batch_size=batch_size)
    self._relative_offset = 0
    self._buffer = None
    self._closed = False

def _set_producer_state(self, producer_id, producer_epoch, base_sequence):
    # Only the v2 builder carries producer state.
    assert type(self._builder) is DefaultRecordBatchBuilder
    self._builder.set_producer_state(
        producer_id, producer_epoch, base_sequence)
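

# The wrapper above constructs v2 batches with placeholder producer state
# (producer_id=-1) and stamps the real ids later via set_producer_state,
# right before the batch is handed off. A minimal sketch of that pattern
# using the builder API directly; `stamp_and_build` is a hypothetical helper.
def stamp_and_build(builder, producer_id, producer_epoch, base_sequence):
    # Only the v2 (magic=2) format carries producer state.
    assert type(builder) is DefaultRecordBatchBuilder
    builder.set_producer_state(producer_id, producer_epoch, base_sequence)
    return builder.build()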