import lz4.frame as lz4frame


def test_roundtrip_multiframe_3(data):
    nframes = 4

    compressed = b''
    ctx = lz4frame.create_compression_context()
    for _ in range(nframes):
        compressed += lz4frame.compress_begin(ctx)
        compressed += lz4frame.compress_chunk(ctx, data)
        compressed += lz4frame.compress_flush(ctx)

    decompressed = b''
    ctx = lz4frame.create_decompression_context()
    frame_len = len(compressed) // nframes
    buf = compressed
    for _ in range(nframes):
        # Each call consumes exactly one frame; advance past it before the
        # next iteration. The context resets at end of frame, so passing the
        # full buffer every time would silently re-read the first frame.
        d, bytes_read, eof = lz4frame.decompress_chunk(ctx, buf)
        decompressed += d
        assert eof is True
        assert bytes_read == frame_len
        buf = buf[bytes_read:]

    assert len(decompressed) == nframes * len(data)
    assert data * nframes == decompressed
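
For contrast with the context-based calls above, the module-level one-shot helpers do the whole roundtrip in two calls; a minimal sketch (the payload here is illustrative):

import lz4.frame as lz4frame

data = b'repeated example payload ' * 1024

# compress() emits a complete frame (header, blocks, end mark);
# decompress() consumes one frame and returns the original bytes.
compressed = lz4frame.compress(data)
assert lz4frame.decompress(compressed) == data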
def test_roundtrip_chunked(data_chunked, block_size, block_mode,
                           content_checksum, frame_type,
                           compression_level, auto_flush):
    data, c_chunks, d_chunks = data_chunked

    c_context = lz4frame.create_compression_context()
    compressed = lz4frame.compress_begin(
        c_context,
        source_size=len(data),
        compression_level=compression_level,
        block_size=block_size,
        content_checksum=content_checksum,
        frame_type=frame_type,
        auto_flush=auto_flush
    )
    data_in = get_chunked(data, c_chunks)
    try:
        while True:
            compressed += lz4frame.compress_update(
                c_context,
                next(data_in)
            )
    except StopIteration:
        pass
    finally:
        del data_in

    compressed += lz4frame.compress_end(c_context)

def test_roundtrip_chunked(data, block_size, block_linked,
                           content_checksum, block_checksum,
                           compression_level, auto_flush, store_size):
    data, c_chunks, d_chunks = data

    c_context = lz4frame.create_compression_context()

    kwargs = {}
    kwargs['compression_level'] = compression_level
    kwargs['block_size'] = block_size
    kwargs['block_linked'] = block_linked
    kwargs['content_checksum'] = content_checksum
    kwargs['block_checksum'] = block_checksum
    kwargs['auto_flush'] = auto_flush
    if store_size is True:
        kwargs['source_size'] = len(data)

    compressed = lz4frame.compress_begin(
        c_context,
        **kwargs
    )
    data_in = get_chunked(data, c_chunks)
    try:
        while True:
            compressed += lz4frame.compress_chunk(
                c_context,
                next(data_in)
            )
    except StopIteration:
        pass
    finally:
        del data_in

    compressed += lz4frame.compress_flush(c_context)
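
The snippet above stops after flushing the frame; the decompression half of the roundtrip follows the same chunked pattern. A sketch under the same assumptions (get_chunked and the d_chunks fixture come from the test suite and are not shown here):

d_context = lz4frame.create_decompression_context()
decompressed = b''
data_in = get_chunked(compressed, d_chunks)
try:
    while True:
        # decompress_chunk returns (data, bytes_read, end_of_frame)
        d, bytes_read, eof = lz4frame.decompress_chunk(
            d_context,
            next(data_in)
        )
        decompressed += d
except StopIteration:
    pass
finally:
    del data_in

assert decompressed == data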
def test_roundtrip(data, block_size, block_mode,
                   content_checksum, frame_type,
                   compression_level, auto_flush):
    c_context = lz4frame.create_compression_context()
    compressed = lz4frame.compress_begin(
        c_context,
        source_size=len(data),
        compression_level=compression_level,
        block_size=block_size,
        content_checksum=content_checksum,
        frame_type=frame_type,
        auto_flush=auto_flush
    )
    compressed += lz4frame.compress_update(
        c_context,
        data
    )
    compressed += lz4frame.compress_end(c_context)

    d_context = lz4frame.create_decompression_context()
    decompressed, bytes_read = lz4frame.decompress(d_context, compressed)
    assert bytes_read == len(compressed)
    assert decompressed == data
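
The begin/update/end sequence above is also wrapped by the LZ4FrameCompressor and LZ4FrameDecompressor classes, which handle the frame bookkeeping; a brief sketch (the payload and compression level here are illustrative):

import lz4.frame as lz4frame

data = b'x' * 100000

with lz4frame.LZ4FrameCompressor(compression_level=5) as compressor:
    # begin() emits the frame header; flush() ends the frame.
    compressed = compressor.begin()
    compressed += compressor.compress(data)
    compressed += compressor.flush()

decompressor = lz4frame.LZ4FrameDecompressor()
assert decompressor.decompress(compressed) == data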
def test_roundtrip_multiframe_2(data):
    nframes = 4

    compressed = b''
    ctx = lz4frame.create_compression_context()
    for _ in range(nframes):
        compressed += lz4frame.compress_begin(ctx)
        compressed += lz4frame.compress_chunk(ctx, data)
        compressed += lz4frame.compress_flush(ctx)

    decompressed = b''
    buf = compressed
    for _ in range(nframes):
        # decompress() consumes a single frame; track how far it read so
        # the next iteration starts at the following frame instead of
        # decompressing the first frame again.
        d, bytes_read = lz4frame.decompress(buf, return_bytes_read=True)
        decompressed += d
        buf = buf[bytes_read:]

    assert len(decompressed) == nframes * len(data)
    assert data * nframes == decompressed
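
When frames are concatenated as above, LZ4FrameDecompressor reports where each one ends: on end of frame its eof attribute becomes True and any trailing bytes are left in unused_data, so a fresh decompressor can take over. A sketch reusing the four-frame buffer built in this test, assuming the eof/unused_data attributes behave like their lzma counterparts:

decompressed = b''
buf = compressed
while buf:
    decompressor = lz4frame.LZ4FrameDecompressor()
    decompressed += decompressor.decompress(buf)
    # Bytes belonging to the next frame are left in unused_data.
    buf = decompressor.unused_data

assert decompressed == data * nframes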
def __flush(self, force=False):
    if not force and self.__frlen < 262144:
        return
    assert sum(len(_[0]) for _ in self.__frame) == self.__frlen
    file_off = self.__file_off
    ctx = lz4frame.create_compression_context()
    self.__file_write(
        lz4frame.compress_begin(
            ctx,
            block_size=lz4frame.BLOCKSIZE_MAX4MB,  # does no harm for larger blobs
            block_mode=lz4frame.BLOCKMODE_LINKED,
            compression_level=5,
            content_checksum=lz4frame.CONTENTCHECKSUM_ENABLED,
            # sorry, no per-block checksums yet
            auto_flush=False,
            source_size=self.__frlen,
        )
    )
    for blob, meta in self.__frame:
        self.__file_write(lz4frame.compress_update(ctx, blob))
    self.__file_write(lz4frame.compress_end(ctx))
    json.dump(
        {
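
Reading a frame back from such a file only needs the recorded offset; a minimal sketch using the streaming decompression calls from the tests above (read_frame and the 64 KiB read size are illustrative, and the truncated json.dump call above presumably persists file_off in an index):

def read_frame(fileobj, file_off):
    # Seek to the recorded start of the frame, then feed the stream to
    # decompress_chunk until it signals end of frame.
    fileobj.seek(file_off)
    ctx = lz4frame.create_decompression_context()
    decompressed = b''
    while True:
        chunk = fileobj.read(65536)
        if not chunk:
            break
        d, bytes_read, eof = lz4frame.decompress_chunk(ctx, chunk)
        decompressed += d
        if eof:
            break
    return decompressed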