# --- Snippet: ZstdDecompressor.stream_reader() read-size variance (hypothesis test) ---
def test_stream_source_read_variance(
    self, original, level, streaming, source_read_size, read_sizes
):
    # Arguments are supplied by hypothesis strategies (see the sketch below).
    cctx = zstd.ZstdCompressor(level=level)

    if streaming:
        # Build the frame through a stream_writer; unlike cctx.compress(),
        # this produces a frame whose header does not record the content size.
        source = io.BytesIO()
        writer = cctx.stream_writer(source)
        writer.write(original)
        writer.flush(zstd.FLUSH_FRAME)
        source.seek(0)
    else:
        frame = cctx.compress(original)
        source = io.BytesIO(frame)

    dctx = zstd.ZstdDecompressor()

    chunks = []
    with dctx.stream_reader(source, read_size=source_read_size) as reader:
        while True:
            read_size = read_sizes.draw(strategies.integers(-1, 131072))
            chunk = reader.read(read_size)
            # read(0) legitimately returns b"" before EOF, so only treat an
            # empty result as end-of-stream when a non-zero read was requested.
            if not chunk and read_size:
                break

            chunks.append(chunk)

    self.assertEqual(b"".join(chunks), original)
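# The test above is property-based: original, level, streaming,
# source_read_size and read_sizes are injected by hypothesis. The @given
# decorator is not part of the excerpt; the standalone round-trip check below
# is a minimal sketch in the same spirit, and its strategy bounds and function
# name are illustrative assumptions, not the suite's actual wiring.

import io
import hypothesis
from hypothesis import strategies
import zstandard as zstd

@hypothesis.given(
    original=strategies.binary(max_size=262144),
    level=strategies.integers(min_value=1, max_value=5),
    read_size=strategies.integers(min_value=-1, max_value=131072),
)
def check_stream_reader_roundtrip(original, level, read_size):
    frame = zstd.ZstdCompressor(level=level).compress(original)
    chunks = []
    with zstd.ZstdDecompressor().stream_reader(io.BytesIO(frame)) as reader:
        while True:
            # A drawn size of 0 would loop forever, so fall back to 8192.
            chunk = reader.read(read_size if read_size else 8192)
            if not chunk:
                break
            chunks.append(chunk)
    assert b"".join(chunks) == original

# Calling check_stream_reader_roundtrip() runs the property over many inputs.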
# --- Separate snippet: write()/flush() return values of ZstdCompressor.stream_writer() ---
# The excerpt begins mid-test. The buffer/cctx setup and the b"foo" and b"bar"
# writes inside the context manager are reconstructed here from the mirrored
# "without context manager" block below; the compression level is an
# assumption and is not part of the original excerpt.
buffer = io.BytesIO()
cctx = zstd.ZstdCompressor(level=5)  # level assumed

with cctx.stream_writer(buffer) as compressor:
    self.assertEqual(compressor.write(b"foo"), 0)
    self.assertEqual(compressor.write(b"bar"), 0)
    # write() returns the number of compressed bytes sent to the sink; small
    # writes stay in the compressor's internal buffer, so these return 0.
    self.assertEqual(compressor.write(b"x" * 8192), 0)

result = buffer.getvalue()
self.assertEqual(
    result,
    b"\x28\xb5\x2f\xfd\x00\x58\x75\x00\x00\x38\x66\x6f"
    b"\x6f\x62\x61\x72\x78\x01\x00\xfc\xdf\x03\x23",
)

# Test without context manager.
buffer = io.BytesIO()
compressor = cctx.stream_writer(buffer)
self.assertEqual(compressor.write(b"foo"), 0)
self.assertEqual(compressor.write(b"bar"), 0)
self.assertEqual(compressor.write(b"x" * 8192), 0)
# FLUSH_FRAME ends the frame; flush() returns the compressed bytes emitted.
self.assertEqual(compressor.flush(zstd.FLUSH_FRAME), 23)
result = buffer.getvalue()
self.assertEqual(
    result,
    b"\x28\xb5\x2f\xfd\x00\x58\x75\x00\x00\x38\x66\x6f"
    b"\x6f\x62\x61\x72\x78\x01\x00\xfc\xdf\x03\x23",
)

# Test with write_return_read=True: write() now reports how many input bytes
# were consumed rather than how many compressed bytes were emitted.
compressor = cctx.stream_writer(buffer, write_return_read=True)
self.assertEqual(compressor.write(b"foo"), 3)
self.assertEqual(compressor.write(b"barbiz"), 6)
self.assertEqual(compressor.write(b"x" * 8192), 8192)
# --- Separate snippet: draining ZstdDecompressor.stream_reader() in fixed-size reads ---
def test_partial_read(self):
    # Inspired by https://github.com/indygreg/python-zstandard/issues/71.
    buffer = io.BytesIO()
    cctx = zstd.ZstdCompressor()
    writer = cctx.stream_writer(buffer)
    writer.write(bytearray(os.urandom(1000000)))
    writer.flush(zstd.FLUSH_FRAME)
    buffer.seek(0)

    dctx = zstd.ZstdDecompressor()
    reader = dctx.stream_reader(buffer)

    # Reading the frame in 8 KiB pieces must terminate with an empty read at
    # end of frame rather than raising.
    while True:
        chunk = reader.read(8192)
        if not chunk:
            break
# --- Separate snippet: excerpt from an NSP/NCA compression routine ---
# Taken from the middle of a per-file loop: nspf (the source file), sections,
# CHUNK_SZ, t (a progress bar), compressor (a cctx.stream_writer over the
# output file f), written, decompressedBytes, start, timestamp, newNsp,
# newFileName and sq_tools are all defined earlier in the routine (not shown).
for section in sections:
    # print('offset: %x\t\tsize: %x\t\ttype: %d\t\tiv%s' % (
    #     section.offset, section.size, section.cryptoType, str(hx(section.cryptoCounter))))
    o = nspf.partition(
        offset=section.offset, size=section.size, n=None,
        cryptoType=section.cryptoType, cryptoKey=section.cryptoKey,
        cryptoCounter=bytearray(section.cryptoCounter), autoOpen=True,
    )
    while not o.eof():
        buffer = o.read(CHUNK_SZ)
        t.update(len(buffer))
        if len(buffer) == 0:
            raise IOError('read failed')
        written += compressor.write(buffer)
        decompressedBytes += len(buffer)

t.close()
# End the zstd frame so the compressed section data forms a complete frame.
compressor.flush(zstandard.FLUSH_FRAME)

elapsed = time.time() - timestamp
minutes = elapsed // 60
seconds = elapsed % 60
speed = 0 if elapsed == 0 else (nspf.size / elapsed)
written = f.tell() - start
print('\n * Compressed at %d%% from %s to %s - %s' % (
    int(written * 100 / nspf.size), str(sq_tools.getSize(decompressedBytes)),
    str(sq_tools.getSize(written)), nspf._path))
print(' * Compressed in %02d:%02d at speed: %.1f MB/s\n' % (
    minutes, seconds, speed / 1000000.0))
newNsp.resize(newFileName, written)
continue  # next file in the enclosing loop (not shown)
else:  # branch of the enclosing "is this NCA packed?" check (not shown)
    print('not packed!')
f = newNsp.add(nspf._path, nspf.size)
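# The excerpt above follows a common python-zstandard pattern: pump fixed-size
# chunks from a source into cctx.stream_writer(sink) and finish the frame with
# flush(FLUSH_FRAME). A self-contained sketch of that pattern (file-to-file;
# the helper name, default level and chunk size are illustrative, not taken
# from the excerpt):

import zstandard

def compress_file_sketch(src_path, dst_path, level=19, chunk_size=0x100000):
    """Hypothetical helper showing the chunked stream_writer pattern."""
    cctx = zstandard.ZstdCompressor(level=level)
    written = 0
    with open(src_path, "rb") as src, open(dst_path, "wb") as dst:
        compressor = cctx.stream_writer(dst)
        while True:
            buffer = src.read(chunk_size)
            if not buffer:
                break
            # write() returns the number of compressed bytes sent to dst.
            written += compressor.write(buffer)
        # End the frame; without this the output would stop mid-frame.
        written += compressor.flush(zstandard.FLUSH_FRAME)
    return written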
# --- Separate snippet: generator yielding compressed chunks for an iterable ---
def zstd_compress_stream(sequence):
    # StreamingBuffer and DEFAULT_LEVEL are defined elsewhere in the source
    # module; a plausible StreamingBuffer is sketched below.
    buf = StreamingBuffer()
    cctx = zstd.ZstdCompressor(level=DEFAULT_LEVEL)
    with cctx.stream_writer(buf, write_return_read=False) as compressor:
        yield buf.read()
        for item in sequence:
            # With write_return_read=False, write() returns the number of
            # compressed bytes pushed into buf, so yield only when there is
            # new output to hand back.
            if compressor.write(item):
                yield buf.read()
        compressor.flush(zstd.FLUSH_FRAME)
        yield buf.read()
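# StreamingBuffer is not shown in the excerpt. Below is a minimal sketch of the
# interface the generator relies on (write() as the compressor's sink, read()
# draining whatever has been compressed so far); the implementation details
# here are assumptions:

class StreamingBuffer:
    """Collects bytes written by the compressor; read() drains and resets it."""

    def __init__(self):
        self._chunks = []

    def write(self, data):
        self._chunks.append(bytes(data))
        return len(data)

    def read(self):
        data = b"".join(self._chunks)
        self._chunks.clear()
        return data

# Example use of the generator, forwarding compressed chunks as they appear:
#
#     for chunk in zstd_compress_stream(iterable_of_byte_chunks):
#         if chunk:
#             sink.write(chunk)  # hypothetical destination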