How to use the lz4.frame.decompress function in lz4

To help you get started, we've selected a few lz4 examples based on popular ways the library is used in public projects.
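
Before the project examples, here is a minimal round trip showing the core API (a quick sketch; the payload is illustrative):

import lz4.frame

data = b'hello ' * 1000
compressed = lz4.frame.compress(data)

# decompress() takes a complete LZ4 frame and returns the original bytes
restored = lz4.frame.decompress(compressed)
assert restored == data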


From python-lz4/python-lz4 on GitHub: tests/frame/test_frame_0.py
def test_decompress_return_type_1():
    c = lz4frame.compress(b'', return_bytearray=False)
    r = lz4frame.decompress(
        c,
        return_bytearray=False,
        return_bytes_read=False
    )
    assert isinstance(r, bytes)
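
Both calls above accept return_bytearray; set it to True and decompress hands back a mutable bytearray instead of bytes (a sketch, with an illustrative payload):

import lz4.frame as lz4frame

c = lz4frame.compress(b'payload')
r = lz4frame.decompress(c, return_bytearray=True)
assert isinstance(r, bytearray)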
From python-lz4/python-lz4 on GitHub: tests/frame/test_frame_1.py
    # The listing truncates the opening of this call; reconstructed from the checks below.
    compressed = lz4frame.compress(
        data,
        store_size=store_size,
        block_size=block_size,
        block_linked=block_linked,
        content_checksum=content_checksum,
        block_checksum=block_checksum,
    )

    get_frame_info_check(
        compressed,
        len(data),
        store_size,
        block_size,
        block_linked,
        content_checksum,
        block_checksum,
    )
    decompressed, bytes_read = lz4frame.decompress(
        compressed, return_bytes_read=True)
    assert bytes_read == len(compressed)
    assert decompressed == data
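
The get_frame_info_check call above is test scaffolding that appears to wrap lz4.frame.get_frame_info, which parses a frame header without decompressing the payload (a sketch; consult the python-lz4 docs for the exact dictionary keys):

import lz4.frame as lz4frame

frame = lz4frame.compress(b'x' * 100, content_checksum=True)
info = lz4frame.get_frame_info(frame)
# info is a dict describing the header: block size, linking, checksum flags, content size
print(info)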
From ooni/pipeline on GitHub: af/shovel/check_sanitised.py
            if t == "datum":
                # (branch opening reconstructed; the listing truncates this excerpt)
                intra_off = doc["text_off"] - text_off
                datum = blob[intra_off : intra_off + doc["text_size"]]
                datum = ujson.loads(datum)
                doc["frame_off"] = frame_off
                doc["frame_size"] = frame_size
                doc["intra_off"] = intra_off
                doc["intra_size"] = doc["text_size"]
                doc["datum"] = datum
                yield DATUM, doc
                del intra_off, datum

            elif t == "frame":
                # {"file_off": 0, "file_size": 162864, "text_off": 0, "text_size": 362462, … }
                frame_off, frame_size = doc["file_off"], doc["file_size"]
                assert filefd.tell() == frame_off
                blob = filefd.read(frame_size)
                assert len(blob) == frame_size
                blob = lz4frame.decompress(blob)
                assert len(blob) == doc["text_size"]
                text_off = doc["text_off"]

            elif t == "/frame":
                del frame_off, frame_size, text_off, blob

            elif t == "report":
                # {"orig_sha1": "HO…U=",
                #  "src_size": 104006450,
                #  "textname": "2017-01-01/20161231T000030Z-US-AS…-0.2.0-probe.json", …}
                yield REPORT_START, doc

            elif t == "/report":
                # {"info": "",
                #  "src_cutoff": 49484700, … }
                yield REPORT_END, doc
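
The pattern above depends on an index that records each frame's byte range in the container file; with such an entry, a single frame can be decompressed in isolation (a sketch; the file name and offsets are hypothetical):

import lz4.frame as lz4frame

frame_off, frame_size = 0, 512  # hypothetical index entry for one frame

with open('autoclaved.blob', 'rb') as fd:  # hypothetical container file
    fd.seek(frame_off)
    blob = fd.read(frame_size)

text = lz4frame.decompress(blob)  # each frame decompresses independently of the rest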
From CoffeaTeam/coffea on GitHub: coffea/processor/executor.py
def _maybe_decompress(item):
    if isinstance(item, AccumulatorABC):
        return item
    try:
        item = pickle.loads(lz4f.decompress(item))
        if isinstance(item, AccumulatorABC):
            return item
        raise RuntimeError
    except (RuntimeError, pickle.UnpicklingError):
        raise ValueError("Executors can only reduce accumulators or LZ4-compressed pickled accumulators")
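
_maybe_decompress assumes the producer side pickled each accumulator and then frame-compressed the pickle. A minimal sketch of both directions (the object is illustrative):

import pickle
import lz4.frame as lz4f

obj = {'events': 42}
blob = lz4f.compress(pickle.dumps(obj))             # what producers would send
roundtripped = pickle.loads(lz4f.decompress(blob))  # what _maybe_decompress undoes
assert roundtripped == obj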
From ray-project/ray on GitHub: python/ray/rllib/utils/compression.py
def unpack(data):
    if LZ4_ENABLED:
        data = base64.b64decode(data)
        data = lz4.frame.decompress(data)
        data = pyarrow.deserialize(data)
    return data
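
unpack reverses a pack step that base64-encodes the compressed payload so it survives text-only transports. A sketch of the symmetric pair, leaving out RLlib's pyarrow serialization layer:

import base64
import lz4.frame

def pack(payload: bytes) -> bytes:
    return base64.b64encode(lz4.frame.compress(payload))

def unpack(data: bytes) -> bytes:
    return lz4.frame.decompress(base64.b64decode(data))

assert unpack(pack(b'rollout')) == b'rollout'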
From adobe/sbmc on GitHub: sbmc/datasets.py
def _read_compressed(self, fid):
        """The sample and image data is compressed using lz4, decompress it.

        Args:
            fid(filepointer): pointer to the open .bin file.
        """
        nbytes = struct.unpack('i', fid.read(4))[0]
        buf = fid.read(nbytes)
        data = lz4.frame.decompress(buf)
        return data
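
The writer implied by _read_compressed prefixes each compressed record with its byte length, which is what lets the reader issue an exact-size read. A sketch of both halves (the file name is hypothetical):

import struct
import lz4.frame

record = b'sample data' * 100

with open('records.bin', 'wb') as f:         # hypothetical file
    buf = lz4.frame.compress(record)
    f.write(struct.pack('i', len(buf)))      # 4-byte length prefix, as in the excerpt
    f.write(buf)

with open('records.bin', 'rb') as f:
    nbytes = struct.unpack('i', f.read(4))[0]
    data = lz4.frame.decompress(f.read(nbytes))

assert data == record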
From ooni/pipeline on GitHub: af/shovel/centrifugation.py
            with open(os.path.join(autoclaved_root, filename)) as fd:
                fd = ChecksummingTee(fd, NopTeeFd)
                for (frame_off, frame_size), itframe in groupby(
                    itfile, itemgetter(4, 5)
                ):
                    fd.seek(frame_off)
                    blob = fd.read(frame_size)
                    if len(blob) != frame_size:
                        raise RuntimeError(
                            "Unexpected end of file",
                            filename,
                            frame_off,
                            frame_size,
                            len(blob),
                        )
                    blob = lz4frame.decompress(blob)
                    for (
                        _,
                        _,
                        _,
                        _,
                        _,
                        _,
                        intra_off,
                        intra_size,
                        code_ver,
                        autoclaved_no,
                        report_no,
                        msm_no,
                    ) in itframe:
                        datum = blob[intra_off : intra_off + intra_size]
                        if len(datum) != intra_size:
                            # (body truncated in the listing; presumably an error is
                            # raised here, mirroring the frame-size check above)
                            raise RuntimeError(
                                "Unexpected end of frame",
                                filename,
                                intra_off,
                                intra_size,
                                len(datum),
                            )
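
When frame offsets are not indexed and the file is one continuous compressed stream, python-lz4 also offers a file-like reader instead of whole-buffer decompress (a sketch; the path is hypothetical):

import lz4.frame

with lz4.frame.open('payload.lz4', mode='rb') as f:  # hypothetical single-stream file
    data = f.read()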
From CoffeaTeam/coffea on GitHub: coffea/processor/spark/spark_executor.py
def reduce_histos_raw(df, processor_instance, lz4_clevel):
    histos = df['histos']
    mask = (histos.str.len() > 0)
    outhist = processor_instance.accumulator.identity()
    for line in histos[mask]:
        outhist.add(pkl.loads(lz4f.decompress(line)))
    return pd.DataFrame(data={
        'histos': np.array(
            [lz4f.compress(pkl.dumps(outhist), compression_level=lz4_clevel)],
            dtype='O',
        )
    })
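
The compress call here exposes compression_level, which trades CPU time for ratio; decompress needs no matching parameter because the frame is self-describing. A small sketch:

import lz4.frame as lz4f

blob = lz4f.compress(b'x' * 10000, compression_level=9)
assert lz4f.decompress(blob) == b'x' * 10000  # no level needed on the way back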
From Oslandia/py3dtiles on GitHub: py3dtiles/points/task/pnts_writer.py
def run(sender, data, node_name, folder, write_rgb):
    # we can safely write the .pnts file
    if len(data):
        root = pickle.loads(gzip.decompress(data))
        # print('write ', node_name.decode('ascii'))
        total = 0
        for name in root:
            node = _DummyNode(pickle.loads(root[name]))
            total += node_to_pnts(name, node, folder, write_rgb)[0]

        sender.send_multipart([b'pnts', struct.pack('>I', total), node_name])
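
Note that this final excerpt decompresses its payload with gzip rather than lz4. The lz4.frame version of the same unpickle-after-decompress pattern would look like this (a sketch):

import pickle
import lz4.frame

def load_root(data: bytes):
    # same pattern as above, with lz4.frame.decompress in place of gzip.decompress
    return pickle.loads(lz4.frame.decompress(data))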