How to use the distributed.protocol.serialize.dask_serialize.register function in distributed

To help you get started, we’ve selected a few distributed examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github dask / distributed / distributed / protocol / netcdf4.py View on Github external
@dask_serialize.register(netCDF4.Variable)
def serialize_netcdf4_variable(x):
    """Serialize a netCDF4 Variable as its parent group's header plus the variable name.

    The parent group is serialized first; its ``type``/``type-serialized`` entries
    are copied into ``parent-type``/``parent-type-serialized`` so the variable's
    own type information can overwrite them downstream. No frames are needed.
    """
    group_header, _ = serialize(x.group())
    group_header["parent-type"] = group_header["type"]
    group_header["parent-type-serialized"] = group_header["type-serialized"]
    group_header["name"] = x.name
    return group_header, []
github dask / distributed / distributed / protocol / numpy.py View on Github external
@dask_serialize.register(np.ma.core.MaskedConstant)
def serialize_numpy_ma_masked(x):
    """Serialize numpy's masked singleton: it carries no payload, so an
    empty header and an empty frame list suffice."""
    header = {}
    frames = []
    return header, frames
github dask / distributed / distributed / protocol / keras.py View on Github external
@dask_serialize.register(keras.Model)
def serialize_keras_model(model):
    """Serialize a keras Model into (header, frames).

    The header is the model's config dict (from ``_updated_config``) augmented
    with the per-weight sub-headers and a frame count per weight; the frames
    are the concatenated frames of every serialized weight array.

    Raises
    ------
    ImportError
        If the installed Keras is older than 1.2.0.
    """
    import keras

    def _major_minor(version):
        # Leading integer of the first two dotted components, so that
        # e.g. "2.4.3rc1" -> (2, 4). A plain string comparison would be
        # wrong here: "1.10.0" < "1.2.0" lexicographically.
        parts = []
        for piece in version.split(".")[:2]:
            digits = ""
            for ch in piece:
                if ch.isdigit():
                    digits += ch
                else:
                    break
            parts.append(int(digits or 0))
        return tuple(parts)

    if _major_minor(keras.__version__) < (1, 2):
        raise ImportError(
            "Need Keras >= 1.2.0. Try python -m pip install keras --upgrade --no-deps"
        )

    header = model._updated_config()
    weights = model.get_weights()
    headers, frames = list(zip(*map(serialize, weights)))
    header["headers"] = headers
    # Record how many frames each weight contributed so deserialization
    # can split the flat frame list back per-weight.
    header["nframes"] = [len(L) for L in frames]
    frames = [frame for L in frames for frame in L]
    return header, frames
github dask / distributed / distributed / protocol / h5py.py View on Github external
@dask_serialize.register(h5py.File)
def serialize_h5py_file(f):
    """Serialize a read-only h5py File by its filename alone; the receiving
    side is expected to reopen the file from disk.

    Raises
    ------
    ValueError
        If the file was opened in any mode other than read-only.
    """
    if f.mode == "r":
        return {"filename": f.filename}, []
    raise ValueError("Can only serialize read-only h5py files")
github dask / distributed / distributed / protocol / arrow.py View on Github external
@dask_serialize.register(pyarrow.RecordBatch)
def serialize_batch(batch):
    """Serialize a pyarrow RecordBatch to the Arrow IPC stream format.

    Returns an empty header and a single frame holding the encoded stream
    (schema message + batch), produced via a BufferOutputStream.
    """
    sink = pyarrow.BufferOutputStream()
    writer = pyarrow.RecordBatchStreamWriter(sink, batch.schema)
    try:
        writer.write_batch(batch)
    finally:
        # Close the writer even if write_batch raises, so the stream is
        # finalized and the writer's resources are released.
        writer.close()
    buf = sink.getvalue()
    header = {}
    frames = [buf]
    return header, frames
github dask / distributed / distributed / protocol / sparse.py View on Github external
@dask_serialize.register(sparse.COO)
def serialize_sparse(x):
    """Serialize a sparse COO array by serializing its coords and data arrays.

    The header records both sub-headers, the logical shape, and how many
    frames each sub-serialization produced so the flat frame list can be
    split back apart on deserialization.
    """
    coords_header, coords_frames = serialize(x.coords)
    data_header, data_frames = serialize(x.data)

    header = {}
    header["coords-header"] = coords_header
    header["data-header"] = data_header
    header["shape"] = x.shape
    header["nframes"] = [len(coords_frames), len(data_frames)]
    return header, coords_frames + data_frames
github dask / distributed / distributed / protocol / cupy.py View on Github external
    @dask_serialize.register(MatDescriptor)
    def serialize_cupy_matdescriptor(x):
        """Serialize a cupy MatDescriptor: it has no transferable payload,
        so both the header and the frame list are empty."""
        return {}, []
github dask / distributed / distributed / protocol / rmm.py View on Github external
    @dask_serialize.register(rmm.DeviceBuffer)
    def dask_serialize_rmm_device_buffer(x):
        """Serialize an rmm DeviceBuffer for host-side transport: reuse the
        CUDA serializer's header, then copy each device frame to host memory."""
        header, device_frames = cuda_serialize_rmm_device_buffer(x)
        host_frames = [
            numba.cuda.as_cuda_array(frame).copy_to_host().data
            for frame in device_frames
        ]
        return header, host_frames
github dask / distributed / distributed / protocol / numba.py View on Github external
@dask_serialize.register(numba.cuda.devicearray.DeviceNDArray)
def dask_serialize_numba_ndarray(x):
    """Serialize a numba DeviceNDArray for host-side transport: reuse the
    CUDA serializer's header and replace each device frame with a host copy."""
    header, device_frames = cuda_serialize_numba_ndarray(x)
    host_frames = [memoryview(frame.copy_to_host()) for frame in device_frames]
    return header, host_frames
github dask / distributed / distributed / protocol / netcdf4.py View on Github external
@dask_serialize.register(netCDF4.Group)
def serialize_netcdf4_group(g):
    """Serialize a netCDF4 Group as its root dataset's header plus the
    group's path within that dataset. No frames are needed."""
    # Walk up to the root dataset (the node with no parent).
    root = g
    while root.parent:
        root = root.parent
    header, _ = serialize_netcdf4_dataset(root)
    header["path"] = g.path
    return header, []