How to use the cloudpickle.dumps function in cloudpickle

To help you get started, we’ve selected a few cloudpickle.dumps examples based on popular ways the function is used in public projects.

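Before diving into the project excerpts, here is a minimal, self-contained
round trip showing what cloudpickle.dumps returns and how to reverse it:

import cloudpickle

# cloudpickle can serialize objects the standard pickle module cannot,
# such as lambdas and functions defined in __main__.
square = lambda x: x * x

payload = cloudpickle.dumps(square)    # a plain bytes object
restored = cloudpickle.loads(payload)
assert restored(4) == 16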

Example from devitocodes/devito, tests/test_pickle.py:
    # Excerpt from a pickling round-trip test; here `pickle` is presumably
    # cloudpickle imported under that name.
    assert np.isclose(np.linalg.norm(model.vp - new_model.vp), 0)

    f0 = .010
    dt = model.critical_dt
    t0 = 0.0
    tn = 350.0
    time_range = TimeAxis(start=t0, stop=tn, step=dt)

    # Test TimeAxis pickling
    pkl_time_range = pickle.dumps(time_range)
    new_time_range = pickle.loads(pkl_time_range)
    assert np.isclose(np.linalg.norm(time_range.time_values),
                      np.linalg.norm(new_time_range.time_values))

    # Test Class Constant pickling
    pkl_origin = pickle.dumps(model.grid.origin)
    new_origin = pickle.loads(pkl_origin)

    for a, b in zip(model.grid.origin, new_origin):
        assert a.compare(b) == 0

    # Test Class TimeDimension pickling
    time_dim = TimeDimension(name='time', spacing=Constant(name='dt', dtype=np.float32))
    pkl_time_dim = pickle.dumps(time_dim)
    new_time_dim = pickle.loads(pkl_time_dim)
    assert time_dim.spacing._value == new_time_dim.spacing._value

    # Test Class SteppingDimension
    stepping_dim = SteppingDimension(name='t', parent=time_dim)
    pkl_stepping_dim = pickle.dumps(stepping_dim)
    new_stepping_dim = pickle.loads(pkl_stepping_dim)
    assert stepping_dim.is_Time == new_stepping_dim.is_Time

Example from descarteslabs/descarteslabs-python, descarteslabs/client/services/tasks/tasks.py:
def _serialize_function(function):
    # Note: in Python 3, cloudpickle and base64 work on bytes objects, so the
    # result must be decoded to a string before it can be JSON-dumped later.
    cp_version = getattr(cloudpickle, "__version__", None)
    if cp_version != "0.4.0":
        warn(
            "You must use version 0.4.0 of cloudpickle for compatibility "
            "with the Tasks client; found {}.".format(cp_version)
        )

    encoded_bytes = base64.b64encode(cloudpickle.dumps(function))
    return encoded_bytes.decode("ascii")
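
On the receiving side the transformation is simply reversed. A minimal sketch
(the _deserialize_function name is assumed here, not part of the library):

import base64
import cloudpickle

def _deserialize_function(encoded):
    # Reverse of _serialize_function above: base64 str -> bytes -> callable.
    return cloudpickle.loads(base64.b64decode(encoded))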

Example from medipixel/rl_algorithms, algorithms/common/env/multiprocessing_env.py:
def __getstate__(self):
        import cloudpickle

        return cloudpickle.dumps(self.x)
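
This is the usual wrapper pattern for sending environment factories through
multiprocessing. The excerpt omits the matching __setstate__; since cloudpickle
emits a standard pickle stream, it would look roughly like this:

def __setstate__(self, x):
    import pickle
    # cloudpickle's output can be read back by the stdlib pickle module.
    self.x = pickle.loads(x)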

Example from opensistemas-hub/osbrain, osbrain/agent.py:
    # The excerpt begins mid-signature; the leading parameters (self, name,
    # nsaddr) are inferred from the constructor body below.
    def __init__(
        self,
        name,
        nsaddr=None,
        addr=None,
        serializer=None,
        transport=None,
        base=Agent,
        attributes=None,
    ):
        super().__init__()
        self.name = name
        self._daemon = None
        self._host, self.port = address_to_host_port(addr)
        if self.port is None:
            self.port = 0
        self.nsaddr = nsaddr
        self._serializer = serializer
        self._transport = transport
        # Serialize the base class with cloudpickle so it can cross the
        # process boundary.
        self.base = cloudpickle.dumps(base)
        self._shutdown_event = multiprocessing.Event()
        self._queue = multiprocessing.Queue()
        self._sigint = False
        self.attributes = attributes

Example from codepr/tasq, tasq/remote/sockets.py:
def pickle_and_compress(data: object) -> bytes:
    """Pickle data with cloudpickle to bytes and then compress the resulting
    stream with zlib
    """
    pickled_data = cloudpickle.dumps(data)
    zipped_data = zlib.compress(pickled_data)
    return zipped_data
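
The inverse is symmetric. A sketch (the decompress_and_unpickle name is
assumed here, not taken from the library):

import zlib
import cloudpickle

def decompress_and_unpickle(data: bytes) -> object:
    """Inflate the zlib stream, then unpickle the original object."""
    return cloudpickle.loads(zlib.decompress(data))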

Example from nosyndicate/pytorchrl, pytorchrl/misc/instrument.py:
                # (Excerpt begins mid-expression: a single stub method call is
                # being wrapped into the batch_tasks list.)
                variant=variant,
                use_cloudpickle=use_cloudpickle
            )
        ]

    global exp_count
    global remote_confirmed
    config.USE_GPU = use_gpu

    for task in batch_tasks:
        call = task.pop("stub_method_call")
        if use_cloudpickle:
            import cloudpickle
            data = base64.b64encode(cloudpickle.dumps(call)).decode("utf-8")
        else:
            data = base64.b64encode(pickle.dumps(call)).decode("utf-8")
        task["args_data"] = data
        exp_count += 1
        params = dict(kwargs)
        if task.get("exp_name", None) is None:
            task["exp_name"] = "%s_%s_%04d" % (
                exp_prefix, timestamp, exp_count)
        if task.get("log_dir", None) is None:
            task["log_dir"] = config.LOG_DIR + "/local/" + \
                              exp_prefix.replace("_", "-") + "/" + task["exp_name"]
        if task.get("variant", None) is not None:
            variant = task.pop("variant")
            if "exp_name" not in variant:
                variant["exp_name"] = task["exp_name"]
            task["variant_data"] = base64.b64encode(pickle.dumps(variant)).decode("utf-8")

Example from DSC-SPIDAL/twister2, twister2/python-support/src/main/python/twister2/tset/fn/factory/PartitionFunctions.py:
def build(self, partition_func: PartitionFunc):
        # Serialize the partition function with cloudpickle (imported as cp),
        # send the bytes to the JVM, and get back a reference to the Java object.
        return self.__java_ref.build(cp.dumps(partition_func))

Example from opensistemas-hub/osbrain, osbrain/agent.py:
    # Excerpt from a serializer dispatch table; the -1 argument selects the
    # highest pickle protocol available.
    'cloudpickle': lambda message: cloudpickle.dumps(message, -1),
    'dill': lambda message: dill.dumps(message, -1),
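
A matching table for the read side would look along these lines (a sketch; the
DESERIALIZERS name is assumed):

import cloudpickle
import dill

DESERIALIZERS = {
    'cloudpickle': lambda message: cloudpickle.loads(message),
    'dill': lambda message: dill.loads(message),
}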

Example from emlynoregan/appenginetaskutils, taskutils/future.py:
    futureobj = _Future(key=newkey)  # use the immediate ancestor to keep
                                     # entity groups local, not one per tree
    futureobj.parentkey = parentkey  # but keep the real parent key for lookups

    # Serialize each supplied callback with cloudpickle so it can be stored
    # on the datastore entity and restored when the future runs.
    if onsuccessf:
        futureobj.onsuccessfser = cloudpickle.dumps(onsuccessf)
    if onfailuref:
        futureobj.onfailurefser = cloudpickle.dumps(onfailuref)
    if onallchildsuccessf:
        futureobj.onallchildsuccessfser = cloudpickle.dumps(onallchildsuccessf)
    if onprogressf:
        futureobj.onprogressfser = cloudpickle.dumps(onprogressf)
    futureobj.taskkwargsser = cloudpickle.dumps(taskkwargs)

    futureobj.timeoutsec = timeoutsec
    futureobj.name = futurename
    futureobj.put()
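
When the future later runs, each stored blob is deserialized again. A sketch
(the restore_callbacks name is assumed; the attribute names come from the
excerpt above):

import cloudpickle

def restore_callbacks(futureobj):
    onsuccessf = cloudpickle.loads(futureobj.onsuccessfser)
    taskkwargs = cloudpickle.loads(futureobj.taskkwargsser)
    return onsuccessf, taskkwargs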

Example from vaexio/vaex, packages/vaex-core/vaex/expression.py:
def pickle(self, function):
        # In this module `pickle` presumably refers to cloudpickle imported
        # under that name, so lambdas and locally defined functions work too.
        return pickle.dumps(function)
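
The alias matters: the standard pickle module cannot serialize a lambda, which
is exactly the kind of callable such a method is asked to handle. A quick
demonstration:

import pickle
import cloudpickle

f = lambda x: x + 1
try:
    pickle.dumps(f)
except Exception as exc:
    # plain pickle fails on a lambda defined in __main__
    print("pickle failed:", exc)

print(len(cloudpickle.dumps(f)), "bytes via cloudpickle")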