How to use the cloudpickle.loads function in cloudpickle

To help you get started, we’ve selected a few cloudpickle examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github DeepX-inc / machina / tests / test_cloud_pickle.py View on Github external
def tearDownClass(cls):
        # Round-trip every artifact stored in redis by the test setup:
        # a failure to unpickle any of them should surface here.
        # NOTE(review): presumably cls.r is a redis client set up in setUpClass — confirm.
        for key in ('env', 'traj', 'gpol', 'dpol', 'mpcpol',
                    'qfunc', 'aqpol', 'vfunc', 'mcpol'):
            cloudpickle.loads(cls.r.get(key))
github pygridtools / gridmap / gridmap / data.py View on Github external
def zloads(pickled_data):
    """
    loads pickleable object from bz2 compressed string

    :param pickled_data: BZ2 compressed byte sequence
    :type pickled_data: bytes

    :returns: An unpickled version of the compressed byte sequence.
    """
    # Decompress first, then unpickle the raw payload.
    # NOTE(review): unpickling is only safe on trusted data — confirm callers.
    raw = bz2.decompress(pickled_data)
    return cloudpickle.loads(raw)
github tbenthompson / taskloaf / taskloaf / mpi.py View on Github external
def mpistart(f, i):
    """Worker entry point: unpickle callable ``f`` and invoke it with a fresh MPIComm."""
    comm = MPIComm()
    task = cloudpickle.loads(f)
    return task(comm)
github ray-project / ray / streaming / python / operator.py View on Github external
def logic(self):
        # Reconstruct the operator's logic object from its pickled representation.
        payload = self._logic
        return cloudpickle.loads(payload)
github PrefectHQ / prefect / src / prefect / environments / local.py View on Github external
def deserialize_flow_from_bytes(self, serialized_flow: bytes) -> "prefect.Flow":
        """
        Deserializes a Flow from binary.

        Args:
            - serialized_flow (bytes): the Flow to deserialize

        Returns:
            - Flow: the deserialized Flow
        """
        # base64 text -> encrypted pickle bytes
        decoded_pickle = base64.b64decode(serialized_flow)
        # Symmetric decryption with the environment's Fernet key; raises if the
        # key does not match the one used at serialization time.
        decrypted_pickle = Fernet(self.encryption_key).decrypt(decoded_pickle)
        # NOTE(review): unpickling is only safe because the payload is
        # authenticated/encrypted with our own key.
        flow = cloudpickle.loads(decrypted_pickle)
        return flow
github PrefectHQ / prefect / src / prefect / engine / cloud / result_handler.py View on Github external
Read a result from the given URI location.

        Args:
            - uri (str): the path to the location of a result

        Returns:
            - the deserialized result from the provided URI
        """
        self._initialize_client()
        self.logger.debug("Starting to read result from {}...".format(uri))
        res = self._client.get(  # type: ignore
            "/", server=self.result_handler_service, **{"uri": uri}
        )

        try:
            return_val = cloudpickle.loads(base64.b64decode(res.get("result", "")))
        except EOFError:
            return_val = None
        self.logger.debug("Finished reading result from {}...".format(uri))

        return return_val
github PrefectHQ / prefect / src / prefect / environments / storage / gcs.py View on Github external
if not flow_location in self.flows.values():
            raise ValueError("Flow is not contained in this Storage")

        bucket = self._gcs_client.get_bucket(self.bucket)

        self.logger.info("Downloading {} from {}".format(flow_location, self.bucket))

        blob = bucket.get_blob(flow_location)
        if not blob:
            raise StorageError(
                "Flow not found in bucket: flow={} bucket={}".format(
                    flow_location, self.bucket
                )
            )
        content = blob.download_as_string()
        return cloudpickle.loads(content)
github PrefectHQ / prefect / src / prefect / engine / result_handlers / local_result_handler.py View on Github external
Returns:
            - the read result from the provided file
        """

        # based on the path given, this may never work?!? that is, if the abs path is given then what's the point?
        # FIX: this should be based on a key within the stored constructor dir

        # if self.accumulate:
        #     sequence = '0'
        #     for dir in sorted(glob.glob(os.path.join(self.dir, "*") + os.path.sep), reverse=True):
        #         sequence = dir
        #         break

        self.logger.debug("Starting to read result from {}...".format(fpath))
        with open(fpath, "rb") as f:
            val = cloudpickle.loads(f.read())
        self.logger.debug("Finished reading result from {}...".format(fpath))
        return val
github tbenthompson / taskloaf / examples / client_server.py View on Github external
def launch_job(f):
        """Serialize ``f``, send it to the server over a ZMQ REQ socket, and return the unpickled reply."""
        with ctx.socket(zmq.REQ) as send_socket:
            send_socket.connect(bind_addr)
            # REQ/REP: one send must be matched by one recv.
            send_socket.send(cloudpickle.dumps(f))
            reply_bytes = send_socket.recv()
            return cloudpickle.loads(reply_bytes)