How to use the cloudpickle.CloudPickler class in cloudpickle

To help you get started, we’ve selected a few cloudpickle examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github dputhier / pygtftk / pygtftk / cmd_manager.py View on Github external
def dump_plugins(self):
        """Serialize the registered plugins to a pickle file.

        Writes the tuple ``(self.cmd_obj_list, self.parser)`` to
        ``CmdManager.dumped_plugin_path`` using cloudpickle, which can
        pickle objects (e.g. closures) the stdlib pickler cannot.
        """

        message("Dumping plugins", force=True)

        # Use a context manager so the handle is closed even if pickling
        # raises (the previous open()/close() pair leaked it on error).
        with open(CmdManager.dumped_plugin_path, "wb") as f_handler:
            pick = cloudpickle.CloudPickler(f_handler)
            pick.dump((self.cmd_obj_list, self.parser))
        # self.load_plugins()
github dagster-io / dagster / python_modules / dagma / dagma / serialize.py View on Github external
def serialize(obj):
    """Serializes an object and all of its dependencies.

    Args:
        obj (object): The object to serialize

    Returns:
        (bytes): The serialized representation of the object and its
        dependencies (cloudpickled payload plus per-module data).
    """
    # Cloudpickle the object into an in-memory buffer; the pickler also
    # records which modules the object graph touched.
    buffer = StringIO()
    pickler = CloudPickler(buffer, -1)
    pickler.dump(obj)

    # Collect the on-disk paths of every module the pickler visited.
    analyzer = ModuleDependencyAnalyzer()
    for mod in pickler.modules:
        analyzer.add(mod.__name__)
    module_paths = analyzer.get_and_clear_paths()

    payload = {
        'obj': buffer.getvalue(),
        'module_data': create_mod_data(module_paths),
    }
    return pickle.dumps(payload, -1)
github bmabey / provenance / provenance / _dependencies.py View on Github external
import io
import pickle

import cloudpickle

from . import repos as r

Pickler = cloudpickle.CloudPickler


class DependencyWalker(Pickler):
    """Pickler that walks an object graph to collect Artifact dependencies.

    The pickle byte stream itself is discarded (written to an in-memory
    buffer); the point is the traversal, during which Artifacts and
    artifact proxies are recorded in ``self.dependents``.
    """

    def __init__(self):
        # Throwaway sink for the pickle byte stream.
        self.stream = io.BytesIO()
        # Artifacts (and proxied artifacts) encountered during the walk.
        self.dependents = []
        self.branches = []
        protocol = pickle.DEFAULT_PROTOCOL
        Pickler.__init__(self, self.stream, protocol=protocol)

    def save(self, obj):
        # Intercept Artifacts and artifact proxies instead of pickling them.
        # NOTE(review): this snippet appears truncated — the original
        # presumably falls through to Pickler.save(obj) for every other
        # object; confirm against the full source before reusing.
        if isinstance(obj, r.Artifact):
            self.dependents.append(obj)
        elif r.is_proxy(obj):
            self.dependents.append(obj.artifact)
github bmabey / provenance / provenance / hashing.py View on Github external
import types
from functools import singledispatch

import cloudpickle


@singledispatch
def value_repr(obj):
    """Return a representation of *obj* suitable for hashing.

    If the object exposes a callable ``value_repr`` attribute, the result
    of calling it is used; otherwise the object itself is returned.
    """
    custom = getattr(obj, 'value_repr', None)
    if not callable(custom):
        return obj
    return custom()


Pickler = cloudpickle.CloudPickler


class _ConsistentSet(object):
    """ Class used to ensure the hash of Sets is preserved
        whatever the order of its items.
    """

    def __init__(self, _set):
        # Forces order of elements in set to ensure consistent hash.
        # Remember the concrete set type (set/frozenset/subclass) so it
        # can take part in the eventual representation.
        self._type = type(_set)
        try:
            # Trying first to order the set assuming the type of elements is
            # consistent and orderable.
            # This fails on python 3 when elements are unorderable
            # but we keep it in a try as it's faster.
            # NOTE(review): snippet is truncated here — the matching
            # ``except`` clause (the fallback ordering for unorderable
            # elements) is not visible; consult the full source.
            self._sequence = sorted(_set)
github ucbrise / clipper / clipper_admin / clipper_admin / deployers / deployer_utils.py View on Github external
def save_python_function(name, func):
    """Cloudpickle ``func`` and write it into a fresh temp directory.

    Args:
        name: Identifier for the function (unused in the visible code).
        func: The Python callable to serialize.
    """
    predict_fname = "func.pkl"

    # Serialize function (protocol 2 keeps the payload py2-compatible).
    s = StringIO()
    c = CloudPickler(s, 2)
    c.dump(func)
    serialized_prediction_function = s.getvalue()

    # Set up serialization directory
    serialization_dir = os.path.abspath(tempfile.mkdtemp(suffix='clipper'))
    logger.info("Saving function to {}".format(serialization_dir))

    # Write out function serialization.  Text mode on py2 (where the
    # StringIO payload is a str), binary mode on py3.
    func_file_path = os.path.join(serialization_dir, predict_fname)
    if sys.version_info < (3, 0):
        with open(func_file_path, "w") as serialized_function_file:
            serialized_function_file.write(serialized_prediction_function)
    else:
        with open(func_file_path, "wb") as serialized_function_file:
            serialized_function_file.write(serialized_prediction_function)
    # BUG FIX: use the module-level ``logger`` (as a few lines above)
    # rather than the root ``logging`` module, so the message respects
    # this module's logging configuration.
    logger.info("Serialized and supplied predict function")
github ray-project / ray / python / ray / pickling.py View on Github external
def _fill_function(func, globals, defaults, closure, dict):
  """Fill in the rest of the function data.

  This fills in the rest of function data into the skeleton function object
  that were created via _make_skel_func(), including closures.

  Note: ``closure`` is deliberately NOT forwarded to
  cloudpickle._fill_function; instead, when the CPython C API is available
  (``pythonapi``), the contents of the result's existing closure cells are
  set in place via PyCell_Set so that real cell objects are restored.
  """
  result = cloudpickle._fill_function(func, globals, defaults, dict)
  if pythonapi is not None:
    for i, v in enumerate(closure):
      pythonapi.PyCell_Set(c_void_p(id(result.__closure__[i])),
                           c_void_p(id(v)))
  return result


class BetterPickler(CloudPickler):
  """CloudPickler subclass whose function pickling restores closure cells.

  Pairs with the module-level ``_fill_function``, which populates the
  real closure cells via the CPython C API at unpickle time.
  """

  def save_function_tuple(self, func):
    """Pickle ``func`` as a deferred call to ``_fill_function``.

    Emits ``_fill_function`` followed by the skeleton-function
    construction onto the pickle stream.  When the CPython C API is
    available (``pythonapi``), the closure is replaced by fresh empty
    cells that ``_fill_function`` fills in afterwards.

    NOTE(review): this snippet appears truncated — the original method
    presumably continues after ``memoize`` (saving the remaining state
    and closing the MARK with a REDUCE of ``_fill_function``); confirm
    against the full source.
    NOTE(review): ``map(...)`` returns a lazy iterator on Python 3; this
    code looks Python 2 era — verify before running it on py3.
    """
    (code, f_globals, defaults,
     closure, dct, base_globals) = self.extract_func_data(func)

    self.save(_fill_function)
    self.write(pickle.MARK)

    # Use the C-API-aware skeleton maker when available.
    self.save(_make_skel_func if pythonapi else cloudpickle._make_skel_func)
    self.save((code,
               # Stand-in empty cells; _fill_function sets their contents.
               (map(lambda _: cloudpickle._make_cell(None), closure)
                if closure and pythonapi is not None
                else closure),
               base_globals))
    self.write(pickle.REDUCE)
    self.memoize(func)