How to use the ipyparallel.serialize.canning.CannedObject class in ipyparallel

To help you get started, we’ve selected a few ipyparallel examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github ipython / ipyparallel / ipyparallel / serialize / canning.py View on Github external
if g is None:
            g = {}
        if self.defaults:
            defaults = tuple(uncan(cfd, g) for cfd in self.defaults)
        else:
            defaults = None
        if self.closure:
            closure = tuple(uncan(cell, g) for cell in self.closure)
        else:
            closure = None
        newFunc = FunctionType(self.code, g, self.__name__, defaults, closure)
        return newFunc


class CannedPartial(CannedObject):
    def __init__(self, f):
        self._check_type(f)
        self.func = can(f.func)
        self.args = [ can(a) for a in f.args ]
        self.keywords = { k: can(v) for k,v in f.keywords.items() }
        self.buffers = []
        self.arg_buffer_counts = []
        self.keyword_buffer_counts = {}
        # consolidate buffers
        for canned_arg in self.args:
            if not isinstance(canned_arg, CannedObject):
                self.arg_buffer_counts.append(0)
                continue
            self.arg_buffer_counts.append(len(canned_arg.buffers))
            self.buffers.extend(canned_arg.buffers)
            canned_arg.buffers = []
github ipython / ipyparallel / ipyparallel / serialize / serialize.py View on Github external
def _extract_buffers(obj, threshold=MAX_BYTES):
    """Extract buffers larger than a certain threshold.

    Large buffers are pulled out of ``obj.buffers`` (leaving ``None``
    placeholders) and returned so they can be sent without pickling.
    Small buffers are coerced to ``bytes`` in place, because pickling
    buffer/memoryview objects just results in broken pointers.
    """
    extracted = []
    if not (isinstance(obj, CannedObject) and obj.buffers):
        return extracted
    for idx, buf in enumerate(obj.buffers):
        if _nbytes(buf) > threshold:
            # too big to pickle: ship separately, leave a placeholder
            obj.buffers[idx] = None
            extracted.append(buf)
        elif isinstance(buf, memoryview):
            # small enough to inline; materialize the view as bytes
            obj.buffers[idx] = buf.tobytes()
        elif isinstance(buf, buffer):
            # NOTE(review): `buffer` is a Python 2 builtin — presumably a
            # py2/py3 compat alias exists at file level; confirm
            obj.buffers[idx] = bytes(buf)
    return extracted
github ipython / ipyparallel / ipyparallel / serialize / canning.py View on Github external
return eval(self.name, g)


class CannedCell(CannedObject):
    """Can a closure cell"""

    def __init__(self, cell):
        # can the cell's payload so it can be serialized
        self.cell_contents = can(cell.cell_contents)

    def get_object(self, g=None):
        """Rebuild a closure cell holding the uncanned contents."""
        contents = uncan(self.cell_contents, g)

        def cell_factory():
            return contents

        # the factory's closure contains exactly one cell: ours
        return py3compat.get_closure(cell_factory)[0]


class CannedFunction(CannedObject):

    def __init__(self, f):
        """Can a plain function: capture code, defaults, closure, module."""
        self._check_type(f)
        self.code = f.__code__
        # can each default value; keep None when there are no defaults
        defaults = f.__defaults__
        self.defaults = [can(d) for d in defaults] if defaults else None
        # can each closure cell; keep None when there is no closure
        cells = py3compat.get_closure(f)
        self.closure = tuple(can(cell) for cell in cells) if cells else None
        # functions defined interactively have no module; map to __main__
        self.module = f.__module__ or '__main__'
github ipython / ipyparallel / ipyparallel / serialize / serialize.py View on Github external
def _restore_buffers(obj, buffers):
    """Restore buffers extracted by _extract_buffers.

    Fills the ``None`` placeholders in ``obj.buffers`` with the extracted
    buffers, in order.  Consumes entries from *buffers* in place.
    """
    if isinstance(obj, CannedObject) and obj.buffers:
        for i,buf in enumerate(obj.buffers):
            if buf is None:
                obj.buffers[i] = buffers.pop(0)
github ipython / ipyparallel / ipyparallel / serialize / canning.py View on Github external
mro = []
        else:
            mro = cls.mro()
        
        self.parents = [ can(c) for c in mro[1:] ]
        self.buffers = []

    def _check_type(self, obj):
        # Reject non-class inputs early.  NOTE(review): assert is stripped
        # under `python -O`; callers see AssertionError on mismatch.
        assert isinstance(obj, class_type), "Not a class type"

    def get_object(self, g=None):
        """Reconstruct the class from its canned bases and namespace."""
        bases = tuple(uncan(parent, g) for parent in self.parents)
        namespace = uncan_dict(self._canned_dict, g=g)
        return type(self.name, bases, namespace)


class CannedArray(CannedObject):
    def __init__(self, obj):
        from numpy import ascontiguousarray
        self.shape = obj.shape
        self.dtype = obj.dtype.descr if obj.dtype.fields else obj.dtype.str
        self.pickled = False
        if sum(obj.shape) == 0:
            self.pickled = True
        elif obj.dtype == 'O':
            # can't handle object dtype with buffer approach
            self.pickled = True
        elif obj.dtype.fields and any(dt == 'O' for dt,sz in obj.dtype.fields.values()):
            self.pickled = True
        if self.pickled:
            # just pickle it
            from . import serialize
            self.buffers = [serialize.pickle.dumps(obj, serialize.PICKLE_PROTOCOL)]
github ipython / ipyparallel / ipyparallel / serialize / canning.py View on Github external
def __init__(self, f):
        """Can a functools.partial: can func/args/keywords and hoist buffers.

        Buffers belonging to canned args and kwargs are consolidated onto
        ``self.buffers`` (args first, then kwargs in sorted key order),
        with per-entry counts recorded so they can be redistributed later.
        """
        self._check_type(f)
        self.func = can(f.func)
        self.args = [can(arg) for arg in f.args]
        self.keywords = {key: can(val) for key, val in f.keywords.items()}
        self.buffers = []
        self.arg_buffer_counts = []
        self.keyword_buffer_counts = {}
        # hoist positional-arg buffers; record a count (possibly 0) per arg
        for canned in self.args:
            if isinstance(canned, CannedObject):
                self.arg_buffer_counts.append(len(canned.buffers))
                self.buffers.extend(canned.buffers)
                canned.buffers = []
            else:
                self.arg_buffer_counts.append(0)
        # hoist keyword-arg buffers in deterministic (sorted) key order
        for name in sorted(self.keywords):
            canned = self.keywords[name]
            if not isinstance(canned, CannedObject):
                continue
            self.keyword_buffer_counts[name] = len(canned.buffers)
            self.buffers.extend(canned.buffers)
            canned.buffers = []
github ipython / ipyparallel / ipyparallel / serialize / canning.py View on Github external
self.buffers = []

    def get_object(self, g=None):
        """Restore the stored object, uncanning tracked attributes in place."""
        if g is None:
            g = {}
        obj = self.obj
        # each attribute named in self.keys was canned; uncan it in place
        for attr in self.keys:
            setattr(obj, attr, uncan(getattr(obj, attr), g))
        if self.hook:
            # the hook itself was canned; uncan then invoke for custom fixup
            self.hook = uncan(self.hook, g)
            self.hook(obj, g)
        return obj
    

class Reference(CannedObject):
    """Object for wrapping a remote reference by name.

    Serializes only the *name*; on the engine side, ``get_object`` resolves
    the name in the target namespace.
    """

    def __init__(self, name):
        if not isinstance(name, string_types):
            raise TypeError("illegal name: %r" % name)
        self.name = name
        self.buffers = []

    def __repr__(self):
        # Bug fix: this read `return ""%self.name`, which raises TypeError
        # ("not all arguments converted") — the template's angle brackets
        # were lost.  Restore the intended repr.
        return "<Reference: %r>" % self.name

    def get_object(self, g=None):
        """Resolve the referenced name in namespace *g*."""
        if g is None:
            g = {}
        # eval of a bare dotted name in g; the name is engine-supplied,
        # not end-user input
        return eval(self.name, g)
github ipython / ipyparallel / ipyparallel / serialize / canning.py View on Github external
canned_arg.buffers = self.buffers[:buf_count]
                self.buffers = self.buffers[buf_count:]
            for key in sorted(self.keyword_buffer_counts):
                buf_count = self.keyword_buffer_counts[key]
                canned_kwarg = self.keywords[key]
                canned_kwarg.buffers = self.buffers[:buf_count]
                self.buffers = self.buffers[buf_count:]
            assert len(self.buffers) == 0

        args = [ uncan(a, g) for a in self.args ]
        keywords = { k: uncan(v, g) for k,v in self.keywords.items() }
        func = uncan(self.func, g)
        return functools.partial(func, *args, **keywords)


class CannedClass(CannedObject):

    def __init__(self, cls):
        """Can a class: name, canned class dict, and canned MRO parents."""
        self._check_type(cls)
        self.name = cls.__name__
        # py2 old-style classes are not instances of `type`
        self.old_style = not isinstance(cls, type)
        # can every class attribute except the uncannable machinery slots
        skipped = ('__weakref__', '__dict__')
        self._canned_dict = {
            key: can(value)
            for key, value in cls.__dict__.items()
            if key not in skipped
        }
        # mro()[0] is cls itself, so parents start at index 1
        mro = [] if self.old_style else cls.mro()
        self.parents = [can(parent) for parent in mro[1:]]
        self.buffers = []
github ipython / ipyparallel / ipyparallel / serialize / canning.py View on Github external
self.args = [ can(a) for a in f.args ]
        self.keywords = { k: can(v) for k,v in f.keywords.items() }
        self.buffers = []
        self.arg_buffer_counts = []
        self.keyword_buffer_counts = {}
        # consolidate buffers
        for canned_arg in self.args:
            if not isinstance(canned_arg, CannedObject):
                self.arg_buffer_counts.append(0)
                continue
            self.arg_buffer_counts.append(len(canned_arg.buffers))
            self.buffers.extend(canned_arg.buffers)
            canned_arg.buffers = []
        for key in sorted(self.keywords):
            canned_kwarg = self.keywords[key]
            if not isinstance(canned_kwarg, CannedObject):
                continue
            self.keyword_buffer_counts[key] = len(canned_kwarg.buffers)
            self.buffers.extend(canned_kwarg.buffers)
            canned_kwarg.buffers = []