How to use the distributed.client.Future class in distributed

To help you get started, we've selected a few distributed.client.Future examples, based on how it is used in the dask/distributed codebase.
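
Futures are normally created for you by Client.submit, Client.map, or Client.scatter rather than constructed directly. As a quick orientation before the excerpts below, here is a minimal sketch of the usual round trip; the local cluster started by Client() and the inc function are illustrative, not taken from the examples that follow.

from distributed import Client, Future

def inc(x):
    return x + 1

client = Client()              # starts a local cluster for this sketch
fut = client.submit(inc, 10)   # returns immediately with a Future
assert isinstance(fut, Future)
print(fut.key, fut.status)     # the key identifies the task on the scheduler
print(fut.result())            # blocks until the task finishes -> 11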


From dask/distributed: distributed/collections.py

def _futures_to_collection(futures, client=None, **kwargs):
    client = default_client(client)
    element = futures
    while not isinstance(element, Future):
        element = element[0]

    typ = yield client.submit(type, element)
    if 'pandas' in typ.__module__:
        func = _futures_to_dask_dataframe
    elif 'numpy' in typ.__module__:
        func = _futures_to_dask_array
    elif issubclass(typ, (tuple, list, set, frozenset)):
        func = _futures_to_dask_bag
    else:
        raise NotImplementedError("First future of type %s.  Expected "
                "numpy or pandas object" % typ.__name__)

    result = yield func(futures, client=client, **kwargs)
    raise gen.Return(result)
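
The excerpt above asks the cluster for the type of the first Future's result (client.submit(type, element)) and dispatches on its module to build the right dask collection. A hedged sketch of the same type-inspection idea from user code, assuming pandas is installed and using an illustrative make_row function:

import pandas as pd
from distributed import Client, Future

def make_row(i):
    return pd.DataFrame({"x": [i]})

client = Client()
futures = client.map(make_row, range(4))        # a list of Futures
assert all(isinstance(f, Future) for f in futures)

# Ask the cluster for the type of one result without pulling the data back.
typ = client.submit(type, futures[0]).result()
print(typ.__module__, typ.__name__)             # pandas.core.frame DataFrame
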
From dask/distributed: distributed/client.py

    def __setstate__(self, state):
        key, address = state
        c = get_client(address)
        Future.__init__(self, key, c)
        c._send_to_scheduler({'op': 'update-graph', 'tasks': {},
                              'keys': [tokey(self.key)], 'client': c.id})
From dask/distributed: distributed/client.py

    def _graph_to_futures(self, dsk, keys, restrictions=None,
                          loose_restrictions=None, priority=None,
                          user_priority=0, resources=None, retries=None):
        with self._lock:
            keyset = set(keys)
            flatkeys = list(map(tokey, keys))
            futures = {key: Future(key, self, inform=False) for key in keyset}

            values = {k for k, v in dsk.items() if isinstance(v, Future)
                      and k not in keyset}
            if values:
                dsk = dask.optimization.inline(dsk, keys=values)

            d = {k: unpack_remotedata(v) for k, v in dsk.items()}
            extra_keys = set.union(*[v[1] for v in d.values()]) if d else set()
            dsk2 = str_graph({k: v[0] for k, v in d.items()}, extra_keys)
            dsk3 = {k: v for k, v in dsk2.items() if k is not v}

            if restrictions:
                restrictions = keymap(tokey, restrictions)
                restrictions = valmap(list, restrictions)

            if loose_restrictions is not None:
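
The truncated _graph_to_futures excerpt above is the internal path that turns a dask graph into one Future per output key (Future(key, self, inform=False)). From user code the same machinery is reached through Client.compute or Client.persist on a dask collection; a sketch assuming dask.array is available:

import dask.array as da
from distributed import Client, Future

client = Client()
x = da.ones((1000, 1000), chunks=(100, 100)).sum()

fut = client.compute(x)    # a single Future for the collection's result
assert isinstance(fut, Future)
print(fut.result())        # 1000000.0
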
From dask/distributed: distributed/client.py

                if not ncores:
                    raise ValueError("No valid workers")

                _, who_has, nbytes = yield scatter_to_workers(ncores, data2,
                                                              report=False,
                                                              rpc=self.rpc)

                yield self.scheduler.update_data(who_has=who_has,
                                                 nbytes=nbytes,
                                                 client=self.id)
            else:
                yield self.scheduler.scatter(data=data2, workers=workers,
                                             client=self.id,
                                             broadcast=broadcast)

        out = {k: Future(k, self, inform=False) for k in data}
        for key, typ in types.items():
            self.futures[key].finish(type=typ)

        if direct and broadcast:
            n = None if broadcast is True else broadcast
            yield self._replicate(list(out.values()), workers=workers, n=n)

        if issubclass(input_type, (list, tuple, set, frozenset)):
            out = input_type(out[k] for k in names)

        if unpack:
            assert len(out) == 1
            out = list(out.values())[0]
        raise gen.Return(out)
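
The excerpt above is the tail end of Client._scatter: once the data has landed on workers, one Future is created per key and, when broadcast is requested, the data is replicated. From user code this is simply Client.scatter; a sketch assuming a local cluster:

from distributed import Client, Future

client = Client()
data = {"a": [1, 2, 3], "b": [4, 5, 6]}

futures = client.scatter(data, broadcast=True)   # dict of key -> Future
assert all(isinstance(f, Future) for f in futures.values())
print(client.gather(futures))                    # {'a': [1, 2, 3], 'b': [4, 5, 6]}
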
From dask/distributed: distributed/client.py

@partial(normalize_token.register, Future)
def normalize_future(f):
    return [f.key, type(f)]
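
Registering normalize_future with dask's normalize_token (above) means a Future's token is derived from its key and type, so graphs and call signatures that embed Futures hash deterministically. A sketch using dask.base.tokenize:

from dask.base import tokenize
from distributed import Client

client = Client()
fut = client.submit(sum, [1, 2, 3])

# The token comes from the Future's key, not from the (possibly unfinished) result.
print(tokenize(fut))
print(tokenize(fut) == tokenize(fut))   # True: stable across calls
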
From dask/distributed: distributed/variable.py

    async def _set(self, value):
        if isinstance(value, Future):
            await self.client.scheduler.variable_set(
                key=tokey(value.key), name=self.name
            )
        else:
            await self.client.scheduler.variable_set(data=value, name=self.name)
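
The _set coroutine above stores either a Future's key or a plain value on the scheduler. The user-facing API is distributed.Variable; a sketch assuming a running client:

from distributed import Client, Variable

client = Client()
fut = client.submit(max, [3, 1, 4, 1, 5])

var = Variable("best-so-far")   # "best-so-far" is an arbitrary name
var.set(fut)                    # stores the Future's key on the scheduler
print(var.get().result())       # any client can pick the Future back up -> 5
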
From dask/distributed: distributed/actor.py

    def __init__(self, cls, address, key, worker=None):
        self._cls = cls
        self._address = address
        self.key = key
        self._future = None
        if worker:
            self._worker = worker
            self._client = None
        else:
            try:
                self._worker = get_worker()
            except ValueError:
                self._worker = None
            try:
                self._client = default_client()
                self._future = Future(key)
            except ValueError:
                self._client = None
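
In the Actor constructor above, a bare Future(key) is created when the proxy lives on a client rather than on a worker. From user code, actors are created by submitting a class with actor=True; a sketch with an illustrative Counter class:

from distributed import Client

class Counter:
    def __init__(self):
        self.n = 0

    def increment(self):
        self.n += 1
        return self.n

client = Client()
fut = client.submit(Counter, actor=True)   # a Future resolving to an Actor proxy
counter = fut.result()
print(counter.increment().result())        # actor methods return futures too -> 1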