How to use the distributed.LocalCluster class in distributed

To help you get started, we’ve selected a few examples of distributed.LocalCluster, drawn from popular ways it is used in public open-source projects.

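Before the project snippets below, here is a minimal sketch of the usual pattern: create a LocalCluster, attach a Client, submit work, and shut both down. The worker counts and the submitted function are illustrative, and with process-based workers (the default) this should run under an if __name__ == '__main__': guard.

from distributed import Client, LocalCluster

# Start a local scheduler with two single-threaded workers.
cluster = LocalCluster(n_workers=2, threads_per_worker=1)
client = Client(cluster)

# Submit a task to the cluster and block on its result.
future = client.submit(lambda x: x + 1, 10)
assert future.result() == 11

# Shut down the client first, then the cluster.
client.close()
cluster.close()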

github OryJonay / anytime-gridsearch / AnyTimeGridSearchCV / grids / tests.py
def test_dask_cv_single(self):
        test_cluster = LocalCluster(1)
        test_client = Client(test_cluster)
        iris = load_iris()
        reg = tree.DecisionTreeClassifier()
        cv_score = test_client.submit(cross_val_score, reg, iris.data, iris.target)
        self.assertGreater(cv_score.result().mean(), 0)
        test_cluster.scale(4)  # grow the cluster from one worker to four
        # One cross-validation future per setting; min_samples_leaf must be >= 1.
        _cv_results = {'reg_%i' % i: test_client.submit(cross_val_score,
                                                        tree.DecisionTreeClassifier(min_samples_leaf=i),
                                                        iris.data, iris.target)
                       for i in range(1, 6)}
        cv_results = test_client.gather(list(_cv_results.values()))
        for cv_result in cv_results:
            self.assertGreaterEqual(cv_result.mean(), 0)

github OryJonay / anytime-gridsearch / AnyTimeGridSearchCV / grids / tests.py
def test_grids_list_post(self):
        iris = load_iris()
        client = DjangoClient()
        response = client.post(reverse('grids_list'), data={'classifier':'DecisionTreeClassifier'})
        self.assertEqual(201, response.status_code)
        print(response.data)
        gs = ATGridSearchCV(tree.DecisionTreeClassifier,{'criterion':['gini','entropy'],
                                                         'max_depth':range(1,6),
                                                         'max_features':['auto','log2']},
                            client_kwargs={'address':LocalCluster()},
                            uuid=response.data.get('uuid',''),
                            webserver_url=self.live_server_url)
        gs.fit(iris.data, iris.target)
        response = client.get(reverse('grids_list'))
        self.assertEqual(200,response.status_code)
        self.assertEqual(1, len(response.data))

github dask / dask-drmaa / dask_drmaa / core.py
Examples
        --------
        >>> from dask_drmaa import DRMAACluster          # doctest: +SKIP
        >>> cluster = DRMAACluster()                     # doctest: +SKIP
        >>> cluster.start_workers(10)                    # doctest: +SKIP

        >>> from distributed import Client               # doctest: +SKIP
        >>> client = Client(cluster)                     # doctest: +SKIP

        >>> future = client.submit(lambda x: x + 1, 10)  # doctest: +SKIP
        >>> future.result()                              # doctest: +SKIP
        11
        """
        self.hostname = hostname or socket.gethostname()
        logger.info("Start local scheduler at %s", self.hostname)
        self.local_cluster = LocalCluster(n_workers=0, ip=ip, **kwargs)

        if script is None:
            fn = os.path.abspath(tempfile.mktemp(
                suffix='.sh',
                prefix='dask-worker-script-',
                dir=os.path.curdir,
            ))
            self.script = fn
            self._should_cleanup_script = True

            script_contents = make_job_script(executable=worker_bin_path,
                                              name='%s.%s' % (JOB_ID, TASK_ID),
                                              preexec=preexec_commands)
            with open(fn, 'wt') as f:
                f.write(script_contents)

github MateLabs / AutoOut / app / outlier_treatment / main.py
update_server(data)
    except Exception as e:
        print("Error:", e)
        data = {'experiment_id': experiment_id,
                "process_status_id": 4}
        update_server(data)


def update_server(data):
    endpoint = "http://127.0.0.1:8000/app/status/update"
    r = requests.post(url=endpoint, data=data)
    print(r.status_code)


if __name__ == '__main__':
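    # processes=False keeps workers in this process as threads: 2 workers x 3 threads each.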
    c = LocalCluster(processes=False, n_workers=2, threads_per_worker=3)
    dask_client = Client(c)

    delayed_tasks = []
    for index, dataset_path in enumerate(glob.glob("datasets/csv/*.csv")[:1]):
        spaces = [{"model": "zscore", "params": {}},
                  {"model": "DBSCAN", "params": {}},
                  # {"model": "OPTICS", "params": {}},
                  {"model": "IsolationForest", "params": {}},
                  {"model": "EllipticEnvelope", "params": {}},
                  {"model": "OneClassSVM", "params": {}},
                  {"model": "LocalOutlierFactor", "params": {}},
                  ]

        data_frame = pd.read_csv(dataset_path)
        print(data_frame.shape)

github regro / cf-scripts / conda_forge_tick / utils.py
"""
    if kind == "thread":
        with ThreadPoolExecutor(max_workers=max_workers) as pool_t:
            yield pool_t
    elif kind == "process":
        with ProcessPoolExecutor(max_workers=max_workers) as pool_p:
            yield pool_p
    elif kind in ["dask", "dask-process", "dask-thread"]:
        import dask
        import distributed
        from distributed.cfexecutor import ClientExecutor

        processes = kind in ("dask", "dask-process")

        # Daemonic workers cannot spawn their own subprocesses, so honor the flag.
        with dask.config.set({"distributed.worker.daemon": daemon}):
            with distributed.LocalCluster(
                n_workers=max_workers, processes=processes,
            ) as cluster:
                with distributed.Client(cluster) as client:
                    yield ClientExecutor(client)
    else:
        raise NotImplementedError("That kind is not implemented")

github erdc / quest / quest / api / tasks.py
def __init__(self, n_cores=None):
        if n_cores is None:
            n_cores = psutil.cpu_count() - 2
        # Note: n_cores is computed but unused; the cluster starts a single worker.
        self.cluster = LocalCluster(nanny=True, n_workers=1)
        self.client = Client(self.cluster)

github MolSSI / QCFractal / dqm_server / dqm_server.py
if options.mongod_username:
            mongo_username = options.mongod_username
        if options.mongod_password:
            mongo_password = options.mongod_password
        # Build mongo socket
        self.mongod_socket = dqm.mongo_helper.MongoSocket(
            options.mongod_ip, options.mongod_port,
            username=mongo_username, password=mongo_password,
            globalAuth=True)

        self.logger.info("Mongod Socket Info:")
        self.logger.info(str(self.mongod_socket) + "\n")

        loop = tornado.ioloop.IOLoop.current()
        self.local_cluster = None
        if options.queue == "dask":
            # Grab the Dask Scheduler
            if options.dask_ip == "":
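                # No external scheduler address given, so start an in-process cluster.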
                self.local_cluster = distributed.LocalCluster(nanny=None)
                self.queue_socket = distributed.Client(self.local_cluster)
            else:
                self.queue_socket = distributed.Client(options.dask_ip + ":" + str(options.dask_port))

            self.logger.info("Dask Scheduler Info:")
            self.logger.info(str(self.queue_socket) + "\n")

            # Make sure the scratch is there
            if not os.path.exists(dask_working_dir):
                os.makedirs(dask_working_dir)

            # Dask Nanny
            self.queue_nanny = dqm.handlers.DaskNanny(self.queue_socket, self.mongod_socket, logger=self.logger)

            scheduler = dqm.handlers.DaskScheduler
        else:

github jcmgray / xyzpy / xyzpy / gen / dask_stuff.py
def _distributed_client(n=None):
    """Return a dask.distributed client, but cache it.
    """
    import distributed
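    # scheduler_port=0 lets the operating system pick a free port.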
    cluster = distributed.LocalCluster(n, scheduler_port=0)
    client = distributed.Client(cluster)
    return client

github HerculesJack / bayesfast / bayesfast / utils / client.py
# try to get the existing client; will create a new one if failed
            try:
                client = get_client()
                return client, False
            except Exception:
                pass
        else:
            try:
                client = int(client)
                assert client > 0
            except Exception:
                warnings.warn('I do not know how to get a client from what you '
                              'gave me. Falling back to client=None for now.',
                              RuntimeWarning)
                client = None
        # client is now a positive worker count, or None for the default.
        cluster = LocalCluster(n_workers=client, threads_per_worker=1)
        client = Client(cluster)
        return client, True