How to use the poap.controller.BasicWorkerThread function in POAP

To help you get started, we've selected a few POAP examples based on popular ways poap.controller.BasicWorkerThread is used in public projects.

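Before looking at the project snippets below, here is a minimal, self-contained sketch of the basic pattern: a ThreadController proposes evaluations according to a strategy, and each BasicWorkerThread pulls proposals off the controller and evaluates the objective in its own thread. The FixedSampleStrategy, the sample list, and the toy objective are illustrative choices for this sketch, not code taken from the projects that follow.

from poap.controller import ThreadController, BasicWorkerThread
from poap.strategy import FixedSampleStrategy

def objective(x):
    # Toy one-dimensional objective, used only for illustration
    return (x - 0.123) ** 2

# Points the strategy will propose for evaluation
samples = [0.0, 0.1, 0.2, 0.3, 0.4, 0.5]

controller = ThreadController()
controller.strategy = FixedSampleStrategy(samples)

# Launch worker threads and give them access to the objective function
for _ in range(2):
    controller.launch_worker(BasicWorkerThread(controller, objective))

# Run until the strategy is exhausted; the best evaluation record is returned
result = controller.run()
print('Best value found: {0}'.format(result.value))
print('Best solution found: {0}'.format(result.params[0]))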

github ilija139 / HORD / 19-CNN / pySOT_runner.py
    data = TorchOptim(seed=seed, server=server)

    # Create a strategy and a controller
    controller = ThreadController()
    controller.strategy = \
        SyncStrategyNoConstraints(
            worker_id=0, data=data,
            maxeval=maxeval, nsamples=nsamples,
            exp_design=LatinHypercube(dim=data.dim, npts=2*(data.dim+1)),
            response_surface=RBFInterpolant(surftype=CubicRBFSurface, maxp=maxeval),
            sampling_method=CandidateDYCORS(data=data, numcand=100*data.dim))

    # Launch the threads and give them access to the objective function
    for _ in range(nthreads):
        worker = BasicWorkerThread(controller, data.objfunction)
        controller.launch_worker(worker)

    # Run the optimization strategy
    result = controller.run()

    print('Best value found: {0}'.format(result.value))
    print('Best solution found: {0}\n'.format(
        np.array_str(result.params[0], max_line_width=np.inf,
                     precision=5, suppress_small=True)))

    millis = int(round(time.time() * 1000))
    print('Ended: ' + str(datetime.now()) + ' (' + str(millis) + ')')
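In the HORD snippets, the TorchOptim object is defined elsewhere in that repository; all the code shown here requires of it is a dim attribute and an objfunction callable, plus the bound and variable-type attributes that older pySOT releases expect of an optimization problem. A rough, hypothetical stand-in might look like the following sketch; the xlow/xup/integer/continuous attribute names follow the older pySOT convention and are assumptions, not code from HORD:

import numpy as np

class MyProblem:
    """Hypothetical stand-in for HORD's TorchOptim problem object."""
    def __init__(self, dim=10):
        self.dim = dim
        self.xlow = np.zeros(dim)          # lower bounds (older pySOT convention)
        self.xup = np.ones(dim)            # upper bounds
        self.integer = np.array([])        # indices of integer variables
        self.continuous = np.arange(dim)   # indices of continuous variables

    def objfunction(self, x):
        # Replace with the real, expensive evaluation (e.g. training a network)
        return float(np.sum((x - 0.5) ** 2))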

github SanPen / GridCal / src / GridCal / Engine / Simulations / Optimization / optimization_driver.py
        controller.strategy = SRBFStrategy(max_evals=self.max_iter,
                                           opt_prob=self.problem,
                                           exp_design=sampler,
                                           surrogate=surrogate_model,
                                           asynchronous=True,
                                           batch_size=num_threads)

        print("Number of threads: {}".format(num_threads))
        print("Maximum number of evaluations: {}".format(self.max_iter))
        print("Strategy: {}".format(controller.strategy.__class__.__name__))
        print("Experimental design: {}".format(sampler.__class__.__name__))
        print("Surrogate: {}".format(surrogate_model.__class__.__name__))

        # Launch the threads and give them access to the objective function
        for _ in range(num_threads):
            worker = BasicWorkerThread(controller, self.problem.eval)
            controller.launch_worker(worker)

        # Run the optimization strategy
        result = controller.run()

        print('Best value found: {0}'.format(result.value))
        print('Best solution found: {0}\n'.format(np.array_str(result.params[0],
                                                               max_line_width=np.inf,
                                                               precision=4, suppress_small=True)))

        self.solution = result.params[0]

        # Extract function values from the controller
        self.optimization_values = np.array([o.value for o in controller.fevals])

        # Send the finish signal
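As the GridCal driver shows, the ThreadController keeps a record of every evaluation in controller.fevals, so the full history can be recovered once run() returns. The convergence trace below is an illustrative addition, not part of GridCal:

import numpy as np

# After result = controller.run():
history = np.array([rec.value for rec in controller.fevals])

# Running best value after each evaluation
best_so_far = np.minimum.accumulate(history)
print('Evaluations: {0}, best value: {1}'.format(len(history), best_so_far[-1]))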

github ilija139 / HORD / 15-CNN / pySOT_runner.py
    data = TorchOptim(seed=seed, server=server)

    # Create a strategy and a controller
    controller = ThreadController()
    controller.strategy = \
        SyncStrategyNoConstraints(
            worker_id=0, data=data,
            maxeval=maxeval, nsamples=nsamples,
            exp_design=SymmetricLatinHypercube(dim=data.dim, npts=2*(data.dim+1)),
            response_surface=RBFInterpolant(surftype=CubicRBFSurface, maxp=maxeval),
            sampling_method=CandidateDYCORS(data=data, numcand=500*data.dim))

    # Launch the threads and give them access to the objective function
    for _ in range(nthreads):
        worker = BasicWorkerThread(controller, data.objfunction)
        controller.launch_worker(worker)

    # Run the optimization strategy
    result = controller.run()

    print('Best value found: {0}'.format(result.value))
    print('Best solution found: {0}\n'.format(
        np.array_str(result.params[0], max_line_width=np.inf,
                     precision=5, suppress_small=True)))

    millis = int(round(time.time() * 1000))
    print('Ended: ' + str(datetime.now()) + ' (' + str(millis) + ')')

github ilija139 / HORD / 6-MLP / pySOT_runner.py
    data = TorchOptim(seed=seed, server=server)

    # Create a strategy and a controller
    controller = ThreadController()
    controller.strategy = \
        SyncStrategyNoConstraints(
            worker_id=0, data=data,
            maxeval=maxeval, nsamples=nsamples,
            exp_design=LatinHypercube(dim=data.dim, npts=2*(data.dim+1)),
            response_surface=RBFInterpolant(surftype=CubicRBFSurface, maxp=maxeval),
            sampling_method=CandidateDYCORS(data=data, numcand=100*data.dim))

    # Launch the threads and give them access to the objective function
    for _ in range(nthreads):
        worker = BasicWorkerThread(controller, data.objfunction)
        controller.launch_worker(worker)

    # Run the optimization strategy
    result = controller.run()

    print('Best value found: {0}'.format(result.value))
    print('Best solution found: {0}\n'.format(
        np.array_str(result.params[0], max_line_width=np.inf,
                     precision=5, suppress_small=True)))

    millis = int(round(time.time() * 1000))
    print('Ended: ' + str(datetime.now()) + ' (' + str(millis) + ')')

github ilija139 / HORD / 8-CNN / pySOT_runner.py
    data = TorchOptim(seed=seed, server=server)

    # Create a strategy and a controller
    controller = ThreadController()
    controller.strategy = \
        SyncStrategyNoConstraints(
            worker_id=0, data=data,
            maxeval=maxeval, nsamples=nsamples,
            exp_design=LatinHypercube(dim=data.dim, npts=2*(data.dim+1)),
            response_surface=RBFInterpolant(surftype=CubicRBFSurface, maxp=maxeval),
            sampling_method=CandidateDYCORS(data=data, numcand=100*data.dim))

    # Launch the threads and give them access to the objective function
    for _ in range(nthreads):
        worker = BasicWorkerThread(controller, data.objfunction)
        controller.launch_worker(worker)

    # Run the optimization strategy
    result = controller.run()

    print('Best value found: {0}'.format(result.value))
    print('Best solution found: {0}\n'.format(
        np.array_str(result.params[0], max_line_width=np.inf,
                     precision=5, suppress_small=True)))

    millis = int(round(time.time() * 1000))
    print('Ended: ' + str(datetime.now()) + ' (' + str(millis) + ')')

github dbindel / POAP / poap / controller.py
    def __init__(self, controller, objective):
        "Initialize the worker."
        super(BasicWorkerThread, self).__init__(controller)
        self.objective = objective
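This last excerpt is POAP's own source: BasicWorkerThread simply stores the callable it is given and, whenever the controller hands it a proposed point, calls that callable to produce a value. Because any callable will do, you can wrap the objective to add bookkeeping such as logging; the wrapper below is a purely illustrative sketch, not part of POAP or the projects above:

from poap.controller import ThreadController, BasicWorkerThread
from poap.strategy import FixedSampleStrategy

def with_logging(objective):
    # Hypothetical wrapper: the worker calls whatever callable it is given,
    # so extra bookkeeping can live inside the objective itself.
    def wrapped(*args):
        value = objective(*args)
        print('evaluated {0} -> {1}'.format(args, value))
        return value
    return wrapped

controller = ThreadController()
controller.strategy = FixedSampleStrategy([0.0, 0.3, 0.6, 0.9])
controller.launch_worker(BasicWorkerThread(controller, with_logging(lambda x: (x - 0.3) ** 2)))
result = controller.run()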