How to use the parl.connect function in parl

To help you get started, we’ve selected a few parl examples that show how parl.connect is typically called in public projects.
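All of the excerpts below follow the same pattern: call parl.connect with the address of a running xparl cluster, then create instances of classes decorated with @parl.remote_class so that their methods execute on remote workers. As a minimal, self-contained sketch of that pattern (the address, class name, and method below are illustrative assumptions, not taken from the examples):

import parl


@parl.remote_class
class Actor(object):
    def sample(self, n):
        # Runs in a worker process somewhere in the cluster, not locally.
        return list(range(n))


# Assumes a cluster was started locally, e.g. with `xparl start --port 8010`.
parl.connect('localhost:8010')
actor = Actor()           # after connect(), instantiation is dispatched to a remote worker
print(actor.sample(5))    # method calls are forwarded to the remote instance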


PaddlePaddle/PARL: examples/NeurIPS2019-Learn-to-Move-Challenge/evaluate.py (view on GitHub)
def create_actors(self):
        """Connect to the cluster and start sampling of the remote actor.
        """
        parl.connect(args.cluster_address, ['official_obs_scaler.npz'])

        for i in range(args.actor_num):
            logger.info('Remote actor count: {}'.format(i + 1))

            remote_thread = threading.Thread(target=self.run_remote_sample)
            remote_thread.setDaemon(True)
            remote_thread.start()

        # There is a memory-leak problem in the osim-rl package,
        # so we dynamically add actors when remote actors are killed due to excessive memory usage.
        time.sleep(10 * 60)
        parl_client = get_global_client()
        while True:
            if parl_client.actor_num < args.actor_num:
                logger.info(
                    'Dynamically adding actor, current actor num: {}'.format(
                        parl_client.actor_num))
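Note the second argument passed to parl.connect in this example: it is a list of files to be distributed to the cluster workers, so that remote actors can open them by relative path. In recent PARL versions this parameter is named distributed_files; as a hedged sketch, the call above is equivalent to the keyword form below (the parameter name follows the PARL docs, the address variable is the same as in the excerpt):

parl.connect(args.cluster_address, distributed_files=['official_obs_scaler.npz'])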
PaddlePaddle/PARL: examples/IMPALA/train.py (view on GitHub)
def create_actors(self):
        """ Connect to the cluster and start sampling of the remote actor.
        """
        parl.connect(self.config['master_address'])

        logger.info('Waiting for {} remote actors to connect.'.format(
            self.config['actor_num']))

        for i in range(self.config['actor_num']):
            self.remote_count += 1
            logger.info('Remote actor count: {}'.format(self.remote_count))
            if self.start_time is None:
                self.start_time = time.time()

            remote_thread = threading.Thread(target=self.run_remote_sample)
            remote_thread.setDaemon(True)
            remote_thread.start()
PaddlePaddle/PARL: examples/ES/train.py (view on GitHub)
def create_actors(self):
        """ create actors for parallel training.
        """

        parl.connect(self.config['master_address'])
        self.remote_count = 0
        for i in range(self.config['actor_num']):
            signal_queue = queue.Queue()
            output_queue = queue.Queue()
            self.actors_signal_input_queues.append(signal_queue)
            self.actors_output_queues.append(output_queue)

            self.remote_count += 1

            remote_thread = threading.Thread(
                target=self.run_remote_sample,
                args=(signal_queue, output_queue))
            remote_thread.setDaemon(True)
            remote_thread.start()

        logger.info('All remote actors are ready, begin to learn.')
PaddlePaddle/PARL: examples/A2C/train.py (view on GitHub)
def create_actors(self):
        """ Connect to the cluster and start sampling of the remote actor.
        """
        parl.connect(self.config['master_address'])

        logger.info('Waiting for {} remote actors to connect.'.format(
            self.config['actor_num']))

        for i in six.moves.range(self.config['actor_num']):
            params_queue = queue.Queue()
            self.params_queues.append(params_queue)

            self.remote_count += 1
            logger.info('Remote actor count: {}'.format(self.remote_count))

            remote_thread = threading.Thread(
                target=self.run_remote_sample, args=(params_queue, ))
            remote_thread.setDaemon(True)
            remote_thread.start()
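For context, here is a hedged sketch of what a run_remote_sample thread target like the one above typically does with its per-actor params_queue: it creates one remote actor after parl.connect, repeatedly pulls the latest weights from the queue, and hands sampled batches back to the learner. The Actor, set_weights, sample, and sample_data_queue names are illustrative assumptions, not the repository's exact code.

    def run_remote_sample(self, params_queue):
        # One remote actor per thread; instantiation is dispatched to a cluster worker.
        remote_actor = Actor(self.config)
        while True:
            latest_params = params_queue.get()       # wait for fresh weights from the learner
            remote_actor.set_weights(latest_params)  # sync the remote policy
            batch = remote_actor.sample()            # collect a rollout on the worker
            self.sample_data_queue.put(batch)        # hand the data back to the learner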
PaddlePaddle/PARL: examples/NeurIPS2019-Learn-to-Move-Challenge/train.py (view on GitHub)
def create_actors(self):
        """Connect to the cluster and start sampling of the remote actor.
        """
        parl.connect(args.cluster_address, ['official_obs_scaler.npz'])

        for i in range(args.actor_num):
            logger.info('Remote actor count: {}'.format(i + 1))

            remote_thread = threading.Thread(target=self.run_remote_sample)
            remote_thread.setDaemon(True)
            remote_thread.start()

        # There is a memory-leak problem in the osim-rl package,
        # so we dynamically add actors when remote actors are killed due to excessive memory usage.
        time.sleep(10 * 60)
        parl_client = get_global_client()
        while True:
            if parl_client.actor_num < args.actor_num:
                logger.info(
                    'Dynamically adding actor, current actor num: {}'.format(
                        parl_client.actor_num))
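                # A hedged continuation sketch (the excerpt above is truncated here):
                # replace the lost actor by starting a new sampling thread, then
                # poll the client again after a delay. The 10-minute interval is an
                # assumption mirroring the initial sleep, not taken from the excerpt.
                remote_thread = threading.Thread(target=self.run_remote_sample)
                remote_thread.setDaemon(True)
                remote_thread.start()
            time.sleep(10 * 60)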