How to use the execo.Remote function in execo

To help you get started, we’ve selected a few execo examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github elavoie / pando-computing / test / test-g5k.py View on Github external
def default(self, line):
        """Fallback cmd.Cmd handler: treat any unrecognized console input
        as a shell command to launch on all reserved cores.

        Kills the currently running Remote action, waits one second for
        the remote processes to die, then starts ``line`` on every entry
        of the global ``cores`` host list.

        NOTE(review): Python 2 syntax (print statement); the globals
        ``workers`` and ``cores`` are assumed to be initialized by the
        surrounding script before this handler runs — confirm against
        the full file.
        """
        global interrupted, workers, cores
        # NOTE(review): sets the flag to False while printing that the
        # previous command is being interrupted — presumably this clears
        # the flag for the new command; confirm intent.
        interrupted = False
        print 'interrupting previous command'
        workers.kill()
        # Give the killed remote processes a moment to terminate before
        # opening new connections to the same hosts.
        execo.sleep(1)
        print 'sending command: ' + line
        # One connection per entry of `cores` (hosts may appear multiple
        # times to open several connections per machine).
        workers = execo.Remote(
                line,
                cores).start()
github elavoie / pando-computing / test / test-g5k.py View on Github external
print 'sending command: ' + line
        workers = execo.Remote(
                line,
                cores).start()

app = App()

if jobid:
    try:
        print 'Waiting for job to start'
        execo_g5k.wait_oar_job_start(jobid, site)
        print 'Retrieving nodes'
        nodes = execo_g5k.get_oar_job_nodes(jobid, site)
        # Setup nodes
        print 'Preparing workers with cmd: ' + setup_cmd
        workers = execo.Remote(
                setup_cmd,
                nodes).start()
        workers.expect('Worker Setup Completed')
        workers.kill()
        # Possibly open more than one connection per machine
        cores = nodes * args.nb_cores
        print cores
        print 'Example cmd: %s' % (workers_cmd)
        app.prompt = '%s (%d node(s), %d core(s)/node)> ' % (site, args.volunteers, args.nb_cores)
	app.cmdloop()
        # execo.sleep(600)
        # print 'Workers done'

    finally:
        execo_g5k.oardel([(jobid, site)])
github BeyondTheClouds / enos / engine / g5k_engine.py View on Github external
def exec_command_on_nodes(self, nodes, cmd, label, conn_params=None):
        """Run ``cmd`` on one host or on a set of hosts.

        A single node (id or hostname) is wrapped into a one-element
        list, and ``conn_params`` falls back to DEFAULT_CONN_PARAMS when
        not supplied.  Terminates the whole program with exit status 31
        if the remote action does not finish cleanly.
        """
        # Normalise the argument: always operate on a list of nodes.
        targets = nodes if isinstance(nodes, list) else [nodes]

        # Fall back to the module-wide default connection parameters.
        params = DEFAULT_CONN_PARAMS if conn_params is None else conn_params

        logger.info(label)
        action = EX.Remote(cmd, targets, params)
        action.run()

        # A failed remote command aborts the whole engine run.
        if not action.finished_ok:
            sys.exit(31)
github mliroz / hadoop_g5k / hadoop_g5k / cluster_v2.py View on Github external
def _initialize_conf(self):
        """Merge locally-specified configuration files with the default
        files shipped with the distribution.

        Copies the bundled MapReduce template next to it as the live
        config file on every host, then delegates the remaining merge
        work to the parent class.
        """
        # NOTE: the trailing space inside ".template " is what separates
        # the two arguments of the resulting `cp` command.
        template_path = os.path.join(self.conf_dir,
                                     MR_CONF_FILE + ".template ")
        conf_path = os.path.join(self.conf_dir, MR_CONF_FILE)

        copy_action = Remote("cp " + template_path + conf_path, self.hosts)
        copy_action.run()

        super(HadoopV2Cluster, self)._initialize_conf()