How to use the billiard.Process class in billiard

To help you get started, we've selected a few billiard.Process examples, based on popular ways it is used in public projects.

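billiard is the Celery project's fork of Python's multiprocessing package, and billiard.Process mirrors the standard multiprocessing.Process API. As a quick orientation before the project examples below, here is a minimal, self-contained sketch (the worker function and its message are illustrative, not taken from any of the projects):

from billiard import Process, Queue

def worker(q):
    # Runs in the child process.
    q.put("hello from the child")

if __name__ == "__main__":
    q = Queue()
    p = Process(target=worker, args=(q,))
    p.start()
    print(q.get(timeout=3))  # -> "hello from the child"
    p.join()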

github celery/billiard: t/unit/test_spawn.py
import psutil
from billiard import Process, Queue

# `parent_task` is a helper defined elsewhere in billiard's test module.
def test_set_pdeathsig(self):
    success = "done"
    q = Queue()
    p = Process(target=parent_task, args=(q, success))
    p.start()
    # The parent reports its own child's PID on the queue.
    child_proc = psutil.Process(q.get(timeout=3))
    try:
        p.terminate()
        assert q.get(timeout=3) == success
    finally:
        child_proc.terminate()
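The test name refers to the "parent death signal" mechanism (prctl(PR_SET_PDEATHSIG, ...) on Linux): the idea is that terminating the parent should cause the orphaned child to receive the configured signal and post success back on the queue, with psutil providing a handle to clean the child up afterwards.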
github jschnurr/scrapyscript: src/scrapyscript/__init__.py
# Imports used by this snippet; Queue comes from billiard.queues so that it
# accepts the ctx keyword passed below.
from billiard import Process
from billiard.queues import Queue
from pydispatch import dispatcher
from scrapy import signals
from scrapy.settings import Settings


class Job(object):
    """A job is a single request to call a specific spider. *args and **kwargs
    will be passed to the spider constructor.
    """

    def __init__(self, spider, *args, **kwargs):
        """Params:
          spider (spidercls): the spider to be run for this job.
        """
        self.spider = spider
        self.args = args
        self.kwargs = kwargs


class Processor(Process):
    """Start a twisted reactor and run the provided scrapy spiders.
    Blocks until all have finished.
    """

    def __init__(self, settings=None):
        """
        Params:
          settings (scrapy.settings.Settings) - settings to apply. Defaults
          to Scrapy default settings.
        """
        kwargs = {"ctx": __import__("billiard.synchronize")}

        self.results = Queue(**kwargs)
        self.items = []
        self.settings = settings or Settings()
        dispatcher.connect(self._item_scraped, signals.item_scraped)
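The __import__("billiard.synchronize") call looks odd, but __import__ with a dotted name and no fromlist returns the top-level package, so the queue is constructed with billiard (rather than the stdlib multiprocessing) as its context. A quick sketch of that behaviour:

ctx = __import__("billiard.synchronize")
print(ctx.__name__)  # -> "billiard", the top-level package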
github EventKit/eventkit-cloud: eventkit_cloud/utils/arcgis.py
# Excerpt from a method of eventkit's ArcGIS export helper; names such as
# self.bbox come from the enclosing class.
# Add autoconfiguration to base_config
mapproxy_config = base_config()
load_config(mapproxy_config, config_dict=conf_dict)
# Create a configuration object
mapproxy_configuration = ProxyConfiguration(mapproxy_config, seed=seed, renderd=None)

seed_dict = get_seed_template(bbox=self.bbox, level_from=self.level_from, level_to=self.level_to)
# Create a seed configuration object
seed_configuration = SeedingConfiguration(seed_dict, mapproxy_conf=mapproxy_configuration)
logger.error("Beginning seeding to {}".format(self.gpkgfile))
logger.error(conf_dict)
logger.error(seed_dict)
# Run the seeder in a billiard Process with daemon=False: Celery workers are
# daemonic, and daemonic processes cannot spawn children under the stdlib
# multiprocessing module.
try:
    p = Process(target=seeder.seed, daemon=False, kwargs={"tasks": seed_configuration.seeds(['seed']),
                                                          "concurrency": 1})
    p.start()
    p.join()
except Exception as e:
    logger.error("ArcGIS Export failed.")
    logger.error("Using Configuration:")
    logger.error(mapproxy_config)
    errors, informal_only = validate_options(mapproxy_config)
    if not informal_only:
        logger.error("Mapproxy configuration failed.")
        logger.error("Using Configuration:")
        logger.error(mapproxy_config)
        raise ConfigurationError('Mapproxy configuration error - {}'.format(', '.join(errors)))
    errors, informal_only = validate_seed_conf(seed_dict)
    if not informal_only:
        logger.error("Mapproxy Seed failed.")
github splunk/eventgen: lib/generatorworker.py
def __init__(self, num, q1, q2):
    self.worker = GeneratorRealWorker(num, q1, q2, self.stop)

    # `multiprocessing` here refers to billiard's API (this page lists the
    # snippet as a billiard.Process example), so this runs billiard's
    # Process initializer.
    multiprocessing.Process.__init__(self)
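This is the classic Process-subclass pattern: stash the work in __init__, call the base initializer, and let start() invoke run() in the child. A generic, self-contained sketch, assuming billiard is imported under the multiprocessing name as the snippet above suggests:

import billiard as multiprocessing

class Worker(multiprocessing.Process):
    def __init__(self, num, q_in, q_out):
        multiprocessing.Process.__init__(self)
        self.num, self.q_in, self.q_out = num, q_in, q_out

    def run(self):
        # Runs in the child; consume work items until a None sentinel arrives.
        for item in iter(self.q_in.get, None):
            self.q_out.put((self.num, item))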
github jschnurr/scrapyscript: src/scrapyscript/__init__.py
def run(self, jobs):
    """Start the Scrapy engine and execute all jobs. Return consolidated
    results in a single list.

    Params:
      jobs ([Job]) - one or more Job objects to be processed.

    Returns:
      List of objects yielded by the spiders after all jobs have run.
    """
    if not isinstance(jobs, collections.abc.Iterable):
        jobs = [jobs]
    self.validate(jobs)

    # Each run gets a fresh billiard Process, because a Twisted reactor
    # cannot be restarted within the same process.
    p = Process(target=self._crawl, args=[jobs])
    p.start()
    p.join()
    p.terminate()

    return self.results.get()
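Hypothetical usage, assuming MySpider is a scrapy.Spider subclass defined elsewhere:

job = Job(MySpider)             # spider args/kwargs would go here
results = Processor().run(job)  # blocks until the crawl finishes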
github mike-lawrence/pytracker: __init__.py
def __init__(self, camIndex, camRes, timestampMethod):
    # Queues for two-way communication with the camera-loop child process.
    self.qTo = billiard.Queue()
    self.qFrom = billiard.Queue()
    self.process = billiard.Process(target=cameraLoop.loop,
                                    args=(self.qTo, self.qFrom, camIndex, camRes, timestampMethod))

def start(self):
    # Launch the camera loop in its own billiard process.
    self.process.start()
github smathot/OpenSesame: libqtopensesame/misc/process.py
def __init__(self, exp, output):

		"""
		Constructor.

		Arguments
		exp		--	An instance of libopensesame.experiment.experiment
		output	--	A reference to the queue object created in and used to
					communicate with the main process.
		"""

		multiprocessing.Process.__init__(self)
		self.output = output
		# The experiment object is troublesome to serialize, so pull out all
		# the relevant data here and rebuild the experiment object inside the
		# new process.
		self.script = exp.to_string()
		self.pool_folder = exp.pool.folder()
		self.subject_nr = exp.var.subject_nr
		self.experiment_path = exp.experiment_path
		self.fullscreen = exp.var.fullscreen == u'yes'
		self.logfile = exp.logfile
		self.auto_response = exp.auto_response
		self.killed = False
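The serialization comment points at a general billiard/multiprocessing rule: everything handed to a child process must be picklable, so it is often easier to pass plain values and rebuild heavyweight objects on the other side. A minimal sketch of that pattern (the names are illustrative, not OpenSesame's):

from billiard import Process, Queue

def child(script, subject_nr, out):
    # Rebuild state from plain, picklable values inside the child.
    out.put("subject %s ran a %d-byte script" % (subject_nr, len(script)))

if __name__ == "__main__":
    out = Queue()
    p = Process(target=child, args=("set foo 1", 1, out))
    p.start()
    print(out.get(timeout=3))
    p.join()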