How to use the fasteners.interprocess_locked function in fasteners

To help you get started, we’ve selected a few fasteners examples based on popular ways the library is used in public projects.

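Before looking at the project code below, here is a minimal, self-contained sketch of the basic usage (file names are illustrative): fasteners.interprocess_locked(path) takes the path of a lock file, acquires a file-based lock before the wrapped function runs, and releases it afterwards, so at most one process executes the body at a time.

import fasteners

# Illustrative lock-file path; any path writable by all cooperating processes works.
DEMO_LOCK_PATH = '/tmp/fasteners_demo.lock'

@fasteners.interprocess_locked(DEMO_LOCK_PATH)
def append_entry(line):
    # The decorator acquires the lock file before calling this function and
    # releases it afterwards, so only one process at a time is in here.
    with open('/tmp/fasteners_demo.log', 'a') as handle:
        handle.write(line + '\n')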

github quarkslab / irma / brain / brain / controllers / scanctrl.py
@interprocess_locked(interprocess_lock_path)
def new(frontend_scan_id, user, session):
    try:
        scan = Scan.get_scan(frontend_scan_id, user.id, session)
    except IrmaDatabaseResultNotFound:
        scan = Scan(frontend_scan_id, user.id)
        session.add(scan)
        session.commit()
    log.debug("scanid %s: user_id %s id %s",
              frontend_scan_id, user.id, scan.id)
    return scan
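
In this snippet interprocess_lock_path is a module-level constant shared by every worker process; the excerpt does not show its definition. A rough sketch of how such a constant could be set up (purely illustrative, not taken from irma):

import os
import tempfile

# Hypothetical definition: a fixed path that every worker process resolves identically.
interprocess_lock_path = os.path.join(tempfile.gettempdir(), 'scanctrl.lock')
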
github quarkslab / irma / frontend / frontend / controllers / scanctrl.py
@interprocess_locked(interprocess_lock_path)
def is_finished(scanid):
    with session_transaction() as session:
        scan = Scan.load_from_ext_id(scanid, session=session)
        if scan.finished() and scan.status != IrmaScanStatus.finished:
            scan.set_status(IrmaScanStatus.finished)
            session.commit()
            # launch flush celery task on brain
            log.debug("scanid: %s calling scan_flush", scan.external_id)
            celery_brain.scan_flush(scan.external_id)
github quarkslab / irma / frontend / api / scans / services.py
@interprocess_locked(interprocess_lock_path)
def is_finished(scan_id):
    with session_transaction() as session:
        scan = Scan.load_from_ext_id(scan_id, session)
        log.debug("scan %s: is_finished %d/%d", scan_id,
                  scan.probes_finished, scan.probes_total)
        if scan.finished() and scan.status != IrmaScanStatus.finished:
            # call finished hook for each files
            for file_ext in scan.files_ext:
                file_ext.hook_finished()
            scan.set_status(IrmaScanStatus.finished)
            session.commit()
            # launch flush celery task on brain
            log.debug("scan %s: calling scan_flush", scan.external_id)
            celery_brain.scan_flush(scan.external_id)
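
Functions decorated with the same lock path exclude one another, not only concurrent calls to the same function, which is what makes a check-then-set like the one on scan.status above safe across worker processes. A stripped-down sketch of that pattern (names and statuses are illustrative):

import fasteners

SCAN_LOCK = '/tmp/scan_state.lock'  # illustrative shared lock path

@fasteners.interprocess_locked(SCAN_LOCK)
def mark_finished(scan):
    # Without the lock, two workers could both observe the old status and both
    # run the transition; with it, only the first caller does.
    if scan.get('status') != 'finished':
        scan['status'] = 'finished'
        return True   # this caller performed the transition
    return False      # another process already finished it
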
github quarkslab / irma / brain / tasks.py
@interprocess_locked(interprocess_lock_path)
def active_probes():
    global cache_probelist
    # get active queues list from probe celery app
    now = time.time()
    if cache_probelist['time'] is not None:
        cache_time = now - cache_probelist['time']
    if cache_probelist['time'] is None or cache_time > PROBELIST_CACHE_TIME:
        log.debug("refreshing cached list")
        # scan all active queues except result queue
        # to list all probes queues ready
        plist = []
        queues = probe_app.control.inspect().active_queues()
        if queues:
            result_queue = config.brain_config['broker_probe'].queue
            for queuelist in queues.values():
                for queue in queuelist:
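
active_probes reads and refreshes a module-level cache, cache_probelist, under the lock; the excerpt above is truncated and does not show how that cache is initialised. One plausible shape, given the 'time' key used above (illustrative, not taken from irma):

# Hypothetical initial state: 'time' records the last refresh timestamp and a
# second key would hold the cached probe names gathered into plist.
cache_probelist = {'time': None, 'probes': []}
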
github ParaToolsInc / taucmdr / packages / taucmdr / model / experiment.py
    @fasteners.interprocess_locked(os.path.join(highest_writable_storage().prefix, '.lock'))
    def configure(self):
        """Sets up the Experiment for a new trial.

        Installs or configures TAU and all its dependencies.  After calling this
        function, the experiment is ready to operate on the user's application.

        Returns:
            TauInstallation: Object handle for the TAU installation.
        """
        from taucmdr.cf.software.tau_installation import TauInstallation
        LOGGER.debug("Configuring experiment %s", self['name'])
        with fasteners.InterProcessLock(os.path.join(PROJECT_STORAGE.prefix, '.lock')):
            populated = self.populate(defaults=True)
        target = populated['target']
        application = populated['application']
        measurement = populated['measurement']
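
This example combines both forms the library offers: the decorator serialises the whole configure() call on one lock file, while fasteners.InterProcessLock is used as a context manager to hold a second, narrower lock only while populate() runs. The context-manager form is convenient when only part of a function needs protecting, for example (illustrative paths):

import fasteners

lock = fasteners.InterProcessLock('/tmp/project_storage.lock')  # illustrative path

def rebuild_index(records):
    prepared = sorted(records)   # local work, no lock needed
    with lock:                   # hold the lock only around the shared resource
        with open('/tmp/index.txt', 'w') as handle:
            handle.writelines(item + '\n' for item in prepared)
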
github IRC-SPHERE / HyperStream / online_engine.py
    @fasteners.interprocess_locked('/tmp/hyperstream.lock')
    def execute(self, debug=False):
        """
        Execute the engine - currently simple executes all workflows.
        """

        if debug:
            # Set some default times for execution (debugging)
            start_time = datetime(year=2016, month=10, day=19, hour=12, minute=28, tzinfo=UTC)
            duration = timedelta(seconds=5)
            end_time = start_time + duration

            relative_interval = RelativeTimeInterval(0, 0)
            time_interval = TimeInterval(start_time, end_time)
            # workflow_id = "lda_localisation_model_predict"
        else:
            duration = 0  # not needed
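
Here the lock path '/tmp/hyperstream.lock' is hard-coded in the decorator and is evaluated once when the class body is defined, so every instance and every process shares that single lock file. When the lock path is only known at runtime, one alternative is a per-instance fasteners.InterProcessLock created in __init__; a sketch under that assumption (names are illustrative):

import fasteners

class Engine(object):
    def __init__(self, lock_path):
        # The lock path is chosen at runtime rather than baked into a decorator.
        self._lock = fasteners.InterProcessLock(lock_path)

    def execute(self):
        with self._lock:
            pass  # run the workflows while no other process holds the lock
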
github quarkslab / irma / brain / brain / controllers / scanctrl.py
@interprocess_locked(interprocess_lock_path)
def flush(scan, session):
    if scan.status == IrmaScanStatus.flushed:
        log.info("scan_id %s: already flushed", scan.scan_id)
        return
    log.debug("scan_id %s: flush scan %s", scan.scan_id, scan.id)
    ftpuser = scan.user.ftpuser
    log.debug("Flushing files %s", scan.files)
    ftp_ctrl.flush(ftpuser, scan.files)
    log.debug("scan_id %s: delete %s jobs", scan.scan_id, len(scan.jobs))
    for job in scan.jobs:
        session.delete(job)
    set_status(scan, IrmaScanStatus.flushed, session)
    return
github quarkslab / irma / brain / brain / scan_tasks.py
@interprocess_locked(interprocess_lock_path)
def register_probe(name, display_name, category, mimetype_filter):
    try:
        log.info("probe %s category %s registered [%s] transfer to scan_app",
                 name, category, mimetype_filter)
        with session_transaction() as session:
            probe_ctrl.register(name, display_name, category,
                                mimetype_filter, session)
    except Exception as e:
        log.exception(type(e).__name__ + " : " + str(e))
        raise register_probe.retry(countdown=5, max_retries=3, exc=e)
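
One detail worth noting in register_probe: when retry() raises, the exception propagates out of the decorated function and interprocess_locked releases the lock on the way out, so the retried invocation a few seconds later is not blocked by a lock left over from the failed attempt.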