How to use the celery.signals module in celery

To help you get started, we’ve selected a few celery examples that show how celery.signals is used in popular public projects.

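As a quick orientation before the real-world examples below, here is a minimal, self-contained sketch of the basic pattern (the app name, broker URL, and handler names are illustrative, not taken from any of the projects listed): a signal handler is a plain function that accepts **kwargs, and it is attached either with the signal's connect() decorator or by calling connect() directly.

from celery import Celery, signals

app = Celery('demo', broker='memory://')

# Decorator style: runs once in each worker child process after it starts.
@signals.worker_process_init.connect
def on_worker_process_init(**kwargs):
    print('worker process initialized')

# Call style: runs just before every task body; Celery passes task_id and task.
def on_task_prerun(sender=None, task_id=None, task=None, **kwargs):
    print('about to run %s [%s]' % (task.name, task_id))

signals.task_prerun.connect(on_task_prerun)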

github RentMethod / celerytest / celerytest / worker.py
def run(self):
    signals.worker_init.connect(self.on_worker_init)
    signals.worker_ready.connect(self.on_worker_ready)

    self.monitor.daemon = self.daemon
    self.monitor.start()

    worker = self.app.Worker()
    if hasattr(worker, 'start'):
        worker.start()
    elif hasattr(worker, 'run'):
        worker.run()
    else:
        raise Exception("Don't know how to start worker. Incompatible Celery?")
github celery / celery / t / unit / bin / test_worker.py
@signals.setup_logging.connect
def on_logging_setup(**kwargs):
    logging_setup[0] = True
github refinery-platform / refinery-platform / refinery / data_set_manager / tasks.py
@celery.signals.worker_init.connect
def on_worker_init(sender, **kwargs):
    # required to connect update_solr_index handler to task_postrun signal
    # https://github.com/celery/celery/issues/1873#issuecomment-35288899
    celery.signals.task_postrun.connect(
        post_process_file_import, sender=sender.app.tasks[FileImportTask.name]
    )
github celery / celery / celery / app / trace.py
def handle_retry(self, task, req, store_errors=True, **kwargs):
    """Handle retry exception."""
    # The exception raised is the Retry semi-predicate,
    # and its `exc` attribute is the original exception raised (if any).
    type_, _, tb = sys.exc_info()
    try:
        reason = self.retval
        einfo = ExceptionInfo((type_, reason, tb))
        if store_errors:
            task.backend.mark_as_retry(
                req.id, reason.exc, einfo.traceback, request=req,
            )
        task.on_retry(reason.exc, req.id, req.args, req.kwargs, einfo)
        signals.task_retry.send(sender=task, request=req,
                                reason=reason, einfo=einfo)
        info(LOG_RETRY, {
            'id': req.id,
            'name': get_task_name(req, task.name),
            'exc': text_t(reason),
        })
        return einfo
    finally:
        del tb
github ansible / awx / awx / lib / site-packages / celery / app / base.py
def autodiscover_tasks(self, packages, related_name='tasks', force=False):
    if force:
        return self._autodiscover_tasks(packages, related_name)
    signals.import_modules.connect(promise(
        self._autodiscover_tasks, (packages, related_name),
    ), weak=False, sender=self)
github inveniosoftware / flask-celeryext / flask_celeryext / app.py
"""Create a Celery application."""
    celery = current_celery_app

    if CELERY_4_OR_GREATER:
        v3tov4config(flask_app.config, V3TOV4MAPPING)
        celery.config_from_object(flask_app.config, namespace='CELERY')  # pragma: no cover
    else:
        celery.config_from_object(flask_app.config)  # pragma: no cover

    celery.Task = AppContextTask

    # Set Flask application object on the Celery application.
    if not hasattr(celery, 'flask_app'):
        celery.flask_app = flask_app

    signals.after_setup_task_logger.connect(setup_task_logger)
    return celery
github 007gzs / dingtalk-django-example / example / celery.py
import os
from celery import Celery, signals
from django.utils.log import configure_logging


# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'example.settings')


def configure_logger(conf=None, **kwargs):
    from django.conf import settings
    configure_logging(settings.LOGGING_CONFIG, settings.LOGGING)


signals.worker_process_init.connect(configure_logger)
signals.beat_init.connect(configure_logger)

app = Celery('example')

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Load task modules from all registered Django app configs.
app.autodiscover_tasks()


@app.task(bind=True)
def _async_call(self, func, args, kwargs):
    return func(*args, **kwargs)
github mozilla / zamboni-lib / packages / celery / celery / execute / trace.py
def execute(self):
    signals.task_prerun.send(sender=self.task, task_id=self.task_id,
                             task=self.task, args=self.args,
                             kwargs=self.kwargs)
    retval = self._trace()

    signals.task_postrun.send(sender=self.task, task_id=self.task_id,
                              task=self.task, args=self.args,
                              kwargs=self.kwargs, retval=retval)
    return retval
github getsentry / freight / freight / utils / celery.py
def init_app(self, app):
    self.__flask_context = []
    self.__flask_app = app
    self.conf.update(app.config)
    signals.task_prerun.connect(self.on_task_prerun)
    signals.task_postrun.connect(self.on_task_postrun)
github mback2k / django-celery-model / djcelery_model / models.py
@signals.task_prerun.connect
def handle_task_prerun(sender=None, task_id=None, **kwargs):
    if task_id:
        queryset = ModelTaskMeta.objects.filter(task_id=task_id)
        queryset.update(state=ModelTaskMetaState.STARTED)