from datetime import timedelta

from django.conf import settings

from celery import states
from celery.events.snapshot import Polaroid
from celery.utils.log import get_logger

from .models import WorkerState, TaskState
from .utils import fromtimestamp, correct_awareness
WORKER_UPDATE_FREQ = 60 # limit worker timestamp write freq.
SUCCESS_STATES = frozenset([states.SUCCESS])
# Expiry can be timedelta or None for never expire.
EXPIRE_SUCCESS = getattr(settings, 'CELERYCAM_EXPIRE_SUCCESS',
                         timedelta(days=1))
EXPIRE_ERROR = getattr(settings, 'CELERYCAM_EXPIRE_ERROR',
                       timedelta(days=3))
EXPIRE_PENDING = getattr(settings, 'CELERYCAM_EXPIRE_PENDING',
                         timedelta(days=5))
NOT_SAVED_ATTRIBUTES = frozenset(['name', 'args', 'kwargs', 'eta'])
logger = get_logger(__name__)
debug = logger.debug
class Camera(Polaroid):
    TaskState = TaskState
    WorkerState = WorkerState

    clear_after = True
    worker_update_freq = WORKER_UPDATE_FREQ
    expire_states = {
        SUCCESS_STATES: EXPIRE_SUCCESS,
        states.EXCEPTION_STATES: EXPIRE_ERROR,
        states.UNREADY_STATES: EXPIRE_PENDING,
    }

    def __init__(self, *args, **kwargs):
        # assumed minimal body: delegate to Polaroid
        super(Camera, self).__init__(*args, **kwargs)
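# Illustration (not part of the original module): how a state-to-expiry
# mapping like ``Camera.expire_states`` can be resolved for a concrete
# task state.  The helper name ``expiry_for_state`` is hypothetical.
def expiry_for_state(state):
    """Return the expiry timedelta (or None) for ``state``."""
    for state_group, expiry in Camera.expire_states.items():
        if state in state_group:
            return expiry
    return None

# e.g. expiry_for_state(states.SUCCESS) -> timedelta(days=1)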
import logging
import sys

from celery.utils.log import get_logger
from celery.utils.nodenames import gethostname
from celery.utils.serialization import get_pickled_exception
from celery.utils.time import maybe_iso8601, maybe_make_aware, timezone

from kombu.utils.encoding import safe_repr, safe_str
from kombu.utils.objects import cached_property

from . import state
__all__ = ('Request',)
# pylint: disable=redefined-outer-name
# We cache globals and attribute lookups, so disable this warning.
IS_PYPY = hasattr(sys, 'pypy_version_info')
logger = get_logger(__name__)
debug, info, warn, error = (logger.debug, logger.info,
                            logger.warning, logger.error)
_does_info = False
_does_debug = False
def __optimize__():
    # this is also called by celery.app.trace.setup_worker_optimizations
    global _does_debug
    global _does_info
    _does_debug = logger.isEnabledFor(logging.DEBUG)
    _does_info = logger.isEnabledFor(logging.INFO)


__optimize__()  # noqa: E305
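# The cached ``_does_info``/``_does_debug`` flags let hot code paths skip
# a logger-level lookup on every call; a minimal sketch of the pattern
# (``announce_task`` is a hypothetical example, not celery code):
def announce_task(name, task_id):
    if _does_info:
        info('Received task: %s[%s]', name, task_id)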
# -*- coding: utf-8 -*-
from architect.repository.client import BaseClient
from celery.utils.log import get_logger
logger = get_logger(__name__)
class EspClient(BaseClient):

    def __init__(self, **kwargs):
        super(EspClient, self).__init__(**kwargs)

    def check_status(self):
        return True

    def generate_image(self, params):
        return True
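# Usage sketch, assuming ``BaseClient`` accepts arbitrary configuration
# kwargs; the ``name``/``platform`` keys and the params dict shown here
# are illustrative assumptions, not part of the original module:
client = EspClient(name='esp01', platform='esp')
if client.check_status():
    client.generate_image({'board': 'nodemcu'})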
format = format or self.format
colorize = self.supports_color(colorize, logfile)
reset_multiprocessing_logger()
ensure_process_aware_logger()
receivers = signals.setup_logging.send(
    sender=None, loglevel=loglevel, logfile=logfile,
    format=format, colorize=colorize,
)
if not receivers:
    root = logging.getLogger()

    if self.app.conf.CELERYD_HIJACK_ROOT_LOGGER:
        root.handlers = []
        get_logger('celery').handlers = []
        get_logger('celery.task').handlers = []
        get_logger('celery.redirected').handlers = []

    # Configure root logger
    self._configure_logger(
        root, logfile, loglevel, format, colorize, **kwargs
    )

    # Configure the multiprocessing logger
    self._configure_logger(
        get_multiprocessing_logger(),
        logfile, loglevel if MP_LOG else logging.ERROR,
        format, colorize, **kwargs
    )

    signals.after_setup_logger.send(
        sender=None, logger=root,
        loglevel=loglevel, logfile=logfile,
        format=format, colorize=colorize,
    )
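# Connecting a receiver to ``celery.signals.setup_logging`` makes the
# ``receivers`` check above truthy, so Celery leaves logging configuration
# alone.  Sketch; ``MY_LOGGING_CONFIG`` is a placeholder dictConfig mapping:
from celery.signals import setup_logging as _setup_logging_signal

@_setup_logging_signal.connect
def configure_logging(**kwargs):
    import logging.config
    logging.config.dictConfig(MY_LOGGING_CONFIG)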
"""``Control`` -> :mod:`celery.worker.pidbox` -> :mod:`kombu.pidbox`.

The actual commands are implemented in :mod:`celery.worker.control`.
"""
from __future__ import absolute_import, unicode_literals
from celery import bootsteps
from celery.utils.log import get_logger
from celery.worker import pidbox
from .tasks import Tasks
__all__ = ('Control',)
logger = get_logger(__name__)
class Control(bootsteps.StartStopStep):
    """Remote control command service."""

    requires = (Tasks,)

    def __init__(self, c, **kwargs):
        self.is_green = c.pool is not None and c.pool.is_green
        self.box = (pidbox.gPidbox if self.is_green else pidbox.Pidbox)(c)
        self.start = self.box.start
        self.stop = self.box.stop
        self.shutdown = self.box.shutdown
        super(Control, self).__init__(c, **kwargs)

    def include_if(self, c):
        return (c.app.conf.worker_enable_remote_control and
                c.conninfo.supports_exchange_type('fanout'))
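# The same ``StartStopStep`` contract lets user code hook the consumer
# blueprint; ``ExampleStep`` is a hypothetical illustration:
class ExampleStep(bootsteps.StartStopStep):
    requires = (Control,)

    def start(self, c):
        logger.info('ExampleStep starting')

    def stop(self, c):
        logger.info('ExampleStep stopping')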
from celery import current_app, schedules
from celery.beat import ScheduleEntry
from celery.utils.log import get_logger

from django.core.exceptions import ObjectDoesNotExist

from .models import (PeriodicTask, PeriodicTasks,
                     CrontabSchedule, IntervalSchedule)
from .utils import DATABASE_ERRORS, make_aware

# This scheduler must wake up more frequently than the regular
# default of 5 minutes because it needs to take external changes
# to the schedule into account.
DEFAULT_MAX_INTERVAL = 5 # seconds
ADD_ENTRY_ERROR = """\
Couldn't add entry %r to database schedule: %r. Contents: %r
"""
logger = get_logger(__name__)
debug, info, error = logger.debug, logger.info, logger.error
class ModelEntry(ScheduleEntry):
    model_schedules = ((schedules.crontab, CrontabSchedule, 'crontab'),
                       (schedules.schedule, IntervalSchedule, 'interval'))
    save_fields = ['last_run_at', 'total_run_count', 'no_changes']

    def __init__(self, model):
        self.app = current_app._get_current_object()
        self.name = model.name
        self.task = model.task
        try:
            self.schedule = model.schedule
        except model.DoesNotExist:
            logger.error('Schedule was removed from database')
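# Illustration: ``model_schedules`` maps a celery schedule type to its
# database model.  Order matters, since ``schedules.crontab`` subclasses
# ``schedules.schedule``.  ``resolve_model_schedule`` is a hypothetical
# helper, not part of the original module:
def resolve_model_schedule(schedule):
    for schedule_type, model, field in ModelEntry.model_schedules:
        if isinstance(schedule, schedule_type):
            return model, field
    raise ValueError('Cannot map schedule %r to a model' % (schedule,))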
import logging
import sys

from celery.five import items, monotonic, string, string_t
from celery.platforms import signals as _signals
from celery.utils import fun_takes_kwargs
from celery.utils.functional import noop
from celery.utils.log import get_logger
from celery.utils.serialization import get_pickled_exception
from celery.utils.text import truncate
from celery.utils.timeutils import maybe_iso8601, timezone, maybe_make_aware
from . import state
__all__ = ['Request']
IS_PYPY = hasattr(sys, 'pypy_version_info')
logger = get_logger(__name__)
debug, info, warn, error = (logger.debug, logger.info,
                            logger.warning, logger.error)
_does_info = False
_does_debug = False
#: Max length of result representation
RESULT_MAXLEN = 128
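# ``RESULT_MAXLEN`` presumably caps the logged result repr via
# ``truncate`` (imported above); a hedged sketch (``format_result``
# is a hypothetical helper, not celery code):
def format_result(result):
    return truncate(repr(result), RESULT_MAXLEN)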
def __optimize__():
    # this is also called by celery.app.trace.setup_worker_optimizations
    global _does_debug
    global _does_info
    _does_debug = logger.isEnabledFor(logging.DEBUG)
    _does_info = logger.isEnabledFor(logging.INFO)


__optimize__()
# Compatibility: celery 4.x renamed ``celery.utils.timeutils``
# to ``celery.utils.time``.
try:
    from celery.utils.time import is_naive
except ImportError:  # celery < 4.0
    from celery.utils.timeutils import is_naive
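# Either import path yields the same predicate; quick illustration:
from datetime import datetime, timezone as _tz
assert is_naive(datetime.utcnow())          # naive: no tzinfo
assert not is_naive(datetime.now(_tz.utc))  # aware datetime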
import json

from celery import current_app, schedules
from celery.beat import ScheduleEntry
from celery.utils.log import get_logger

from gitlab_tools.models.celery import (PeriodicTask, PeriodicTasks,
                                        CrontabSchedule, IntervalSchedule)
from gitlab_tools.extensions import db
DEFAULT_MAX_INTERVAL = 5
ADD_ENTRY_ERROR = """\
Couldn't add entry %r to database schedule: %r. Contents: %r
"""
logger = get_logger(__name__)
debug, info, error = logger.debug, logger.info, logger.error
class ModelEntry(ScheduleEntry):
    model_schedules = ((schedules.crontab, CrontabSchedule, 'crontab'),
                       (schedules.schedule, IntervalSchedule, 'interval'))
    save_fields = ['last_run_at', 'total_run_count', 'no_changes']

    def __init__(self, model, session=None):
        self.app = current_app
        self.session = session or db.session
        self.name = model.name
        self.task = model.task
        self.schedule = model.schedule
        self.args = json.loads(model.args or '[]')
        self.kwargs = json.loads(model.kwargs or '{}')
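# The ``or '[]'`` / ``or '{}'`` guards above make NULL or empty database
# columns decode to empty containers instead of raising:
assert json.loads(None or '[]') == []
assert json.loads('' or '{}') == {}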
from celery import bootsteps
from celery.utils.log import get_logger

from .components import Pool

try:  # pragma: no cover
    import pyinotify
    _ProcessEvent = pyinotify.ProcessEvent
except ImportError:  # pragma: no cover
    pyinotify = None       # noqa
    _ProcessEvent = object  # noqa

__all__ = [
    'WorkerComponent', 'Autoreloader', 'Monitor', 'BaseMonitor',
    'StatMonitor', 'KQueueMonitor', 'InotifyMonitor', 'file_hash',
]
logger = get_logger(__name__)
class WorkerComponent(bootsteps.StartStopStep):
    label = 'Autoreloader'
    conditional = True
    requires = (Pool,)

    def __init__(self, w, autoreload=None, **kwargs):
        self.enabled = w.autoreload = autoreload
        w.autoreloader = None

    def create(self, w):
        w.autoreloader = self.instantiate(w.autoreloader_cls, w)
        return w.autoreloader if not w.use_eventloop else None

    def register_with_event_loop(self, w, hub):
        # Hand the autoreloader's file descriptors to the hub, and
        # detach them again when the event loop closes.
        w.autoreloader.register_with_event_loop(hub)
        hub.on_close.add(w.autoreloader.on_event_loop_close)
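# For context, a stat()-based autoreload monitor reduces to polling file
# mtimes; a standalone sketch under that assumption (this is not celery's
# ``StatMonitor`` implementation):
def watch_files(paths, on_change, interval=1.0):
    import os
    import time
    mtimes = {}
    while True:
        for path in paths:
            try:
                mtime = os.stat(path).st_mtime
            except OSError:
                continue
            if mtimes.get(path, mtime) != mtime:
                on_change(path)  # file changed since last poll
            mtimes[path] = mtime
        time.sleep(interval)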
from __future__ import absolute_import
import socket
import threading
from kombu.common import QoS
from celery.datastructures import AttributeDict
from celery.utils.log import get_logger
from celery.worker.bootsteps import StartStopComponent
from celery.worker.control import Panel
from celery.worker.heartbeat import Heart
logger = get_logger(__name__)
info, error, debug = logger.info, logger.error, logger.debug
class ConsumerConnection(StartStopComponent):
    name = 'consumer.connection'
    delay_shutdown = True

    def __init__(self, c, **kwargs):
        c.connection = None

    def start(self, c):
        # Re-establish the broker connection and setup the task consumer.
        debug('Re-establishing connection to the broker...')
        c.connection = c._open_connection()
        info('consumer: Connected to %s.', c.connection.as_uri())