How to use the kombu.Exchange class in kombu

To help you get started, we've selected a few kombu.Exchange examples based on popular ways it is used in public projects.

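Before the project-specific snippets, here is a minimal, self-contained sketch of the pattern most of them share: declare an Exchange, bind a Queue to it, and publish a message through a Producer. The broker URL, exchange, and queue names below are placeholders.

from kombu import Connection, Exchange, Producer, Queue

# Placeholder names and broker URL; adjust for your environment.
task_exchange = Exchange('tasks', type='direct', durable=True)
task_queue = Queue('tasks', exchange=task_exchange, routing_key='tasks.default')

with Connection('amqp://guest:guest@localhost//') as conn:
    channel = conn.channel()

    # Binding the queue to a channel lets us declare the exchange, the queue,
    # and the binding between them on the broker.
    task_queue(channel).declare()

    producer = Producer(channel, exchange=task_exchange)
    producer.publish(
        {'hello': 'world'},
        routing_key='tasks.default',
        serializer='json',
    )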

github wazo-platform / wazo-calld / integration_tests / suite / base.py
def listen_bus_events(cls, routing_key):
        exchange = Exchange(BUS_EXCHANGE_NAME, type=BUS_EXCHANGE_TYPE)
        with Connection(BUS_URL) as conn:
            queue = Queue(BUS_QUEUE_NAME, exchange=exchange, routing_key=routing_key, channel=conn.channel())
            queue.declare()
            queue.purge()
            cls.bus_queue = queue
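A companion helper can then drain whatever arrives on that queue. The sketch below is hypothetical (the helper name, callback, and timeout are not part of wazo-calld); it reuses the BUS_URL constant and the cls.bus_queue set up in the snippet above.

import socket

def accumulate_bus_events(cls, timeout=1):
    received = []

    def on_message(body, message):
        received.append(body)
        message.ack()

    with Connection(BUS_URL) as conn:
        # The Consumer rebinds cls.bus_queue to its own channel before consuming.
        with conn.Consumer(cls.bus_queue, callbacks=[on_message]):
            try:
                conn.drain_events(timeout=timeout)
            except socket.timeout:
                pass  # no more events within the window
    return received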
github celery / kombu / t / unit / test_entity.py
def test_multiple_bindings(self):
        chan = Mock()
        q = Queue('mul', [
            binding(Exchange('mul1'), 'rkey1'),
            binding(Exchange('mul2'), 'rkey2'),
            binding(Exchange('mul3'), 'rkey3'),
        ])
        q(chan).declare()
        assert call(
            nowait=False,
            exchange='mul1',
            auto_delete=False,
            passive=False,
            arguments=None,
            type='direct',
            durable=True,
        ) in chan.exchange_declare.call_args_list
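Outside the unit test, the same multi-binding pattern can be exercised against kombu's in-memory transport, which makes the sketch below runnable without a broker; names mirror the test above.

from kombu import Connection, Exchange, Queue, binding

multi_queue = Queue('mul', bindings=[
    binding(Exchange('mul1'), routing_key='rkey1'),
    binding(Exchange('mul2'), routing_key='rkey2'),
])

with Connection('memory://') as conn:
    # Declares both exchanges and binds the queue to each of them.
    multi_queue(conn.channel()).declare()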
github RackHD / RackHD / test / stream-monitor / stream_sources / amqp_od / rackhd_amqp_od.py
def __assure_exchange(self, connection, exchange_name, exchange_type):
        exchange = Exchange(exchange_name, type=exchange_type)
        bound_exchange = exchange(connection)
        bound_exchange.declare()
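The declare() above creates the exchange if it does not already exist. A related pattern, shown here as a sketch rather than RackHD code, is a passive declare, which only checks that the exchange exists on an AMQP broker and raises if it is missing (exchange_exists and its arguments are hypothetical names).

from amqp.exceptions import ChannelError  # raised by py-amqp when a passive declare fails
from kombu import Exchange

def exchange_exists(connection, exchange_name, exchange_type='topic'):
    exchange = Exchange(exchange_name, type=exchange_type)
    try:
        # passive=True asks the broker to check only, never to create.
        exchange(connection).declare(passive=True)
    except ChannelError:
        return False  # typically a 404 NOT_FOUND; the broker closes the channel
    return True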
github reviewboard / ReviewBot / bot / reviewbot / celery.py
def create_queues():
    """Create the celery queues.

    Returns:
        list of kombu.Queue:
        The queues that this worker will listen to.
    """
    default_exchange = Exchange('celery', type='direct')
    queues = [
        Queue('celery', default_exchange, routing_key='celery'),
    ]

    # Detect the installed tools and select the corresponding
    # queues to consume from.
    for ep in pkg_resources.iter_entry_points(group='reviewbot.tools'):
        tool_class = ep.load()
        tool = tool_class()
        queue_name = '%s.%s' % (ep.name, tool_class.version)

        if tool.check_dependencies():
            if tool.working_directory_required:
                # Set up a queue for each configured repository. This way only
                # workers which have the relevant repository configured will
                # pick up applicable tasks.
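The excerpt cuts off before the per-tool queues are actually appended. As a purely hypothetical illustration of the pattern the comment describes (one queue per tool so that only suitably configured workers consume from it), the continuation might look along these lines; the tool names are invented.

from kombu import Exchange, Queue

default_exchange = Exchange('celery', type='direct')
queues = []

# Invented tool names standing in for the entry points discovered above.
for queue_name in ('pyflakes.1.0', 'pycodestyle.1.0'):
    queues.append(Queue(queue_name, default_exchange, routing_key=queue_name))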
github OnGridSystems / CATEd / djangoTrade / celery.py
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery
from django.conf import settings
from kombu import Queue, Exchange

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'djangoTrade.settings')
app = Celery('djangoTrade')
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
app.conf.task_queues = {
    Queue('high', Exchange('high'), routing_key='high'),
    Queue('normal', Exchange('normal'), routing_key='normal'),
    Queue('low', Exchange('low'), routing_key='low'),
    Queue('set_orders', Exchange('set_orders'), routing_key='set_orders'),
}
app.conf.task_default_queue = 'normal'
app.conf.task_default_exchange_type = 'direct'
app.conf.task_default_routing_key = 'normal'


@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
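With queues declared this way, a task can be pushed onto a specific queue at call time (assuming a running broker and worker); anything dispatched without an explicit queue falls back to the 'normal' defaults configured above.

# Route this call onto the 'high' queue; with the direct exchanges above,
# the queue name doubles as the routing key.
debug_task.apply_async(queue='high')

# Uses task_default_queue / task_default_routing_key, i.e. 'normal'.
debug_task.apply_async()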
github StackStorm / st2 / st2common / st2common / transport / reactor.py
'TriggerDispatcher',
    'get_sensor_cud_queue',
    'get_trigger_cud_queue',
    'get_trigger_instances_queue',
]

LOG = logging.getLogger(__name__)

# Exchange for Trigger CUD events
TRIGGER_CUD_XCHG = Exchange('st2.trigger', type='topic')

# Exchange for TriggerInstance events
TRIGGER_INSTANCE_XCHG = Exchange('st2.trigger_instances_dispatch', type='topic')

# Exchange for Sensor CUD events
SENSOR_CUD_XCHG = Exchange('st2.sensor', type='topic')


class SensorCUDPublisher(publishers.CUDPublisher):
    """
    Publisher responsible for publishing Sensor model CUD events.
    """

    def __init__(self):
        super(SensorCUDPublisher, self).__init__(exchange=SENSOR_CUD_XCHG)


class TriggerCUDPublisher(publishers.CUDPublisher):
    """
    Publisher responsible for publishing Trigger model CUD events.
    """
github pythonzm / Ops / Ops / celery.py
Change Activity:
                    2018-07-16:
-------------------------------------------------
"""
import os
from celery import Celery
from kombu import Queue, Exchange

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Ops.settings')

app = Celery('Ops', broker='redis://127.0.0.1:6379/0')

app.conf.task_queues = (
    Queue('default', Exchange('default', type='direct'), routing_key='default'),
    Queue('ansible', Exchange('ansible', type='direct'), routing_key='ansible'),
    Queue('fort', Exchange('fort', type='direct'), routing_key='fort'),
    Queue('plan', Exchange('plan', type='direct'), routing_key='plan'),
    Queue('commons', Exchange('commons', type='direct'), routing_key='commons'),
)

app.conf.task_default_queue = 'default'
app.conf.task_default_exchange = 'default'
app.conf.task_default_exchange_type = 'direct'
app.conf.task_default_routing_key = 'default'
app.conf.timezone = 'Asia/Shanghai'
app.conf.enable_utc = False

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
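To actually steer work onto these queues, a routing table is normally configured alongside them; a sketch follows (the dotted task paths are placeholders, not the project's real task names).

app.conf.task_routes = {
    # Placeholder task paths routed onto the queues declared above.
    'ansible_api.tasks.run_playbook': {'queue': 'ansible', 'routing_key': 'ansible'},
    'fort.tasks.record_session': {'queue': 'fort', 'routing_key': 'fort'},
}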
github CenterForOpenScience / SHARE / project / settings.py
MED_PRI_MODULES = {
    'share.tasks.HarvesterTask',
}

HIGH_PRI_MODULES = {
}

from kombu import Queue, Exchange
CELERY_QUEUES = (
    Queue(LOW_QUEUE, Exchange(LOW_QUEUE), routing_key=LOW_QUEUE,
          consumer_arguments={'x-priority': -10}),
    Queue(DEFAULT_QUEUE, Exchange(DEFAULT_QUEUE), routing_key=DEFAULT_QUEUE,
          consumer_arguments={'x-priority': 0}),
    Queue(MED_QUEUE, Exchange(MED_QUEUE), routing_key=MED_QUEUE,
          consumer_arguments={'x-priority': 20}),
    Queue(HIGH_QUEUE, Exchange(HIGH_QUEUE), routing_key=HIGH_QUEUE,
          consumer_arguments={'x-priority': 30}),
)

CELERY_DEFAULT_EXCHANGE_TYPE = 'direct'
CELERY_ROUTES = ('share.celery.CeleryRouter', )
CELERY_IGNORE_RESULT = True
CELERY_STORE_ERRORS_EVEN_IF_IGNORED = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True

# Logging
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'console': {
            '()': 'colorlog.ColoredFormatter',
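The kombu-specific detail worth noting in the snippet above is consumer_arguments, which kombu passes straight through to basic.consume; on RabbitMQ the x-priority argument sets the consumer's priority. A standalone sketch of the same idea with placeholder names:

from kombu import Exchange, Queue

# Placeholder queue names; 'x-priority' only has an effect on brokers that
# support consumer priorities (e.g. RabbitMQ).
CELERY_QUEUES = (
    Queue('share.low', Exchange('share.low'), routing_key='share.low',
          consumer_arguments={'x-priority': -10}),
    Queue('share.high', Exchange('share.high'), routing_key='share.high',
          consumer_arguments={'x-priority': 30}),
)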
github reddit / baseplate.py / baseplate / clients / kombu.py
arguments given to this function will be passed through to the
    :py:class:`~kombu.Exchange` constructor. Keyword arguments take precedence
    over the configuration file.

    Supported keys:

    * ``exchange_name``
    * ``exchange_type``

    """
    assert prefix.endswith(".")
    parser = config.SpecParser(
        {"exchange_name": config.Optional(config.String), "exchange_type": config.String}
    )
    options = parser.parse(prefix[:-1], app_config)
    return Exchange(name=options.exchange_name or "", type=options.exchange_type, **kwargs)
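The snippet starts mid-docstring, so the helper's name is cut off. Assuming it follows the (app_config, prefix, **kwargs) shape its body implies, calling it might look roughly like the following; the exchange_from_config name and the config values are assumptions, not verified against baseplate.

# Assumed usage of the helper above; names and values are illustrative.
app_config = {
    'amqp.exchange_name': 'events',
    'amqp.exchange_type': 'topic',
}

exchange = exchange_from_config(app_config, prefix='amqp.')
# -> kombu.Exchange(name='events', type='topic')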
github nameko / nameko / newrpc / __init__.py
def get_fanout_exchange(topic, channel=None):
    return Exchange(name='{}_fanout'.format(topic),
            channel=channel,
            type='fanout',
            durable=False,
            auto_delete=True)
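A fanout exchange ignores routing keys and delivers a copy of every message to each queue bound to it, which is what makes it suitable for broadcast-style events. A quick usage sketch of the helper above follows; the broker URL, topic, and queue name are placeholders.

from kombu import Connection, Producer, Queue

with Connection('amqp://guest:guest@localhost//') as conn:
    channel = conn.channel()
    exchange = get_fanout_exchange('orders', channel=channel)

    # Each listener binds its own short-lived queue to the fanout exchange
    # and receives a copy of every published message.
    listener_queue = Queue('orders_listener_1', exchange=exchange,
                           channel=channel, durable=False, auto_delete=True)
    listener_queue.declare()

    Producer(channel, exchange=exchange).publish(
        {'event': 'created'}, serializer='json')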