#!/usr/bin/env python
import pika
import sys

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

channel.queue_declare(queue='task_queue', durable=True)

message = ' '.join(sys.argv[1:]) or "Hello World!"
channel.basic_publish(
    exchange='',
    routing_key='task_queue',
    body=message,
    properties=pika.BasicProperties(
        delivery_mode=2,  # make message persistent
    ))
print(" [x] Sent %r" % message)
connection.close()
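
The producer above marks messages persistent (delivery_mode=2) on a durable queue, which only pays off if the consumer acknowledges each message after processing it. A minimal companion consumer sketch, assuming the same local broker and task_queue name; this is an illustration, not part of the original snippet:

#!/usr/bin/env python
import pika

# Companion consumer sketch: acknowledge each message only after the work is
# done, so unacknowledged tasks are redelivered if the worker crashes.
connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()
channel.queue_declare(queue='task_queue', durable=True)

def callback(ch, method, properties, body):
    print(" [x] Received %r" % body)
    ch.basic_ack(delivery_tag=method.delivery_tag)  # ack after processing

channel.basic_qos(prefetch_count=1)  # hand out one unacked message at a time
channel.basic_consume(queue='task_queue', on_message_callback=callback)
channel.start_consuming()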
def connect(self, mq):
    logging.getLogger('pika').setLevel(logging.ERROR)
    credentials = pika.PlainCredentials(mq['username'], mq['password'])
    self.connection = pika.BlockingConnection(
        pika.ConnectionParameters(mq['host'], credentials=credentials))
    self.result_channel = self.connection.channel()
    self.properties = pika.BasicProperties(user_id=mq['username'])
    self._queue = []
    self._chunk = []
def _publish_request(self, correlation_id: str, body: Any) -> None:
    self._channel.basic_publish(exchange='',
                                routing_key=self._queue,
                                properties=pika.BasicProperties(
                                    reply_to=self._callback_queue,
                                    correlation_id=correlation_id,
                                    delivery_mode=2,
                                ),
                                body=body)
def produce(self, n):
    properties = pika.BasicProperties()
    self._channel.basic_publish("", self.QUEUE,
                                str(n),
                                properties)
    if n > 0:
        # add_timeout is deprecated in newer pika releases in favor of call_later
        self._connection.add_timeout(-1, lambda: self.produce(n - 1))
    else:
        print("Done producing")
def call(self, predict_data):
    data = json.dumps(predict_data)
    self.response = None
    self.corr_id = str(uuid.uuid4())
    self.channel.basic_publish(exchange='',
                               routing_key='rpc_queue',
                               properties=pika.BasicProperties(
                                   reply_to=self.callback_queue,
                                   correlation_id=self.corr_id,
                               ),
                               body=data)
    while self.response is None:
        self.connection.process_data_events()
    return self.response
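
The call() method above publishes with reply_to and correlation_id and then spins on process_data_events() until self.response is set, but the code that sets it is not shown. A sketch of that missing wiring, assuming pika 1.x; the method names here (_setup_callback_queue, _on_response) are hypothetical, not from the original project:

def _setup_callback_queue(self):
    # Declare an exclusive, server-named queue for replies and start consuming it.
    result = self.channel.queue_declare(queue='', exclusive=True)
    self.callback_queue = result.method.queue
    self.channel.basic_consume(queue=self.callback_queue,
                               on_message_callback=self._on_response,
                               auto_ack=True)

def _on_response(self, ch, method, properties, body):
    # Only accept the reply that matches the request we are waiting on;
    # setting self.response ends the wait loop in call().
    if properties.correlation_id == self.corr_id:
        self.response = body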
def emit_signal(self, signal_name, topic, **kwargs):
    print("Emitting signal!!!! with: {0}".format(kwargs))
    logger.warning("Emitting signal: {0}.{1}".format(topic, signal_name))
    self.channel.basic_publish(
        self.exchange,
        '{0}.{1}'.format(topic, signal_name),
        self._encode_json(kwargs, signal=True),
        properties=pika.BasicProperties(
            headers={
                'no_reply': 1
            }))
    Args:
        body (dict): A JSON-serializable dictionary to send.
    """
    queue = 'backends'
    chan = self.connection.channel()
    chan.exchange_declare(exchange=queue)
    chan.queue_declare(queue, durable=True)
    body['backend'] = self.name
    chan.basic_publish(
        exchange='',
        routing_key=queue,
        body=json.dumps(body),
        properties=pika.BasicProperties(
            delivery_mode=2
        )
    )
    chan.close()
"""
body = build_event_body(**kwargs)
try:
connection = pika.BlockingConnection(pika.ConnectionParameters(config.RABBITMQ_HOST))
except:
lgr.warning('rabbitmq broker unreachable, event: %s will not be registered' % body)
return
channel = connection.channel()
channel.queue_declare(queue=config.RABBITMQ_QUEUE, durable=True)
channel.basic_publish(exchange=config.RABBITMQ_EXCHANGE,
routing_key=config.RABBITMQ_ROUTING_KEY,
body=body,
properties=pika.BasicProperties(delivery_mode=2))
connection.close()
"""
# Fit VIP frames in the PIKA properties dict
# VIP format - [SENDER, RECIPIENT, PROTO, USER_ID, MSG_ID, SUBSYS, ARGS...]
dct = {
'user_id': self._rmq_userid,
'app_id': self.routing_key, # Routing key of SENDER
'headers': dict(
recipient=destination_routing_key, # RECEIVER
proto=b'VIP', # PROTO
user=user, # USER_ID
),
'message_id': msg_id, # MSG_ID
'type': subsystem, # SUBSYS
'content_type': 'application/json'
}
properties = pika.BasicProperties(**dct)
msg = args # ARGS
# _log.debug("PUBLISHING TO CHANNEL {0}, {1}, {2}, {3}".format(destination_routing_key,
# msg,
# properties,
# self.routing_key))
try:
self.channel.basic_publish(self.exchange,
destination_routing_key,
jsonapi.dumps(msg, ensure_ascii=False),
properties)
except (pika.exceptions.AMQPConnectionError,
pika.exceptions.AMQPChannelError) as exc:
raise Unreachable(errno.EHOSTUNREACH, "Connection to RabbitMQ is lost",
'rabbitmq broker', 'rmq_connection')
        logging.warning(
            'Task %s failed to execute. The task has no remaining retries.' %
            task['name'])
        entity = datastore.Entity(_TASKQUEUE_KIND,
                                  name=str(task['name']), namespace='')
        entity.update({'state': TaskStates.Failed, 'name': task['name']})
        datastore.Put(entity)
    else:
        # Re-enqueue with updated number of tries
        logging.warning(
            'Task %s failed to execute. This task will retry.' % task['name'])
        try:
            self.channel.basic_publish(exchange='',
                                       routing_key=self._queue_name,
                                       body=json.dumps(task),
                                       properties=pika.BasicProperties(
                                           delivery_mode=2,  # make message persistent
                                       ))
        except pika.exceptions.AMQPConnectionError as e:
            ch.basic_reject(delivery_tag=method.delivery_tag, requeue=True)
    try:
        self.connection = pika.BlockingConnection(pika.ConnectionParameters(
            host='localhost'))
        self.channel = self.connection.channel()
    except pika.exceptions.AMQPConnectionError as e:
        logging.error("Unable to connect to RabbitMQ: " + str(e))
    except Exception as e:
        logging.error("Unknown exception--unable to connect to RabbitMQ: " +
                      str(e))
    # TODO RabbitMQ's basic_publish and reject should be
    # done transactionally to prevent race conditions and duplicate
    # tasks being enqueued. The API does support transactions; see:
    # http://www.rabbitmq.com/amqp-0-9-1-reference.html
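
One way to act on that TODO is an AMQP channel transaction, which batches publishes and acknowledgements so they take effect together. A minimal sketch using pika's BlockingChannel tx_select/tx_commit/tx_rollback; the helper name and arguments are hypothetical, and whether a transaction fully covers this project's reject/requeue flow should be checked against the broker documentation:

import json
import pika

def republish_transactionally(channel, queue_name, task, delivery_tag):
    # Hypothetical helper: commit the re-publish and the reject as one unit,
    # or roll both back if anything fails.
    channel.tx_select()  # switch the channel into transactional mode
    try:
        channel.basic_publish(exchange='',
                              routing_key=queue_name,
                              body=json.dumps(task),
                              properties=pika.BasicProperties(delivery_mode=2))
        channel.basic_reject(delivery_tag=delivery_tag, requeue=False)
        channel.tx_commit()  # both operations are applied together
    except pika.exceptions.AMQPError:
        channel.tx_rollback()  # discard the uncommitted publish and reject
        raise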