How to use the zmq.DEALER socket type in zmq

To help you get started, we’ve selected a few zmq examples, based on popular ways it is used in public projects.
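
Before the project excerpts, here is a minimal, self-contained sketch of the basic DEALER pattern: connect to a peer (typically a ROUTER or REP), send a message, and poll for the reply with a timeout. The endpoint, identity, and payload below are placeholders chosen for illustration and are not taken from any of the projects shown further down.

import zmq

context = zmq.Context.instance()
dealer = context.socket(zmq.DEALER)
dealer.setsockopt(zmq.LINGER, 0)          # do not block on close if messages are still queued
dealer.identity = b"example-client"       # optional fixed identity seen by the peer ROUTER
dealer.connect("tcp://localhost:5555")    # placeholder endpoint

dealer.send_multipart([b"", b"hello"])    # empty delimiter frame mimics REQ framing for a REP/ROUTER peer
if dealer.poll(2000):                     # wait up to 2 seconds for a reply
    print(dealer.recv_multipart())
else:
    print("no reply within 2 seconds")

dealer.close()
context.term()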

github sagemath / sagecell / kernel_provider.py
def __init__(self, dealer_address, dir):
        self.is_active = False
        self.dir = dir
        try:
            os.mkdir(dir)
            logger.warning("created parent directory for kernels, "
                "consider doing it yourself with appropriate attributes")
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
        context = zmq.Context()
        context.IPV6 = 1
        self.dealer = context.socket(zmq.DEALER)
        logger.debug("connecting to %s", address)
        self.dealer.connect(address)
        self.dealer.send_json("get settings")
        if not self.dealer.poll(5000):
            logger.debug("dealer does not answer, terminating")
            exit(1)
        reply = self.dealer.recv_json()
        logger.debug("received %s", reply)
        assert reply[0] == "settings"
        self.preforked_rlimits = reply[1].pop("preforked_rlimits")
        self.max_kernels = reply[1].pop("max_kernels")
        self.max_preforked = reply[1].pop("max_preforked")
        self.waiter = context.socket(zmq.PULL)
        self.waiter_port = self.waiter.bind_to_random_port("tcp://*")
        self.kernels = dict()   # id: KernelProcess
        self.forking = None
github target / strelka / server / lib.py
def setup_zmq(self):
        """Establishes ZMQ socket and poller.

        This method creates a ZMQ socket that connects to the broker and
        creates a ZMQ poller to read broker messages on. The DEALER socket
        will automatically reconnect to the broker in case of disconnection.
        """
        logging.debug(f"{self.name} ({self.identity.decode()}): connecting"
                      " to broker")
        context = zmq.Context()
        self.task_socket = context.socket(zmq.DEALER)
        self.task_socket.setsockopt(zmq.IDENTITY, self.identity)
        self.task_socket.setsockopt(zmq.RECONNECT_IVL, self.task_socket_reconnect)
        self.task_socket.setsockopt(zmq.RECONNECT_IVL_MAX, self.task_socket_reconnect_max)
        self.task_socket.connect(f"tcp://{self.broker}:{self.task_socket_port}")
        self.task_poller = zmq.Poller()
        self.task_poller.register(self.task_socket, zmq.POLLIN)
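
The excerpt above registers the poller but stops before the receive loop. A minimal sketch of how such a loop might read broker messages is shown below; the 1000 ms timeout and the recv_multipart handling are assumptions for illustration, not code from the strelka project.

import zmq

def poll_broker(task_socket, task_poller, timeout_ms=1000):
    """Return any frames the broker sent within timeout_ms, else None."""
    events = dict(task_poller.poll(timeout_ms))
    if events.get(task_socket) == zmq.POLLIN:
        return task_socket.recv_multipart()
    return None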
github funcx-faas / funcX / funcx / executors / high_throughput / interchange.py
        self.client_address = client_address
        self.interchange_address = interchange_address
        self.suppress_failure = suppress_failure
        self.poll_period = poll_period

        self.serializer = FuncXSerializer()
        logger.info("Attempting connection to client at {} on ports: {},{},{}".format(
            client_address, client_ports[0], client_ports[1], client_ports[2]))
        self.context = zmq.Context()
        self.task_incoming = self.context.socket(zmq.DEALER)
        self.task_incoming.set_hwm(0)
        self.task_incoming.RCVTIMEO = 10  # in milliseconds
        logger.info("Task incoming on tcp://{}:{}".format(client_address, client_ports[0]))
        self.task_incoming.connect("tcp://{}:{}".format(client_address, client_ports[0]))

        self.results_outgoing = self.context.socket(zmq.DEALER)
        self.results_outgoing.set_hwm(0)
        logger.info("Results outgoing on tcp://{}:{}".format(client_address, client_ports[1]))
        self.results_outgoing.connect("tcp://{}:{}".format(client_address, client_ports[1]))

        self.command_channel = self.context.socket(zmq.DEALER)
        self.command_channel.RCVTIMEO = 1000  # in milliseconds
        # self.command_channel.set_hwm(0)
        logger.info("Command channel on tcp://{}:{}".format(client_address, client_ports[2]))
        self.command_channel.connect("tcp://{}:{}".format(client_address, client_ports[2]))
        logger.info("Connected to client")

        self.pending_task_queue = {}
        self.containers = {}
        self.total_pending_task_count = 0
        self.fxs = FuncXClient()
github cocaine / cocaine-core / scripts / cocaine-pause-app.py
def main(apps):
    context = zmq.Context()
    
    request = context.socket(zmq.DEALER)
    request.connect('tcp://localhost:5000')

    # Pausing the apps
    request.send_multipart([
        msgpack.packb(SLOT_PAUSE_APP),
        msgpack.packb([apps])
    ])

    pprint(msgpack.unpackb(request.recv()))
github yl-1993 / hfsoftmax / models / ext_layers / paramclient.py
def __init__(self, _id):
        print('Connecting to ParamServer ...')
        context = zmq.Context()
        socket = context.socket(zmq.DEALER)
        identity = u'%d' % _id
        socket.identity = identity.encode('ascii')
        socket.connect('tcp://localhost:5570')
        print('Client %s started' % (identity))
        self._poll = zmq.Poller()
        self._poll.register(socket, zmq.POLLIN)
        self._socket = socket
        self._context = context
        self._register()
github domogik / domogik / src / domogik / mq / reqrep / worker.py
def _create_stream(self):
        """Helper to create the socket and the stream.
        """
        socket = self.context.socket(zmq.DEALER)
        ioloop = IOLoop.instance()
        self.stream = ZMQStream(socket, ioloop)
        self.stream.on_recv(self._on_message)
        self.stream.socket.setsockopt(zmq.LINGER, 0)
        self.stream.connect(self.endpoint)
        self.ticker = PeriodicCallback(self._tick, self.HB_INTERVAL)
        self._send_ready()
        self.ticker.start()
        return
github funcx-faas / funcX / funcx / executor / parsl / executors / extreme_scale / mpi_worker_pool.py
        heartbeat_period : int
             Number of seconds after which a heartbeat message is sent to the interchange

        """
        self.uid = uid

        self.context = zmq.Context()
        self.task_incoming = self.context.socket(zmq.DEALER)
        self.task_incoming.setsockopt(zmq.IDENTITY, uid.encode('utf-8'))
        # Linger is set to 0, so that the manager can exit even when there might be
        # messages in the pipe
        self.task_incoming.setsockopt(zmq.LINGER, 0)
        self.task_incoming.connect(task_q_url)

        self.result_outgoing = self.context.socket(zmq.DEALER)
        self.result_outgoing.setsockopt(zmq.IDENTITY, uid.encode('utf-8'))
        self.result_outgoing.setsockopt(zmq.LINGER, 0)
        self.result_outgoing.connect(result_q_url)

        logger.info("Manager connected")
        self.max_queue_size = max_queue_size + comm.size

        # Creating larger queues to avoid queues blocking
        # These can be updated after queue limits are better understood
        self.pending_task_queue = queue.Queue()
        self.pending_result_queue = queue.Queue()
        self.ready_worker_queue = queue.Queue()

        self.tasks_per_round = 1

        self.heartbeat_period = heartbeat_period
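
The heartbeat_period documented in this excerpt is consumed by a loop that the excerpt does not show. As a rough sketch of the idea only, with a placeholder payload rather than funcX's actual wire format, a periodic heartbeat over a DEALER socket could look like this:

import pickle
import time

def heartbeat_loop(task_incoming, heartbeat_period):
    """Send a placeholder heartbeat frame every heartbeat_period seconds."""
    while True:
        task_incoming.send(pickle.dumps({'type': 'heartbeat', 'time': time.time()}))
        time.sleep(heartbeat_period)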
github wehr-lab / autopilot / autopilot / core / networking.py
def init_networking(self):
        """
        Creates socket, connects to specified port on localhost,
        and starts the :meth:`~Net_Node.threaded_loop` as a daemon thread.
        """
        self.sock = self.context.socket(zmq.DEALER)
        self.sock.identity = self.id
        #self.sock.probe_router = 1

        # net nodes are local only
        self.sock.connect('tcp://localhost:{}'.format(self.port))

        # wrap in zmqstreams and start loop thread
        self.sock = ZMQStream(self.sock, self.loop)
        self.sock.on_recv(self.handle_listen)

        self.loop_thread = threading.Thread(target=self.threaded_loop)
        self.loop_thread.daemon = True
        self.loop_thread.start()

        self.repeat_thread = threading.Thread(target=self.repeat)
        self.repeat_thread.daemon = True
github csirtgadgets / csirtg-smrt-v1 / csirtg_smrt / client / zzmq.py
import logging
import os.path

from csirtg_indicator import Indicator
import zmq
from pprint import pprint

TYPE = os.environ.get('CSIRTG_SMRT_ZMQ_TYPE', 'PUB')
TOPIC = os.environ.get('CSIRTG_SMRT_ZMQ_TOPIC', 'scanners')
ENDPOINT = os.environ.get('CSIRTG_SMRT_ZMQ_ENDPOINT', 'ipc:///tmp/csirtg_smrt.ipc')

TYPE_MAPPING = {
    'PUB': zmq.PUB,
    'PUSH': zmq.PUSH,
    'PUSH_ZYRE_GATEWAY': zmq.DEALER,
}

logger = logging.getLogger()


class _Zmq(object):

    __name__ = 'zmq'

    def __init__(self, socket_type=TYPE, topic=TOPIC, endpoint=ENDPOINT, *args, **kwargs):
        self.socket_type = socket_type
        self.topic = topic
        self.endpoint = endpoint
        if not endpoint:
            raise ValueError("Invalid endpoint: '{}'".format(endpoint))
github yatsu / react-tornado-graphql-example / tornado / tornado_graphql_example / web_app.py
def _execute_command(self, command):
        if len(self.job_servers) == 0:
            app_log.error('there is no job server')
            return

        server = self.job_servers[self.job_server_index]
        self.job_server_index = (self.job_server_index + 1) % len(self.job_servers)

        context = zmq.Context.instance()
        zmq_sock = context.socket(zmq.DEALER)
        zmq_sock.linger = 1000
        zmq_sock.identity = bytes(str(os.getpid()), 'ascii')
        ip = server['ip']
        if ip == '*':
            ip = 'localhost'
        url = 'tcp://{0}:{1}'.format(ip, server['zmq_port'])
        app_log.info('connect %s', url)
        zmq_sock.connect(url)

        command = json_encode({'command': command})
        app_log.info('command: %s', command)
        zmq_sock.send_multipart([b'0', bytes(command, 'ascii')])

        stream = ZMQStream(zmq_sock)
        stream.on_recv(self.response_handler)