How to use the funcx.serialize.FuncXSerializer class in funcx

To help you get started, we’ve selected a few funcx examples based on popular ways FuncXSerializer is used in public projects.

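Every example below follows the same core pattern: construct a FuncXSerializer, then call serialize() to turn functions or data into transportable strings and deserialize() to turn them back. A minimal, standalone round trip using only those two methods:

from funcx.serialize import FuncXSerializer

fx_serializer = FuncXSerializer()

def double(x):
    return 2 * x

# Functions and data alike are reduced to strings for transport...
payload = fx_serializer.serialize(double)
# ...and reconstituted into live objects on the other side.
fn = fx_serializer.deserialize(payload)
assert fn(21) == 42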

github funcx-faas / funcX / funcx / executors / high_throughput / interchange.py
        try:
            os.makedirs(self.logdir)  # create the log directory if needed
        except FileExistsError:
            pass

        start_file_logger("{}/interchange.log".format(self.logdir), level=logging_level)
        logger.info("logger location {}".format(logger.handlers))
        logger.info("Initializing Interchange process with Endpoint ID: {}".format(endpoint_id))
        self.config = config
        logger.info("Got config : {}".format(config))

        self.strategy = self.config.strategy
        self.client_address = client_address
        self.interchange_address = interchange_address
        self.suppress_failure = suppress_failure
        self.poll_period = poll_period

        self.serializer = FuncXSerializer()
        logger.info("Attempting connection to client at {} on ports: {},{},{}".format(
            client_address, client_ports[0], client_ports[1], client_ports[2]))
        self.context = zmq.Context()
        self.task_incoming = self.context.socket(zmq.DEALER)
        self.task_incoming.set_hwm(0)
        self.task_incoming.RCVTIMEO = 10  # in milliseconds
        logger.info("Task incoming on tcp://{}:{}".format(client_address, client_ports[0]))
        self.task_incoming.connect("tcp://{}:{}".format(client_address, client_ports[0]))

        self.results_outgoing = self.context.socket(zmq.DEALER)
        self.results_outgoing.set_hwm(0)
        logger.info("Results outgoing on tcp://{}:{}".format(client_address, client_ports[1]))
        self.results_outgoing.connect("tcp://{}:{}".format(client_address, client_ports[1]))

        self.command_channel = self.context.socket(zmq.DEALER)
        self.command_channel.RCVTIMEO = 1000  # in milliseconds
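
A standalone sketch of the send side of this wiring. The address, port, and payload shape below are placeholders; the point is that FuncXSerializer.serialize() returns a string, so it must be encoded to bytes before crossing a ZeroMQ socket like the ones created above.

import zmq
from funcx.serialize import FuncXSerializer

serializer = FuncXSerializer()
context = zmq.Context()
results_outgoing = context.socket(zmq.DEALER)
results_outgoing.set_hwm(0)  # no high-water mark, matching the snippet above
results_outgoing.connect("tcp://127.0.0.1:55001")  # placeholder address/port

reply = serializer.serialize({"task_id": "tid-0", "result": 42})
results_outgoing.send(reply.encode("utf-8"))
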
github funcx-faas / funcX / funcx / executors / high_throughput / funcx_worker.py
    def __init__(self, worker_id, address, port, logdir, debug=False, worker_type='RAW'):

        self.worker_id = worker_id
        self.address = address
        self.port = port
        self.logdir = logdir
        self.debug = debug
        self.worker_type = worker_type
        self.serializer = FuncXSerializer()
        self.serialize = self.serializer.serialize
        self.deserialize = self.serializer.deserialize

        global logger
        logger = set_file_logger('{}/funcx_worker_{}.log'.format(logdir, worker_id),
                                 name="worker_log",
                                 level=logging.DEBUG if debug else logging.INFO)

        logger.info('Initializing worker {}'.format(worker_id))
        logger.info('Worker is of type: {}'.format(worker_type))

        if debug:
            logger.debug('Debug logging enabled')

        self.context = zmq.Context()
        self.poller = zmq.Poller()
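
The serialize/deserialize helpers bound above are what the worker's task loop leans on. A hedged sketch of that loop's core; the (fn, args, kwargs) message layout and method name are assumptions for illustration, while serialize and deserialize are the real API:

    def run_task(self, task_buffer):
        # Tasks arrive as one serialized payload; the exact layout
        # (a (fn, args, kwargs) tuple here) is an assumption.
        fn, args, kwargs = self.deserialize(task_buffer)
        result = fn(*args, **kwargs)
        # Results are re-serialized before being shipped back to the manager.
        return self.serialize(result)
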
github funcx-faas / funcX / funcx / executors / high_throughput / executor.py
"""

from concurrent.futures import Future
import os
import time
import logging
import threading
import queue
import pickle
import daemon
from multiprocessing import Process, Queue

#from ipyparallel.serialize import pack_apply_message  # ,unpack_apply_message
from ipyparallel.serialize import deserialize_object  # ,serialize_object
from funcx.serialize import FuncXSerializer
fx_serializer = FuncXSerializer()

from parsl.executors.high_throughput import interchange
from parsl.executors.errors import *
from parsl.executors.base import ParslExecutor
from parsl.dataflow.error import ConfigurationError

from parsl.utils import RepresentationMixin
from parsl.providers import LocalProvider


from funcx.executors.high_throughput import zmq_pipes

logger = logging.getLogger(__name__)

BUFFER_THRESHOLD = 1024 * 1024
ITEM_THRESHOLD = 1024
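
The module-level fx_serializer above is shared across the executor's submit and result paths. A sketch of the fuller round trip, assuming pack_buffers() and unpack_buffers() combine several serialized chunks into one buffer and split them apart again (both are FuncXSerializer methods):

def word_count(text):
    return len(text.split())

fn_buf = fx_serializer.serialize(word_count)
args_buf = fx_serializer.serialize(("hello funcx world",))
packed = fx_serializer.pack_buffers([fn_buf, args_buf])

# On the receiving side, the buffer is split and each piece deserialized.
fn_buf2, args_buf2 = fx_serializer.unpack_buffers(packed)
fn = fx_serializer.deserialize(fn_buf2)
args = fx_serializer.deserialize(args_buf2)
assert fn(*args) == 3
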
github funcx-faas / funcX / funcx / sdk / client.py
                                     no_local_server=kwargs.get("no_local_server", True),
                                     no_browser=kwargs.get("no_browser", True),
                                     refresh_tokens=kwargs.get("refresh_tokens", True),
                                     force=force_login)

            all_authorizers = self.native_client.get_authorizers_by_scope(requested_scopes=scopes)
            fx_authorizer = all_authorizers[fx_scope]
            search_authorizer = all_authorizers[search_scope]

        super(FuncXClient, self).__init__("funcX",
                                          environment='funcx',
                                          authorizer=fx_authorizer,
                                          http_timeout=http_timeout,
                                          base_url=funcx_service_address,
                                          **kwargs)
        self.fx_serializer = FuncXSerializer()
        self.searcher = SearchHelper(authorizer=search_authorizer)
github DLHub-Argonne / dlhub_sdk / dlhub_sdk / client.py
                             token_dir=_token_dir,
                             no_local_server=kwargs.get("no_local_server", True),
                             no_browser=kwargs.get("no_browser", True))
            dlh_authorizer = auth_res["dlhub"]
            fx_authorizer = auth_res[fx_scope]
            self._search_client = auth_res["search"]
            self._fx_client = FuncXClient(force_login=force_login,
                                          fx_authorizer=fx_authorizer,
                                          no_local_server=kwargs.get("no_local_server", True),
                                          no_browser=kwargs.get("no_browser", True),
                                          funcx_service_address='https://funcx.org/api/v1')

        # funcX endpoint to use
        self.fx_endpoint = '86a47061-f3d9-44f0-90dc-56ddc642c000'
        # self.fx_endpoint = '2c92a06a-015d-4bfa-924c-b3d0c36bdad7'
        self.fx_serializer = FuncXSerializer()
        self.fx_cache = {}
        super(DLHubClient, self).__init__("DLHub", environment='dlhub', authorizer=dlh_authorizer,
                                          http_timeout=http_timeout, base_url=DLHUB_SERVICE_ADDRESS,
                                          **kwargs)
github funcx-faas / funcX / funcx / sdk / search.py
        Parameters
        ----------
        gsearchresult : dict
        """
        # wrapper for an array of results
        results = gsearchresult['results']
        super().__init__(results)

        # track data about where we are in total results
        self.has_next_page = gsearchresult['has_next_page']
        self.offset = gsearchresult['offset']
        self.total = gsearchresult['total']

        # we can use this to load functions and run them
        self.serializer = FuncXSerializer()

        # Reformat for pretty printing and easy viewing
        self._init_columns()
        self.table = Texttable(max_width=120)
        self.table.header(self.columns)
        for res in self:
            self.table.add_row([
                res[col] for col in self.columns
            ])
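
The comment above ("we can use this to load functions and run them") is why the search results hold a serializer. A sketch of what that could look like; the method name and the 'function_code' field are assumptions, not the real search schema:

    def load_function(self, index):
        # 'function_code' is an assumed field name for the serialized body.
        serialized_fn = self[index]['function_code']
        return self.serializer.deserialize(serialized_fn)
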
github funcx-faas / funcX / funcx / executors / high_throughput / funcx_manager.py
        self.task_queues = {}
        if worker_type:
            self.task_queues[worker_type] = queue.Queue()
        self.outstanding_task_count = {}
        self.task_type_mapping = {}

        self.pending_result_queue = multiprocessing.Queue()

        self.max_queue_size = max_queue_size + self.max_worker_count
        self.tasks_per_round = 1

        self.heartbeat_period = heartbeat_period
        self.heartbeat_threshold = heartbeat_threshold
        self.poll_period = poll_period
        self.serializer = FuncXSerializer()
        self.next_worker_q = []  # FIFO queue for spinning up workers.
        self.worker_procs = {}
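
A sketch of what self.serializer is used for on the manager side: splitting a packed task buffer back into its serialized parts before dispatching to a worker queue. The two-chunk layout and the helper name are assumptions; unpack_buffers() and deserialize() are the serializer's methods.

    def unpack_task(self, packed_task):
        # Assumes the packed buffer holds exactly two chunks: fn and args.
        fn_buf, args_buf = self.serializer.unpack_buffers(packed_task)
        return (self.serializer.deserialize(fn_buf),
                self.serializer.deserialize(args_buf))
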
github funcx-faas / funcX / funcx / executors / high_throughput / funcx_manager.py
            min_port=self.internal_worker_port_range[0],
            max_port=self.internal_worker_port_range[1])

        logger.info("Manager listening on {} port for incoming worker connections".format(self.worker_port))

        self.task_queues = {}

        self.pending_result_queue = multiprocessing.Queue()

        self.max_queue_size = max_queue_size + self.worker_count
        self.tasks_per_round = 1

        self.heartbeat_period = heartbeat_period
        self.heartbeat_threshold = heartbeat_threshold
        self.poll_period = poll_period
        self.serializer = FuncXSerializer()
        self.next_worker_q = []  # FIFO queue for spinning up workers.
        self.worker_procs = []
github funcx-faas / funcX / funcx / sdk / utils / batch.py
    def __init__(self):
        self.tasks = []
        self.fx_serializer = FuncXSerializer()
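
A sketch of how a task might be added to this batch: arguments are serialized and packed into a single payload. The add() signature and task-dict layout are assumptions; serialize() and pack_buffers() are the serializer's own methods.

    def add(self, function_id, endpoint_id, *args, **kwargs):
        # Positional and keyword arguments are serialized separately,
        # then packed into one transport buffer.
        ser_args = self.fx_serializer.serialize(args)
        ser_kwargs = self.fx_serializer.serialize(kwargs)
        payload = self.fx_serializer.pack_buffers([ser_args, ser_kwargs])
        self.tasks.append({'endpoint': endpoint_id,
                           'function': function_id,
                           'payload': payload})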