How to use the msgpack.packb function in msgpack

To help you get started, we've selected a few msgpack.packb examples based on popular ways the function is used in public projects.

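Before diving into the project snippets, here's a minimal, self-contained round trip using the current msgpack-python API (the payload dict is just an illustration):

import msgpack

# Pack a Python object into compact binary bytes.
payload = {'id': 42, 'tags': ['alpha', 'beta']}
packed = msgpack.packb(payload, use_bin_type=True)

# Unpack it back; raw=False decodes msgpack strings to Python str.
restored = msgpack.unpackb(packed, raw=False)
assert restored == payload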

github cocaine / cocaine-flow / flow-tools / views / SocketIO.py
    @event('update:app')
    @authorization_required
    @source
    def update_app(self, data, key):
        '''
        Update fields of app structure
        '''
        APP_LOGGER.error(str(data))
        yield Service("flow-app").enqueue("update", msgpack.packb(data))
        self.emit(key, {"app": data})
github technologiescollege / Blockly-at-rduino / supervision / s2aio / Lib / site-packages / autobahn / wamp / serializer.py
def serialize(self, obj):
    """
    Implements :func:`autobahn.wamp.interfaces.IObjectSerializer.serialize`
    """
    data = msgpack.packb(obj, use_bin_type=self.ENABLE_V5)
    if self._batched:
        return struct.pack("!L", len(data)) + data
    else:
        return data
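
The use_bin_type flag (tied here to self.ENABLE_V5) enables the msgpack 2.0 bin type, which is what lets the receiving side tell binary payloads apart from text. A quick illustration:

import msgpack

# With use_bin_type=True, str and bytes survive a round trip as distinct types.
packed = msgpack.packb({'text': 'abc', 'blob': b'\x00\x01'}, use_bin_type=True)
assert msgpack.unpackb(packed, raw=False) == {'text': 'abc', 'blob': b'\x00\x01'}
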
github brmmm3 / fastthreadpool / examples / benchmarks / benchmark.py
def pack_compress_gen_cb(data):
    # noinspection PyArgumentList
    yield zstd.ZstdCompressor(write_content_size=True, write_checksum=True,
                              level=14).compress(msgpack.packb(data, use_bin_type=True))
    # yield lz4.block.compress(msgpack.packb(data, use_bin_type=True))
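
Assuming zstd here is the zstandard package, the matching consumer would reverse the pipeline, decompressing first and then unpacking; a one-shot decompress() works here because write_content_size=True embeds the payload size in the frame. A sketch:

import msgpack
import zstandard as zstd

def unpack_decompress(blob):
    # Reverse of pack_compress_gen_cb: zstd-decompress, then msgpack-unpack.
    raw = zstd.ZstdDecompressor().decompress(blob)
    return msgpack.unpackb(raw, raw=False)
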
github yandex / mastermind / src / cocaine-app / jobs / tasks / change_couple_frozen_status.py
def _try_write_meta_key(self, session):
    couple = storage.couples[self.couple]

    settings = couple.groupset_settings
    settings['frozen'] = self.frozen
    metakey = couple.compose_group_meta(couple, settings)

    s = session.clone()
    s.add_groups([g.group_id for g in couple.groups])
    _, failed_groups = helpers.write_retry(
        s,
        keys.SYMMETRIC_GROUPS_KEY,
        msgpack.packb(metakey),
        retries=1,  # retries will be performed by jobs processor itself
    )
    if failed_groups:
        logger.error(
            'Job {job_id}, task {task_id}: failed to write metakey to groups {groups}'.format(
                job_id=self.parent_job.id,
                task_id=self.id,
                groups=failed_groups,
            )
        )
    else:
        logger.debug(
            'Job {job_id}, task {task_id}: metakey is successfully written '
            'to couple {couple}'.format(
                job_id=self.parent_job.id,
                task_id=self.id,
                couple=couple,
            )
        )
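
Note that packb is called here without use_bin_type. In the older msgpack-python releases this project likely used, that meant strings were written with the legacy raw type (since msgpack 1.0 the flag defaults to True); either way a reader can decode them back to str:

import msgpack

metakey = {'frozen': True, 'version': 2}      # illustrative metakey contents
blob = msgpack.packb(metakey)
assert msgpack.unpackb(blob, raw=False) == metakey
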
github inveniosoftware / invenio / invenio / modules / upgrader / upgrades / invenio_2013_09_16_aidPERSONIDDATA_datablob.py
            operations = list()
            for tag, value in request_ticket_attributes:
                if tag == 'confirm':
                    operations.append(('assign', value))
                elif tag == 'repeal':
                    operations.append(('reject', value))
                else:
                    new_request_ticket[tag] = value

            new_request_ticket['operations'] = operations

            if new_request_ticket['operations']:
                new_request_tickets.append(new_request_ticket)

        new_request_tickets_num = len(new_request_tickets)
        new_request_tickets = serialize(new_request_tickets)

        run_sql("""insert into aidPERSONIDDATA
                   (personid, tag, datablob, opt1)
                   values (%s, %s, %s, %s)""",
                   (pid, 'request_tickets', new_request_tickets, new_request_tickets_num))

    run_sql("""delete from aidPERSONIDDATA
               where tag like %s""",
               ('rt_%', ))
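
The serialize helper used above isn't shown in this excerpt; in this upgrade script it is presumably a thin wrapper over msgpack.packb, along these lines (a sketch, not Invenio's actual helper):

import msgpack

def serialize(obj):
    # Hypothetical stand-in: pack the ticket list into a binary
    # blob suitable for the datablob column.
    return msgpack.packb(obj)
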
github earthgecko / skyline / skyline / analyzer_dev / algorithms_dev.py
def is_anomalously_anomalous(metric_name, ensemble, datapoint):
    """
    This method runs a meta-analysis on the metric to determine whether the
    metric has a past history of triggering. TODO: weight intervals based on datapoint
    """
    # We want the datapoint to avoid triggering twice on the same data
    new_trigger = [time(), datapoint]

    # Get the old history
    raw_trigger_history = redis_conn.get('trigger_history.' + metric_name)
    if not raw_trigger_history:
        redis_conn.set('trigger_history.' + metric_name, packb([(time(), datapoint)]))
        return True

    trigger_history = unpackb(raw_trigger_history)

    # Are we (probably) triggering on the same data?
    if (new_trigger[1] == trigger_history[-1][1] and
            new_trigger[0] - trigger_history[-1][0] <= 300):
        return False

    # Update the history
    trigger_history.append(new_trigger)
    redis_conn.set('trigger_history.' + metric_name, packb(trigger_history))

    # Should we surface the anomaly?
    trigger_times = [x[0] for x in trigger_history]
    intervals = [
        trigger_times[i + 1] - trigger_times[i]
        for i, v in enumerate(trigger_times)
        if (i + 1) < len(trigger_times)
    ]

    # Surface the anomaly only if the latest interval is more than
    # three standard deviations from the mean interval.
    series = pandas.Series(intervals)
    mean = series.mean()
    stdDev = series.std()

    return abs(intervals[-1] - mean) > 3 * stdDev
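
One subtlety this code leans on: msgpack has no tuple type, so the (time, datapoint) pairs written with packb come back from unpackb as lists, which is why the history is indexed positionally:

import msgpack

packed = msgpack.packb([(1700000000.0, 42.5)])
assert msgpack.unpackb(packed) == [[1700000000.0, 42.5]]  # tuples become lists
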
github sbl-sdsc / mmtf-pyspark / mmtfPyspark / utils / mmtfCodec.py
def get_msgpack(data):
    """Get the msgpack of the encoded data."""
    return msgpack.packb(data, use_bin_type=True)
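
Usage is a one-liner; any msgpack-serializable structure works (the dict below is purely illustrative):

encoded = get_msgpack({'structureId': '1AKE', 'numAtoms': 3459})
assert isinstance(encoded, bytes)
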
github iotile / coretools / iotilegateway / iotilegateway / supervisor / ws_handler.py
def pack(self, message):
    """Pack a message into a binary packed message with datetime handling."""
    return msgpack.packb(message, use_bin_type=True, default=self.encode_datetime)
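
encode_datetime is defined elsewhere in this handler; msgpack calls the default hook for any type it can't serialize natively. A minimal sketch of such a hook (the marker key is an assumption, not IOTile's actual wire format):

import datetime
import msgpack

def encode_datetime(obj):
    # Hypothetical fallback: turn datetimes into a tagged, unpackable dict.
    if isinstance(obj, datetime.datetime):
        return {'__datetime__': True, 'value': obj.isoformat()}
    raise TypeError('Cannot serialize %r' % (obj,))

packed = msgpack.packb({'ts': datetime.datetime(2020, 1, 1)},
                       use_bin_type=True, default=encode_datetime)
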
github TUDelft-CNS-ATM / bluesky / bluesky / network / node.py
def send_stream(self, name, data):
    self.stream_out.send_multipart(
        [name + self.node_id,
         msgpack.packb(data, default=encode_ndarray, use_bin_type=True)])
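
encode_ndarray comes from BlueSky's own helpers; a default hook for NumPy arrays typically describes the array so the peer can rebuild it. A sketch under that assumption (field names here are illustrative):

import numpy as np
import msgpack

def encode_ndarray(obj):
    # Hypothetical hook: record dtype, shape and raw bytes for reconstruction.
    if isinstance(obj, np.ndarray):
        return {'__ndarray__': True,
                'dtype': str(obj.dtype),
                'shape': obj.shape,
                'data': obj.tobytes()}
    raise TypeError('Cannot serialize %r' % (obj,))

packed = msgpack.packb({'pos': np.zeros(3)},
                       default=encode_ndarray, use_bin_type=True)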