How to use the msgpack.dumps function in msgpack

To help you get started, we’ve selected a few msgpack.dumps examples, drawn from popular ways the function is used in public projects.

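Before the project examples, a minimal sketch of the call itself: msgpack.dumps is an alias for msgpack.packb and serializes a Python object to bytes, while msgpack.loads (an alias for msgpack.unpackb) reverses it. Several examples below pass use_bin_type=True, which keeps bytes and str distinct on the wire and has been the default since msgpack 1.0.

import msgpack

# Pack a Python object to bytes, then unpack it again.
payload = {'name': 'reading', 'values': [1, 2, 3]}
packed = msgpack.dumps(payload, use_bin_type=True)   # alias for msgpack.packb

unpacked = msgpack.loads(packed, raw=False)          # alias for msgpack.unpackb
assert unpacked == payload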

github openstack / ceilometer / tests / collector / test_service.py
def _make_fake_socket(self, family, type):
        udp_socket = self.mox.CreateMockAnything()
        udp_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        udp_socket.bind((self.CONF.collector.udp_address,
                         self.CONF.collector.udp_port))

        def stop_udp(anything):
            # Make the loop stop
            self.srv.stop()

        udp_socket.recvfrom(64 * 1024).WithSideEffects(
            stop_udp).AndReturn(
                (msgpack.dumps(self.counter),
                 ('127.0.0.1', 12345)))

        self.mox.ReplayAll()

        return udp_socket
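The test above packs a counter so the mocked UDP socket can hand it back as a datagram payload. A rough sketch of the receive side such a test exercises (simplified, not the actual ceilometer collector code):

import msgpack

def receive_one_sample(sock):
    # Read one datagram from an already-bound UDP socket and unpack it.
    data, addr = sock.recvfrom(64 * 1024)
    return msgpack.loads(data, raw=False)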
github andresriancho / w3af / w3af / core / data / db / history.py
            path, fname = os.path.split(path_fname)
            split_path = path.split('/')

            for i in xrange(len(split_path) + 1):
                test_path = '/'.join(split_path[:i])
                if not os.path.exists(test_path):
                    msg = ('Directory does not exist: "%s" while trying to'
                           ' write DB history to "%s"')
                    raise IOError(msg % (test_path, path_fname))

            raise

        data = (self.request.to_dict(),
                self.response.to_dict(),
                self._MSGPACK_CANARY)
        msgpack_data = msgpack.dumps(data)

        req_res.write(msgpack_data)
        req_res.close()

        response_id = resp.get_id()
        self._queue_compression_requests(response_id)

        pending_compression = self._get_pending_compression_job()

        if pending_compression is not None:
            self._process_pending_compression(pending_compression)

        return True
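The canary packed alongside the request/response pair lets the read path detect corrupt records. A sketch of the matching load step, assuming the same tuple layout (the canary value here is illustrative, not w3af's real marker):

import msgpack

MSGPACK_CANARY = 'canary-token'  # hypothetical; w3af defines its own value

def load_history(serialized):
    request_dict, response_dict, canary = msgpack.loads(serialized, raw=False)
    if canary != MSGPACK_CANARY:
        raise ValueError('msgpack canary mismatch: stored record is corrupt')
    return request_dict, response_dict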
github ooici / pyon / pyon / net / interceptor.py
def int_out(self, invocation):
        log.debug("EncoderInterceptor.int_out: %s", invocation)
        log.debug("Pre-transform: %s", invocation.message)
        invocation.message = msgpack.dumps(invocation.message)
        log.debug("Post-transform: %s", invocation.message)
        return invocation
github drbh / ncipfs / ncipfs.py
"""
        cid = client.add_contents(
            policy_pubkey=policy_pub_key
        )
        """
        policy_pubkey = alicia.get_policy_pubkey_from_label(my_label)

        data_source = Enrico(policy_encrypting_key=policy_pubkey)
        data_source_public_key = bytes(data_source.stamp)
        heart_rate = 80
        now = time.time()
        kits = list()
        heart_rate = contents
        now += 3
        heart_rate_data = { 'heart_rate': heart_rate, 'timestamp': now, }
        plaintext = msgpack.dumps(heart_rate_data, use_bin_type=True)
        message_kit, _signature = data_source.encrypt_message(plaintext)
        kit_bytes = message_kit.to_bytes()
        kits.append(kit_bytes)
        data = { 'data_source': data_source_public_key, 'kits': kits, }
#         print("🚀 ADDING TO IPFS D-STORAGE NETWORK 🚀")
        d = msgpack.dumps(data, use_bin_type=True)

        ### NETWORK ERROR OUT ON FALLBACK 
        cid = self.ipfs_gateway_api.add_bytes(d)
#         print("File Address:\t%s" % cid)
        return cid
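Reading the kits back out is the mirror image: fetch the packed bytes by CID and unpack them. A hypothetical read path (cat follows the common IPFS HTTP client API; it is not shown in ncipfs itself):

import msgpack

def fetch_contents(ipfs_client, cid):
    # cat() returns the raw bytes stored under the CID; unpack them back
    # into the {'data_source': ..., 'kits': [...]} structure built above.
    raw = ipfs_client.cat(cid)
    return msgpack.loads(raw, raw=False)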
github klmitch / turnstile / turnstile / compactor.py
removes outdated updates.

    :param db: A database handle for the Redis database.
    :param buck_key: A turnstile.limits.BucketKey instance containing
                     the bucket key.
    :param limit: The turnstile.limits.Limit object corresponding to
                  the bucket.
    """

    # Suck in the bucket records and generate our bucket
    records = db.lrange(str(buck_key), 0, -1)
    loader = limits.BucketLoader(limit.bucket_class, db, limit,
                                 str(buck_key), records, stop_summarize=True)

    # We now have the bucket loaded in; generate a 'bucket' record
    buck_record = msgpack.dumps(dict(bucket=loader.bucket.dehydrate(),
                                     uuid=str(uuid.uuid4())))

    # Now we need to insert it into the record list
    result = db.linsert(str(buck_key), 'after', loader.last_summarize_rec,
                        buck_record)

    # Were we successful?
    if result < 0:
        # Insert failed; we'll try again when max_age is hit
        LOG.warning("Bucket compaction on %s failed; will retry" % buck_key)
        return

    # OK, we have confirmed that the compacted bucket record has been
    # inserted correctly; now all we need to do is trim off the
    # outdated update records
    db.ltrim(str(buck_key), loader.last_summarize_idx + 1, -1)
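Each compacted record later comes back out of the Redis list as packed bytes. A minimal sketch of unpacking one, assuming the dict layout written above:

import msgpack

def parse_bucket_record(record):
    # The record carries the dehydrated bucket plus a uuid marker.
    rec = msgpack.loads(record, raw=False)
    return rec['bucket'], rec['uuid']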
github pioneers / PieCentral / runtime / runtime / service / control.py
async def main(self):
        context = zmq.asyncio.Context()
        socket = context.socket(zmq.REP)
        socket.bind('tcp://*:8120')

        while True:
            print(await socket.poll(10))
            msg = msgpack.loads(await socket.recv())
            print(msg)
            await socket.send(msgpack.dumps({'i': msg[b'i'] + 1}))
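A hypothetical REQ-side client for the loop above: pack the request, send it, and unpack the incremented reply. Whether the reply keys come back as b'i' or 'i' depends on the msgpack version and the raw option.

import msgpack
import zmq

context = zmq.Context()
socket = context.socket(zmq.REQ)
socket.connect('tcp://localhost:8120')

socket.send(msgpack.dumps({'i': 0}))
reply = msgpack.loads(socket.recv())
print(reply)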
github stratosphereips / Ludus / ludus.py
def sendline(self,data):
        packed = msgpack.dumps(data)
        self.zmqsocket.send_multipart([self.TOPIC, packed])
    def close(self):
        self.zmqsocket.close()  # assumed body; the excerpt ends at the def line
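A hypothetical subscriber for this publisher: receive the [topic, payload] multipart frame and unpack the payload (the endpoint address is illustrative):

import msgpack
import zmq

context = zmq.Context()
sub = context.socket(zmq.SUB)
sub.connect('tcp://localhost:5555')  # illustrative endpoint
sub.setsockopt(zmq.SUBSCRIBE, b'')   # subscribe to every topic

topic, packed = sub.recv_multipart()
data = msgpack.loads(packed, raw=False)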
github deliveryhero / lymph / lymph / serializers / base.py
return msgpack.ExtType(EMBEDDED_MSGPACK_TYPE, data)


def ext_hook(code, data):
    if code == EMBEDDED_MSGPACK_TYPE:
        return msgpack_serializer.loads(data)
    return msgpack.ExtType(code, data)


def _msgpack_load(stream, *args, **kwargs):
    # temporary workaround for https://github.com/msgpack/msgpack-python/pull/143
    return msgpack.loads(stream.read(), *args, **kwargs)


msgpack_serializer = BaseSerializer(
    dumps=functools.partial(msgpack.dumps, use_bin_type=True),
    loads=functools.partial(msgpack.loads, encoding='utf-8', ext_hook=ext_hook),
    dump=functools.partial(msgpack.dump, use_bin_type=True),
    load=functools.partial(_msgpack_load, encoding='utf-8', ext_hook=ext_hook),
)

json_serializer = BaseSerializer(dumps=json.dumps, loads=json.loads, dump=json.dump, load=json.load)
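The ext_hook above turns lymph's embedded-msgpack extension back into an object and passes any other code through untouched. For context, a self-contained sketch of the same ExtType round-trip pattern, using a hypothetical type code for datetimes rather than lymph's own:

import datetime
import msgpack

DATETIME_TYPE = 42  # hypothetical application-chosen type code

def default(obj):
    # Pack datetimes as an ISO string tagged with our extension code.
    if isinstance(obj, datetime.datetime):
        return msgpack.ExtType(DATETIME_TYPE, obj.isoformat().encode('utf-8'))
    raise TypeError('cannot serialize %r' % (obj,))

def ext_hook(code, data):
    if code == DATETIME_TYPE:
        return datetime.datetime.fromisoformat(data.decode('utf-8'))
    return msgpack.ExtType(code, data)

packed = msgpack.dumps({'at': datetime.datetime(2020, 1, 1)},
                       default=default, use_bin_type=True)
print(msgpack.loads(packed, ext_hook=ext_hook, raw=False))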
github dropbox / dropbox-sdk-python / dropbox / stone_serializers.py
def msgpack_encode(data_type, obj):
        return msgpack.dumps(
            msgpack_compat_obj_encode(data_type, obj), encoding='utf-8')
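Note that the encoding='utf-8' keyword targets older msgpack releases; msgpack 1.0 removed it, and the modern equivalent is use_bin_type=True when packing and raw=False when unpacking.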
github saltstack / salt-bin / mk_repo.py
                data[base][fn]['blake'] = hashlib.blake2b(raw).hexdigest()
                data[base][fn]['sha512'] = hashlib.sha512(raw).hexdigest()
                data[base][fn]['sha3_512'] = hashlib.sha3_512(raw).hexdigest()

    for base, rels in data.items():
        names = []
        for fn, rel in rels.items():
            names.append(rel['version'])
        names = sorted(names, key=StrictVersion)
        latest = os.path.join(f'salt-{names[-1]}')
        latest_lk = os.path.join('dist', base, 'salt')
        if os.path.exists(latest_lk):
            os.remove(latest_lk)
        os.symlink(latest, latest_lk)
    with open('dist/repo.mp', 'wb+') as wfh:
        wfh.write(msgpack.dumps(data))
    with open('dist/repo.json', 'w+') as wfh:
        wfh.write(json.dumps(data))
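A minimal sketch of reading the packed index back, mirroring the write above:

import msgpack

with open('dist/repo.mp', 'rb') as rfh:
    repo = msgpack.loads(rfh.read(), raw=False)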