How to use orjson - 10 common examples

To help you get started, we’ve selected a few orjson examples based on popular ways the library is used in public projects.
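
All of the snippets below revolve around two calls. orjson.dumps serializes to bytes (not str, unlike the standard library), and orjson.loads parses bytes, bytearray, memoryview, or str. A minimal sketch of the round trip:

import orjson

data = {"name": "example", "values": [1, 2, 3]}

serialized = orjson.dumps(data)      # returns bytes, not str
restored = orjson.loads(serialized)  # accepts bytes or str
assert restored == data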


github dutradda / jsondaora / jsondaora / deserializers.py View on Github
        elif (isinstance(value, bytes) or isinstance(value, str)) and (
            dataclasses.is_dataclass(field_type)
            or (
                isinstance(field_type, _GenericAlias)
                and (
                    field_type.__origin__ is dict
                    or field_type.__origin__ is list
                    or field_type.__origin__ is tuple
                )
            )
        ):
            return deserialize_field(
                field_name=field_name,
                field_type=field_type,
                value=orjson.loads(value),
                cls=cls,
                field_default=field_default,
            )

        elif isinstance(field_type, _GenericAlias):
            return _deserialize_generic_type(field_type, field_name, value)

        if field_type is Any:
            return value

        elif isinstance(value, field_type):
            return value

        elif (
            isinstance(field_type, type)
            and issubclass(field_type, str)
        ):
            ...  # excerpt truncated in the original listing
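
The deserializer above routes raw bytes or str field values through orjson.loads before recursing, relying on orjson accepting both input types without an explicit decode step. A minimal illustration of that behaviour:

import orjson

# byte strings and text parse identically
assert orjson.loads(b'{"a": 1}') == {"a": 1}
assert orjson.loads('{"a": 1}') == {"a": 1}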

github aleph-im / pyaleph / src / aleph / network.py View on Github
async def incoming_check(ipfs_pubsub_message):
    """ Verifies an incoming message is sane, protecting from spam in the
    meantime.

    TODO: actually implement this, no check done here yet. IMPORTANT.
    """

    try:
        # "json" here is orjson imported under the stdlib name
        message = json.loads(ipfs_pubsub_message.get('data', ''))
        LOGGER.debug("New message! %r" % message)
        message = await check_message(message, from_network=True)
        return message
    except json.JSONDecodeError:
        LOGGER.exception('Received non-json message %r'
                         % ipfs_pubsub_message.get('data', ''))
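
Since json here is orjson under the stdlib name, the except clause is catching orjson.JSONDecodeError, which subclasses both ValueError and the standard library's json.JSONDecodeError, so broader handlers catch it too. A standalone sketch of the same guard:

import orjson

def parse_or_none(raw: bytes):
    # return None instead of raising on malformed payloads
    try:
        return orjson.loads(raw)
    except orjson.JSONDecodeError:
        return None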

github kivo360 / jamboree / jamboree / base / main.py View on Github
    def back_to_dict(self, list_of_serialized: list):
        deserialized = []
        if len(list_of_serialized) == 1:
            return orjson.loads(list_of_serialized[0])

        for i in list_of_serialized:
            deserialized.append(orjson.loads(i))
        return deserialized
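
The same helper can be written more compactly as a comprehension; the pattern is simply orjson.loads applied to each serialized entry (here, presumably the bytes returned by redis-py's lrange):

import orjson

def back_to_dict(list_of_serialized: list):
    # unwrap a single entry, otherwise deserialize them all
    if len(list_of_serialized) == 1:
        return orjson.loads(list_of_serialized[0])
    return [orjson.loads(item) for item in list_of_serialized]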

github aleph-im / pyaleph / src / aleph / services / p2p / peers.py View on Github
async def monitor_hosts(psub):
    from aleph.model.p2p import add_peer
    alive_sub = await psub.subscribe(ALIVE_TOPIC)
    while True:
        try:
            mvalue = await alive_sub.get()
            mvalue = await decode_msg(mvalue)
            LOGGER.debug("New message received %r" % mvalue)
            content = json.loads(mvalue['data'])
            peer_type = content.get('peer_type', 'P2P')
            if not isinstance(content['address'], str):
                raise ValueError('Bad address')
            if not isinstance(content['peer_type'], str):
                raise ValueError('Bad peer type')
            
            # TODO: handle interests and save it
            
            if peer_type not in ['P2P', 'HTTP']:
                raise ValueError('Unsupported peer type %r' % peer_type)
            
            await add_peer(address=content['address'], peer_type=peer_type)
        except Exception:
            LOGGER.exception("Exception in pubsub peers monitoring")

github tweag / trustix / packages / trustix-nix-reprod / trustix_nix_reprod / api / diff.py View on Github
async def diff(output_hash_1_hex: str, output_hash_2_hex: str) -> DiffResponse:
    output_hash_1 = codecs.decode(output_hash_1_hex, "hex")  # type: ignore
    output_hash_2 = codecs.decode(output_hash_2_hex, "hex")  # type: ignore

    rpc_client = get_rpcapi()

    narinfo1, narinfo2 = [
        orjson.loads(resp.Value)
        for resp in (
            await asyncio.gather(
                rpc_client.GetValue(api_pb2.ValueRequest(Digest=output_hash_1)),  # type: ignore
                rpc_client.GetValue(api_pb2.ValueRequest(Digest=output_hash_2)),  # type: ignore
            )
        )
    ]

    diffoscope = await asyncio.get_running_loop().run_in_executor(
        None, _diff, narinfo1, narinfo2
    )

    return DiffResponse(
        narinfo={
            output_hash_1_hex: narinfo1,
            output_hash_2_hex: narinfo2,
        },
        # excerpt truncated in the original listing; presumably the
        # diffoscope result computed above is also part of the response
    )
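
Because orjson.loads takes bytes directly, RPC payloads fetched concurrently can be decoded without an intermediate .decode() call. A generic sketch of that pattern (fetch_one is a stand-in for a real RPC call, not part of the trustix API):

import asyncio
import orjson

async def fetch_all_json(fetch_one, keys):
    # gather the raw responses, then decode each with orjson
    responses = await asyncio.gather(*(fetch_one(key) for key in keys))
    return [orjson.loads(resp) for resp in responses]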

github aleph-im / pyaleph / src / aleph / services / p2p.py View on Github
async def monitor_hosts(psub):
    from aleph.model.p2p import add_peer
    alive_sub = await psub.subscribe(ALIVE_TOPIC)
    while True:
        try:
            mvalue = await alive_sub.get()
            mvalue = await decode_msg(mvalue)
            LOGGER.debug("New message received %r" % mvalue)
            content = json.loads(mvalue['data'])
            # TODO: check message validity
            await add_peer(address=content['address'], peer_type="P2P")
        except Exception:
            LOGGER.exception("Exception in pubsub peers monitoring")

github aleph-im / pyaleph / src / aleph / services / ipfs / storage.py View on Github
async def get_json(hash, timeout=1, tries=1):
    result = await get_ipfs_content(hash, timeout=timeout, tries=tries)
    if result is not None and result != -1:
        try:
            # "json" here is orjson imported under the stdlib name
            result = await loop.run_in_executor(None, json.loads, result)
        except json.JSONDecodeError:
            try:
                # fall back to the slower but more lenient stdlib parser
                import json as njson
                result = await loop.run_in_executor(None, njson.loads, result)
            except (njson.JSONDecodeError, KeyError):
                LOGGER.exception("Can't decode JSON")
                result = -1  # never retry, bogus data
    return result
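
The nested try above implements a fallback: parse with orjson first (json is orjson aliased, njson is the actual standard library), then retry with the stdlib parser. The same idea, stripped of the executor plumbing:

import json as stdlib_json

import orjson

def loads_with_fallback(raw: bytes):
    # fast, strict parse first; lenient stdlib parse second
    try:
        return orjson.loads(raw)
    except orjson.JSONDecodeError:
        return stdlib_json.loads(raw)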

github aleph-im / pyaleph / src / aleph / chains / nuls2.py View on Github
# the excerpt begins mid-signature; the function name and leading
# parameters below are assumptions reconstructed from context
async def request_transactions(config, session,
                               start_height):
    """ Continuously request data from the NULS blockchain.
    """
    target_addr = config.nuls2.sync_address.value
    remark = config.nuls2.remark.value
    chain_id = config.nuls2.chain_id.value

    last_height = None
    async for tx in get_transactions(config, session, chain_id,
                                     target_addr, start_height, remark=remark):
        ldata = tx['txDataHex']
        LOGGER.info('Handling TX in block %s' % tx['height'])
        try:
            ddata = bytes.fromhex(ldata).decode('utf-8')
            last_height = tx['height']
            jdata = json.loads(ddata)
            
            context = {"chain_name": CHAIN_NAME,
                       "tx_hash": tx['hash'],
                       "height": tx['height'],
                       "time": tx['createTime'],
                       "publisher": tx["coinFroms"][0]['address']}
            yield (jdata, context)

        except json.JSONDecodeError:
            # if it's not valid json, just ignore it...
            LOGGER.info("Incoming logic data is not JSON, ignoring. %r"
                        % ldata)

    if last_height:
        await Chain.set_last_height(CHAIN_NAME, last_height)
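
The transaction payload arrives hex-encoded; the snippet decodes it to text before parsing, although orjson.loads would also accept the raw UTF-8 bytes. A condensed sketch of that decode step:

import orjson

def decode_tx_data(tx_data_hex: str):
    # hex string -> bytes -> parsed JSON; the intermediate
    # .decode('utf-8') used in the snippet above is optional
    return orjson.loads(bytes.fromhex(tx_data_hex))

decode_tx_data('7b2261223a317d')  # {'a': 1}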

github tweag / trustix / packages / trustix-nix-reprod / trustix_nix_reprod / api / diff.py View on Github
        dir_a_rel = os.path.join(os.path.basename(os.path.dirname(dir_a)), "A")
        dir_b_rel = os.path.join(os.path.basename(os.path.dirname(dir_b)), "B")

        proc = subprocess.run(
            ["diffoscope", "--json", "-", dir_a_rel, dir_b_rel],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            cwd=tmpdir,
        )

    # Diffoscope returns non-zero on paths that have a diff.
    # Instead, use stderr as a heuristic for whether the call went well.
    if proc.stderr:
        raise ValueError(proc.stderr)

    return orjson.loads(proc.stdout)
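
subprocess.run captures stdout as bytes, which orjson.loads consumes directly. A generic version of the wrapper, assuming any CLI that writes JSON to stdout:

import subprocess

import orjson

def run_json_tool(cmd: list):
    # run the tool, treat any stderr output as failure,
    # and parse the raw stdout bytes without decoding first
    proc = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if proc.stderr:
        raise ValueError(proc.stderr)
    return orjson.loads(proc.stdout)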

github kivo360 / jamboree / jamboree / base / main.py View on Github
        self.pool.schedule(self._concurrent_delete_many, args=(query, details))
        _hash = self._generate_hash(query)
        count = self._get_count(_hash, query)
        phindex = self.redis.incr("placeholder_del:index")
        placeholder_hash = f"{_hash}:placeholder:{phindex}"
        placeholder_hash_del = f"{_hash}:placeholder_del:{phindex}"
        push_key = f"{_hash}:list"
        rlock = f"{_hash}:lock"
        
        with self.redis.lock(rlock):
            all_matching_redis_items = self.back_to_dict(self.redis.lrange(push_key, 0, -1))
            if isinstance(all_matching_redis_items, dict):
                # a single record came back; replace the list only if it matches the details
                is_true = self._search_one(all_matching_redis_items, details)
                if not is_true:
                    return
                self.redis.rpush(placeholder_hash, orjson.dumps(all_matching_redis_items))
            else:
                for match in all_matching_redis_items:
                    is_true = self._search_one(match, details)
                    if is_true:
                        self.redis.rpush(placeholder_hash, orjson.dumps(match))

            self.redis.rename(push_key, placeholder_hash_del)
            self.redis.rename(placeholder_hash, push_key)
        
        self.pool.schedule(self._concurrent_delete_list, args=(placeholder_hash_del,))  # args must be a tuple; note the trailing comma
        # Delete while unlocked.
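
On the write side, orjson.dumps produces bytes, which redis-py accepts for RPUSH without an extra .encode() step; that is why the snippet can pass the result straight through. For illustration:

import orjson

record = {'query': 'demo', 'deleted': False}
payload = orjson.dumps(record)  # bytes, ready for redis
# e.g. redis_client.rpush('somekey:list', payload)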

orjson

Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy.

License: Apache-2.0
Latest version published: 4 days ago
Package Health Score: 90 / 100