How to use the aioredis.util.decode function in aioredis

To help you get started, we’ve selected a few aioredis.util.decode examples, based on how the function is used in popular public projects.

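aioredis.util.decode is the small helper aioredis (and the projects below) use to turn raw Redis replies (bytes) into str. A minimal sketch of its behaviour, assuming aioredis 1.x, where decode also walks list replies element by element:

from aioredis.util import decode

# bytes values are decoded with the given encoding
assert decode(b"1526919030474-55", "utf8") == "1526919030474-55"

# non-bytes values pass through unchanged
assert decode(42, "utf8") == 42

# list replies are decoded element by element (aioredis 1.x behaviour)
assert decode([b"a", b"b"], "utf8") == ["a", "b"]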

Example from adamcharnock/lightbus: lightbus/transports/redis/utilities.py
from datetime import datetime, timezone

from aioredis.util import decode


def redis_steam_id_to_datetime(message_id):
    message_id = decode(message_id, "utf8")
    milliseconds, seq = map(int, message_id.split("-"))
    # Treat the sequence value as additional microseconds to ensure correct sequencing
    microseconds = (milliseconds % 1000 * 1000) + seq
    dt = datetime.utcfromtimestamp(milliseconds // 1000).replace(
        microsecond=microseconds, tzinfo=timezone.utc
    )
    return dt
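
As a worked example, assuming the imports above, a hypothetical stream ID of 1526919030474-2 splits into 1526919030474 milliseconds since the epoch and sequence number 2, giving 474 * 1000 + 2 = 474002 microseconds:

dt = redis_steam_id_to_datetime(b"1526919030474-2")
assert dt == datetime(2018, 5, 21, 16, 10, 30, 474002, tzinfo=timezone.utc)
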
Example from adamcharnock/lightbus: lightbus/transports/redis.py
# Get any messages that this consumer has yet to process.
            # This can happen in the case where the processes died before acknowledging.
            pending_messages = await redis.xread_group(
                group_name=consumer_group,
                consumer_name=self.consumer_name,
                streams=list(streams.keys()),
                # Using ID '0' indicates we want unacked pending messages
                latest_ids=["0"] * len(streams),
                timeout=None,  # Don't block, return immediately
            )

            event_messages = []
            for stream, message_id, fields in pending_messages:
                message_id = decode(message_id, "utf8")
                stream = decode(stream, "utf8")
                event_message = self._fields_to_message(
                    fields,
                    expected_events,
                    stream=stream,
                    native_id=message_id,
                    consumer_group=consumer_group,
                )
                if not event_message:
                    # noop message, or message an event we don't care about
                    continue
                logger.debug(
                    LBullets(
                        L(
                            "⬅ Receiving pending event {} on stream {}",
                            Bold(message_id),
                            Bold(stream),
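
Each entry yielded by xread_group is a (stream, message_id, fields) tuple of raw bytes, which is why the stream name and message ID are passed through decode before being used. A small sketch with hypothetical values:

stream, message_id, fields = (b"my.stream", b"1526919030474-0", {b"type": b"user_registered"})
stream = decode(stream, "utf8")          # "my.stream"
message_id = decode(message_id, "utf8")  # "1526919030474-0"
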
Example from aio-libs/aioredis: aioredis/connection.py
def _process_data(self, obj):
        """Processes command results."""
        waiter, encoding, cb = self._waiters.popleft()
        if isinstance(obj, RedisError):
            _set_exception(waiter, obj)
            if self._in_transaction is not None:
                self._transaction_error = obj
        else:
            if encoding is not None:
                try:
                    obj = decode(obj, encoding)
                except Exception as exc:
                    _set_exception(waiter, exc)
                    return
            if cb is not None:
                try:
                    obj = cb(obj)
                except Exception as exc:
                    _set_exception(waiter, exc)
                    return
            _set_result(waiter, obj)
            if self._in_transaction is not None:
                self._in_transaction.append((encoding, cb))
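
This is the code path taken when a command is issued with an explicit encoding: the raw bytes reply is run through decode before being handed back to the caller. A rough usage sketch, assuming aioredis 1.x and a Redis server on localhost:

import asyncio
import aioredis

async def main():
    redis = await aioredis.create_redis_pool("redis://localhost")
    await redis.set("greeting", "hello")
    assert await redis.get("greeting") == b"hello"                   # raw bytes by default
    assert await redis.get("greeting", encoding="utf-8") == "hello"  # decoded via util.decode
    redis.close()
    await redis.wait_closed()

asyncio.run(main())
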
Example from adamcharnock/lightbus: lightbus/transports/redis.py
# This will block until there are some messages available
                stream_messages = await redis.xread_group(
                    group_name=consumer_group,
                    consumer_name=self.consumer_name,
                    streams=list(streams.keys()),
                    # Using ID '>' indicates we only want new messages which have not
                    # been passed to other consumers in this group
                    latest_ids=[">"] * len(streams),
                    count=self.batch_size,
                )

                # Handle the messages we have received
                event_messages = []
                for stream, message_id, fields in stream_messages:
                    message_id = decode(message_id, "utf8")
                    stream = decode(stream, "utf8")
                    event_message = self._fields_to_message(
                        fields,
                        expected_events,
                        stream=stream,
                        native_id=message_id,
                        consumer_group=consumer_group,
                    )
                    if not event_message:
                        # noop message, or message an event we don't care about
                        continue
                    logger.debug(
                        LBullets(
                            L(
                                "⬅ Received new event {} on stream {}",
                                Bold(message_id),
                                Bold(stream),
Example from adamcharnock/lightbus: lightbus/transports/redis.py
"""
        with await self.connection_manager() as redis:
            for stream in stream_names:
                old_messages = await redis.xpending(
                    stream, consumer_group, "-", "+", count=self.reclaim_batch_size
                )
                timeout = self.acknowledgement_timeout * 1000
                event_messages = []
                for (
                    message_id,
                    consumer_name,
                    ms_since_last_delivery,
                    num_deliveries,
                ) in old_messages:
                    message_id = decode(message_id, "utf8")
                    consumer_name = decode(consumer_name, "utf8")

                    if ms_since_last_delivery > timeout:
                        logger.info(
                            L(
                                "Found timed out event {} in stream {}. Abandoned by {}. Attempting to reclaim...",
                                Bold(message_id),
                                Bold(stream),
                                Bold(consumer_name),
                            )
                        )

                    result = await redis.xclaim(
                        stream, consumer_group, self.consumer_name, int(timeout), message_id
                    )
                    for claimed_message_id, fields in result:
                        claimed_message_id = decode(claimed_message_id, "utf8")
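
Redis expresses XPENDING idle times and the XCLAIM min-idle-time argument in milliseconds, hence the conversion above from acknowledgement_timeout (seconds) before comparing and claiming. For example, with a hypothetical 60 second timeout:

timeout = 60 * 1000  # 60000 ms; messages idle longer than this are considered abandoned
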
Example from adamcharnock/lightbus: lightbus/transports/redis/event.py
L(
                                    "Found timed out event {} in stream {}. Abandoned by {}. Attempting to reclaim...",
                                    Bold(message_id),
                                    Bold(stream),
                                    Bold(consumer_name),
                                )
                            )

                            # *Try* to claim the messages...
                            result = await redis.xclaim(
                                stream, consumer_group, self.consumer_name, int(timeout), message_id
                            )

                            # Parse each message we managed to claim
                            for claimed_message_id, fields in result:
                                claimed_message_id = decode(claimed_message_id, "utf8")
                                event_message = self._fields_to_message(
                                    fields,
                                    expected_events,
                                    stream=stream,
                                    native_id=claimed_message_id,
                                    consumer_group=consumer_group,
                                )
                                if not event_message:
                                    # noop message, or message an event we don't care about
                                    continue
                                logger.debug(
                                    LBullets(
                                        L(
                                            "⬅ Reclaimed timed out event {} on stream {}. Abandoned by {}.",
                                            Bold(message_id),
                                            Bold(stream),
Example from adamcharnock/lightbus: lightbus/transports/redis/event.py
# This will block until there are some messages available
                stream_messages = await redis.xread_group(
                    group_name=consumer_group,
                    consumer_name=self.consumer_name,
                    streams=list(streams.keys()),
                    # Using ID '>' indicates we only want new messages which have not
                    # been passed to other consumers in this group
                    latest_ids=[">"] * len(streams),
                    count=self.batch_size,
                )

                # Handle the messages we have received
                event_messages = []
                for stream, message_id, fields in stream_messages:
                    message_id = decode(message_id, "utf8")
                    stream = decode(stream, "utf8")
                    event_message = self._fields_to_message(
                        fields,
                        expected_events,
                        stream=stream,
                        native_id=message_id,
                        consumer_group=consumer_group,
                    )
                    if not event_message:
                        # noop message, or message an event we don't care about
                        continue
                    logger.debug(
                        LBullets(
                            L(
                                "⬅ Received new event {} on stream {}",
                                Bold(message_id),
                                Bold(stream),