How to use the aiokafka.errors.NoError class in aiokafka

To help you get started, we’ve selected a few examples that show how aiokafka.errors.NoError is used in public projects.


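NoError is the aiokafka.errors class whose errno is 0, the "success" code carried in Kafka protocol responses. Before the real-world snippets below, here is a minimal sketch (not taken from the aiokafka source; the check_response helper is made up for illustration) of the two places NoError typically appears: comparing Errors.for_code(response.error_code) against Errors.NoError, and passing NoError.errno when building fake responses in tests.

import aiokafka.errors as Errors
from aiokafka.errors import NoError

def check_response(error_code: int) -> None:
    # Map the wire-level error code back to an exception class.
    error_type = Errors.for_code(error_code)
    if error_type is Errors.NoError:
        return  # the request succeeded
    # Any other class is a real broker error and can be raised directly.
    raise error_type()

check_response(NoError.errno)  # NoError.errno is 0, i.e. "no error"
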
github aio-libs / aiokafka / tests / test_sender.py View on Github
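This test builds an InitProducerIdResponse whose error_code is NoError.errno and verifies that the handler treats it as success: no backoff is returned and the transaction manager picks up the returned producer id and epoch.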
async def test_sender__do_init_pid_handle_ok(self):
        sender = await self._setup_sender(no_init=True)
        init_handler = InitPIDHandler(sender)

        # Handle response
        self.assertEqual(sender._txn_manager.producer_id, -1)
        self.assertEqual(sender._txn_manager.producer_epoch, -1)
        cls = InitProducerIdResponse[0]
        resp = cls(
            throttle_time_ms=300,
            error_code=NoError.errno,
            producer_id=17,
            producer_epoch=1
        )
        backoff = init_handler.handle_response(resp)
        self.assertIsNone(backoff)
        self.assertEqual(sender._txn_manager.producer_id, 17)
        self.assertEqual(sender._txn_manager.producer_epoch, 1)
github aio-libs / aiokafka / tests / test_conn.py View on Github
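Here the broker is mocked so that both the SASL handshake and the SASL authenticate requests come back with NoError.errno, simulating a successful v1 PLAIN handshake.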
async def test__do_sasl_handshake_v1(self):
        host, port = self.kafka_host, self.kafka_port

        # setup connection with mocked send and send_bytes
        conn = AIOKafkaConnection(
            host=host, port=port, loop=self.loop,
            sasl_mechanism="PLAIN",
            sasl_plain_username="admin",
            sasl_plain_password="123",
            security_protocol="SASL_PLAINTEXT"
        )
        conn.close = close_mock = mock.MagicMock()

        supported_mechanisms = ["PLAIN"]
        error_class = NoError
        auth_error_class = NoError

        async def mock_send(request, expect_response=True):
            if request.API_KEY == SaslHandShakeRequest[0].API_KEY:
                assert request.API_VERSION == 1
                return SaslHandShakeResponse[1](
                    error_code=error_class.errno,
                    enabled_mechanisms=supported_mechanisms
                )
            else:
                assert request.API_KEY == SaslAuthenticateRequest[0].API_KEY
                return SaslAuthenticateResponse[0](
                    error_code=auth_error_class.errno,
                    error_message="",
                    sasl_auth_bytes=b""
                )
github aio-libs / aiokafka / tests / test_conn.py View on Github
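A later part of the same test swaps NoError for real error classes to exercise the failure paths: an unsupported mechanism, an IllegalSaslStateError during authentication, and an UnknownError on the handshake itself, with NoError restored in between.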
await conn._do_sasl_handshake()

        supported_mechanisms = ["GSSAPI"]
        with self.assertRaises(UnsupportedSaslMechanismError):
            await conn._do_sasl_handshake()
        self.assertTrue(close_mock.call_count)
        supported_mechanisms = ["PLAIN"]

        auth_error_class = IllegalSaslStateError
        close_mock.reset()

        with self.assertRaises(IllegalSaslStateError):
            await conn._do_sasl_handshake()
        self.assertTrue(close_mock.call_count)
        auth_error_class = NoError

        error_class = UnknownError
        close_mock.reset()

        with self.assertRaises(UnknownError):
            await conn._do_sasl_handshake()
        self.assertTrue(close_mock.call_count)
github aio-libs / aiokafka / tests / test_conn.py View on Github
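The v0 variant mocks only the handshake response; setting error_class to NoError again covers the happy path.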
async def test__do_sasl_handshake_v0(self):
        host, port = self.kafka_host, self.kafka_port

        # setup connection with mocked send and send_bytes
        conn = AIOKafkaConnection(
            host=host, port=port, loop=self.loop,
            sasl_mechanism="PLAIN",
            sasl_plain_username="admin",
            sasl_plain_password="123"
        )
        conn.close = close_mock = mock.MagicMock()

        supported_mechanisms = ["PLAIN"]
        error_class = NoError

        async def mock_send(request, expect_response=True):
            return SaslHandShakeResponse[0](
                error_code=error_class.errno,
                enabled_mechanisms=supported_mechanisms
            )

        async def mock_sasl_send(payload, expect_response):
            return b""

        conn.send = mock.Mock(side_effect=mock_send)
        conn._send_sasl_token = mock.Mock(side_effect=mock_sasl_send)
        conn._version_info = VersionInfo({
            SaslHandShakeRequest[0].API_KEY: [0, 0]
        })
github aio-libs / aiokafka / aiokafka / consumer / fetcher.py View on Github
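In the consumer's fetcher each partition result is mapped back to an error class with Errors.for_code, and only when it is Errors.NoError are the highwater mark, last stable offset and fetched records applied to the partition state.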
now_ms = int(1000 * time.time())
        for topic, partitions in response.topics:
            for partition, error_code, highwater, *part_data in partitions:
                tp = TopicPartition(topic, partition)
                error_type = Errors.for_code(error_code)
                fetch_offset = fetch_offsets[tp]
                tp_state = assignment.state_value(tp)
                if not tp_state.has_valid_position or \
                        tp_state.position != fetch_offset:
                    log.debug(
                        "Discarding fetch response for partition %s "
                        "since its offset %s does not match the current "
                        "position", tp, fetch_offset)
                    continue

                if error_type is Errors.NoError:
                    if request.API_VERSION >= 4:
                        aborted_transactions = part_data[-2]
                        lso = part_data[-3]
                    else:
                        aborted_transactions = None
                        lso = None
                    tp_state.highwater = highwater
                    tp_state.lso = lso
                    tp_state.timestamp = now_ms

                    # part_data also contains lso, aborted_transactions.
                    # message_set is last
                    records = MemoryRecords(part_data[-1])
                    if records.has_next():
                        log.debug(
                            "Adding fetched record for partition %s with"
github aio-libs / aiokafka / aiokafka / producer / sender.py View on Github
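The transactional sender follows the same pattern when handling offset-commit responses: Errors.NoError marks the offset as committed, coordinator errors mark the group coordinator dead, and load-in-progress or unknown-partition errors simply return a backoff for retry.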
def handle_response(self, resp):
        txn_manager = self._sender._txn_manager
        group_id = self._group_id

        for topic, partitions in resp.errors:
            for partition, error_code in partitions:
                tp = TopicPartition(topic, partition)
                error_type = Errors.for_code(error_code)

                if error_type is Errors.NoError:
                    offset = self._offsets[tp].offset
                    log.debug(
                        "Offset %s for partition %s committed to group %s",
                        offset, tp, group_id)
                    txn_manager.offset_committed(tp, offset, group_id)
                elif (error_type is CoordinatorNotAvailableError or
                        error_type is NotCoordinatorError or
                        # Copied from Java. Not sure why it's only in this case
                        error_type is RequestTimedOutError):
                    self._sender._coordinator_dead(CoordinationType.GROUP)
                    return self._default_backoff
                elif (error_type is CoordinatorLoadInProgressError or
                        error_type is UnknownTopicOrPartitionError):
                    # We will just retry after backoff
                    return self._default_backoff
                elif error_type is InvalidProducerEpoch:
github aio-libs / aiokafka / aiokafka / conn.py View on Github
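During the real SASL handshake the connection maps the broker's error_code with Errors.for_code and, if it is anything other than Errors.NoError, closes the connection and raises the corresponding error.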
async def _do_sasl_handshake(self):
        # NOTE: We will only fallback to v0.9 gssapi scheme if user explicitly
        #       stated, that api_version is "0.9"
        if self._version_hint and self._version_hint < (0, 10):
            handshake_klass = None
            assert self._sasl_mechanism == 'GSSAPI', (
                "Only GSSAPI supported for v0.9"
            )
        else:
            handshake_klass = self._version_info.pick_best(
                SaslHandShakeRequest)

            sasl_handshake = handshake_klass(self._sasl_mechanism)
            response = await self.send(sasl_handshake)
            error_type = Errors.for_code(response.error_code)
            if error_type is not Errors.NoError:
                error = error_type(self)
                self.close(reason=CloseReason.AUTH_FAILURE, exc=error)
                raise error

            if self._sasl_mechanism not in response.enabled_mechanisms:
                exc = Errors.UnsupportedSaslMechanismError(
                    'Kafka broker does not support %s sasl mechanism. '
                    'Enabled mechanisms are: %s'
                    % (self._sasl_mechanism, response.enabled_mechanisms))
                self.close(reason=CloseReason.AUTH_FAILURE, exc=exc)
                raise exc

        assert self._sasl_mechanism in ('PLAIN', 'GSSAPI')
        if self._security_protocol == 'SASL_PLAINTEXT' and \
           self._sasl_mechanism == 'PLAIN':
            self.log.warning(
github aio-libs / aiokafka / aiokafka / cluster.py View on Github
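When updating cluster metadata, only topics whose error code resolves to Errors.NoError have their partition metadata recorded; the other error types are merely logged.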
_new_partitions = {}
        _new_broker_partitions = collections.defaultdict(set)
        _new_unauthorized_topics = set()
        _new_internal_topics = set()

        for topic_data in metadata.topics:
            if metadata.API_VERSION == 0:
                error_code, topic, partitions = topic_data
                is_internal = False
            else:
                error_code, topic, is_internal, partitions = topic_data
            if is_internal:
                _new_internal_topics.add(topic)
            error_type = Errors.for_code(error_code)
            if error_type is Errors.NoError:
                _new_partitions[topic] = {}
                for p_error, partition, leader, replicas, isr in partitions:
                    _new_partitions[topic][partition] = PartitionMetadata(
                        topic=topic, partition=partition, leader=leader,
                        replicas=replicas, isr=isr, error=p_error)
                    if leader != -1:
                        _new_broker_partitions[leader].add(
                            TopicPartition(topic, partition))

            elif error_type is Errors.LeaderNotAvailableError:
                log.warning("Topic %s is not available during auto-create"
                            " initialization", topic)
            elif error_type is Errors.UnknownTopicOrPartitionError:
                log.error("Topic %s not found in cluster metadata", topic)
            elif error_type is Errors.TopicAuthorizationFailedError:
                log.error("Topic %s is not authorized for this client", topic)
github aio-libs / aiokafka / aiokafka / client.py View on Github
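Finally, the client asserts that the ApiVersions response carries Errors.NoError before using the reported per-API versions to infer the broker version.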
def _check_api_version_response(self, response):
        # The logic here is to check the list of supported request versions
        # in descending order. As soon as we find one that works, return it
        test_cases = [
            # format: (<broker version>, <api key>, <minimum api version needed>)
            ((2, 1, 0), MetadataRequest[0].API_KEY, 7),
            ((1, 1, 0), FetchRequest[0].API_KEY, 7),
            ((1, 0, 0), MetadataRequest[0].API_KEY, 5),
            ((0, 11, 0), MetadataRequest[0].API_KEY, 4),
            ((0, 10, 2), OffsetFetchRequest[0].API_KEY, 2),
            ((0, 10, 1), MetadataRequest[0].API_KEY, 2),
        ]

        error_type = Errors.for_code(response.error_code)
        assert error_type is Errors.NoError, "API version check failed"
        max_versions = dict([
            (api_key, max_version)
            for api_key, _, max_version in response.api_versions
        ])
        # Get the best match of test cases
        for broker_version, api_key, version in test_cases:
            if max_versions.get(api_key, -1) >= version:
                return broker_version

        # We know that ApiVersionResponse is only supported in 0.10+
        # so if all else fails, choose that
        return (0, 10, 0)