How to use the s3transfer.exceptions.RetriesExceededError class in s3transfer

To help you get started, we’ve selected a few s3transfer examples based on popular ways RetriesExceededError is raised and handled in public projects.

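Before the project examples, here is a minimal sketch of where RetriesExceededError shows up for a library user: it is raised from a TransferFuture's result() once a download has used up all of its configured attempts. The bucket, key, and filename below are placeholders, and num_download_attempts is the same knob the first test exercises.

import boto3

from s3transfer.exceptions import RetriesExceededError
from s3transfer.manager import TransferConfig, TransferManager

client = boto3.client('s3')
# num_download_attempts caps how many GET attempts are made before
# RetriesExceededError is raised.
config = TransferConfig(num_download_attempts=3)
manager = TransferManager(client, config)

try:
    future = manager.download('example-bucket', 'example-key', 'example-file')
    future.result()  # re-raises RetriesExceededError if every attempt failed
except RetriesExceededError as e:
    print('Download gave up; last underlying error:', e.last_exception)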

github boto / s3transfer / tests / functional / test_download.py
def test_retry_failure(self):
        self.add_head_object_response()

        max_retries = 3
        self.config.num_download_attempts = max_retries
        self._manager = TransferManager(self.client, self.config)
        # Add responses that fill up the maximum number of retries.
        self.add_n_retryable_get_object_responses(max_retries)

        future = self.manager.download(**self.create_call_kwargs())

        # A retry exceeded error should have happened.
        with self.assertRaises(RetriesExceededError):
            future.result()

        # All of the retries should have been used up.
        self.stubber.assert_no_pending_responses()
github boto / s3transfer / tests / unit / test_download.py
def test_retries_failure(self):
        for _ in range(self.max_attempts):
            self.stubber.add_response(
                'get_object', service_response={
                    'Body': StreamWithError(self.stream, SOCKET_ERROR)
                },
                expected_params={'Bucket': self.bucket, 'Key': self.key}
            )

        task = self.get_download_task()
        task()
        self.transfer_coordinator.announce_done()

        # Should have failed out on a RetriesExceededError
        with self.assertRaises(RetriesExceededError):
            self.transfer_coordinator.result()
        self.stubber.assert_no_pending_responses()
github boto / s3transfer / s3transfer / __init__.py
max_attempts = self._config.num_download_attempts
last_exception = None
for i in range(max_attempts):
    try:
        return self._do_get_object(bucket, key, filename,
                                   extra_args, callback)
    except (socket.timeout, socket.error,
            ReadTimeoutError, IncompleteReadError) as e:
        # TODO: we need a way to reset the callback if the
        # download failed.
        logger.debug("Retrying exception caught (%s), "
                     "retrying request, (attempt %s / %s)", e, i,
                     max_attempts, exc_info=True)
        last_exception = e
        continue
raise RetriesExceededError(last_exception)
github boto / s3transfer / s3transfer / __init__.py
                    streaming_body = StreamReaderProgress(
                        response['Body'], callback)
                    buffer_size = 1024 * 16
                    current_index = part_size * part_index
                    for chunk in iter(lambda: streaming_body.read(buffer_size),
                                      b''):
                        self._ioqueue.put((current_index, chunk))
                        current_index += len(chunk)
                    return
                except (socket.timeout, socket.error,
                        ReadTimeoutError, IncompleteReadError) as e:
                    logger.debug("Retrying exception caught (%s), "
                                 "retrying request, (attempt %s / %s)", e, i,
                                 max_attempts, exc_info=True)
                    last_exception = e
                    continue
            raise RetriesExceededError(last_exception)
        finally:
            logger.debug("EXITING _download_range for part: %s", part_index)
github boto / s3transfer / s3transfer / processpool.py
def _do_get_object(self, bucket, key, extra_args, temp_filename, offset):
        last_exception = None
        for i in range(self._MAX_ATTEMPTS):
            try:
                response = self._client.get_object(
                    Bucket=bucket, Key=key, **extra_args)
                self._write_to_file(temp_filename, offset, response['Body'])
                return
            except S3_RETRYABLE_DOWNLOAD_ERRORS as e:
                logger.debug('Retrying exception caught (%s), '
                             'retrying request, (attempt %s / %s)', e, i+1,
                             self._MAX_ATTEMPTS, exc_info=True)
                last_exception = e
        raise RetriesExceededError(last_exception)
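The process pool downloader raises the same exception from _do_get_object inside its worker processes. A rough sketch of how that can surface to the caller, assuming the worker's error is re-raised from the future's result() (bucket, key, and filename are placeholders):

from s3transfer.exceptions import RetriesExceededError
from s3transfer.processpool import ProcessPoolDownloader

def main():
    # ProcessPoolDownloader creates its own S3 clients in the workers.
    with ProcessPoolDownloader() as downloader:
        future = downloader.download_file(
            'example-bucket', 'example-key', 'example-file')
        try:
            future.result()
        except RetriesExceededError as e:
            print('All GET attempts failed:', e.last_exception)

if __name__ == '__main__':
    # The guard matters here because worker processes may be spawned.
    main()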
github boto / s3transfer / s3transfer / exceptions.py
def __init__(self, last_exception, msg='Max Retries Exceeded'):
        super(RetriesExceededError, self).__init__(msg)
        self.last_exception = last_exception
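Given that constructor, the message defaults to 'Max Retries Exceeded' and the original error stays reachable on last_exception. A small illustrative snippet (the TimeoutError is just a stand-in for whatever retryable error struck last):

from s3transfer.exceptions import RetriesExceededError

error = RetriesExceededError(TimeoutError('read timed out'))
print(error)                 # Max Retries Exceeded
print(error.last_exception)  # read timed out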