def test_get_session_token(self):
session = botocore.session.get_session()
sns = session.get_service('sts')
op = sns.get_operation('GetSessionToken')
params = {}
endpoint = self.get_mocked_endpoint()
with self.assertRaises(NoCredentialsError):
endpoint.make_request(op, params)
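# The test above exercises botocore's legacy get_service()/get_operation()
# API. A minimal sketch of the same assertion against the modern client API
# (stubbing get_credentials() to force the "no credentials" path is an
# assumption, not part of the original test):
import botocore.session
import pytest
from unittest import mock
from botocore.exceptions import NoCredentialsError

def test_get_session_token_no_credentials():
    session = botocore.session.get_session()
    # Build the client without any resolvable credentials.
    with mock.patch.object(session, 'get_credentials', return_value=None):
        sts = session.create_client('sts', region_name='us-east-1')
        with pytest.raises(NoCredentialsError):
            sts.get_session_token()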
def setUp(self):
super(TestCreateCredentialResolver, self).setUp()
self.session = mock.Mock(spec=botocore.session.Session)
self.session.get_component = self.fake_get_component
self.fake_instance_variables = {
'credentials_file': 'a',
'legacy_config_file': 'b',
'config_file': 'c',
'metadata_service_timeout': 1,
'metadata_service_num_attempts': 1,
}
self.config_loader = ConfigValueStore()
for name, value in self.fake_instance_variables.items():
self.config_loader.set_config_variable(name, value)
self.session.get_config_variable = \
self.config_loader.get_config_variable
self.session.set_config_variable = \
    self.config_loader.set_config_variable
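# A usage sketch for the fixture above (assumed, not copied from the suite):
# the fake session should be enough for create_credential_resolver() to
# build a CredentialResolver from the configured instance variables.
from botocore.credentials import CredentialResolver, create_credential_resolver

def test_create_credential_resolver(self):
    resolver = create_credential_resolver(self.session)
    self.assertIsInstance(resolver, CredentialResolver)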
def setUp(self):
self.session = botocore.session.get_session()
self.service = self.session.get_service('sqs', 'aws')
def test_only_dynamodb_calls_are_traced():
"""Test only a single subsegment is created for other AWS services.
As the pynamodb patch applies the botocore patch as well, we need
to ensure that only one subsegment is created for all calls not
made by PynamoDB. As PynamoDB calls botocore differently than the
botocore patch expects we also just get a single subsegment per
PynamoDB call.
"""
session = botocore.session.get_session()
s3 = session.create_client('s3', region_name='us-west-2',
config=Config(signature_version=UNSIGNED))
try:
s3.get_bucket_location(Bucket='mybucket')
except ClientError:
pass
subsegments = xray_recorder.current_segment().subsegments
assert len(subsegments) == 1
assert subsegments[0].name == 's3'
assert len(subsegments[0].subsegments) == 0
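# A sketch of the patch/recording setup this test relies on; the segment
# name and the patch call below are illustrative assumptions, not copied
# from the original test module.
from aws_xray_sdk.core import patch, xray_recorder

def _tracing_setup_sketch():
    # Patching pynamodb also applies the botocore patch, which is why the
    # test expects exactly one subsegment for the unsigned S3 call.
    patch(('pynamodb',))
    xray_recorder.begin_segment('test_pynamodb_patch')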
def setUp(self):
self.session = botocore.session.get_session()
self.client = self.session.create_client('apigateway', 'us-east-1')
# Create a resource to use with this client.
self.api_name = 'mytestapi'
self.api_id = self.create_rest_api_or_skip()
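# One possible shape for the create_rest_api_or_skip() helper referenced
# above (a hedged sketch; the throttle-handling details are assumptions):
import unittest
from botocore.exceptions import ClientError

def create_rest_api_or_skip(self):
    try:
        api_id = self.client.create_rest_api(name=self.api_name)['id']
    except ClientError as e:
        if e.response['Error']['Code'] == 'TooManyRequestsException':
            raise unittest.SkipTest('API Gateway throttled create_rest_api; skipping.')
        raise
    return api_id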
"""Unit test suite to validate aws_encryption_sdk.key_providers.kms.KMSMasterKeyProviderConfig"""
import botocore.session
import pytest
from aws_encryption_sdk.key_providers.base import MasterKeyProviderConfig
from aws_encryption_sdk.key_providers.kms import KMSMasterKeyProviderConfig
from .unit_test_utils import all_invalid_kwargs, all_valid_kwargs
pytestmark = [pytest.mark.unit, pytest.mark.local]
VALID_KWARGS = {
KMSMasterKeyProviderConfig: [
dict(botocore_session=botocore.session.Session(), key_ids=(), region_names=()),
dict(botocore_session=botocore.session.Session(), key_ids=[], region_names=()),
dict(botocore_session=botocore.session.Session(), key_ids=(), region_names=[]),
dict(botocore_session=botocore.session.Session(), region_names=()),
dict(botocore_session=botocore.session.Session(), key_ids=()),
dict(botocore_session=botocore.session.Session()),
]
}
INVALID_KWARGS = {KMSMasterKeyProviderConfig: [dict(botocore_session=None)]}
@pytest.mark.parametrize("cls, kwargs", all_valid_kwargs(VALID_KWARGS))
def test_attributes_valid_kwargs(cls, kwargs):
cls(**kwargs)
@pytest.mark.parametrize("cls, kwargs", all_invalid_kwargs(VALID_KWARGS, INVALID_KWARGS))
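# Completion sketch for the decorated test that the snippet cuts off; the
# expected TypeError is an assumption based on the attrs-style validators
# used by these config classes.
def test_attributes_invalid_kwargs(cls, kwargs):
    with pytest.raises(TypeError):
        cls(**kwargs)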
def _components(self):
if isinstance(self, Session):
try:
return self.__dict__["_components"]
except KeyError:
raise AttributeError("_components")
proxy_components = botocore.session.Session._proxy_components
if self not in proxy_components:
proxy_components[self] = botocore.session.ComponentLocator()
self._register_components()
return proxy_components[self]
def setUp(self):
self.files = FileCreator()
self.session = botocore.session.get_session()
self.regions = {}
self.region = 'us-west-2'
self.client = self.session.create_client('s3', region_name=self.region)
self.extra_setup()
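# Sketches of the companion hooks this fixture implies (assumptions, not
# copied from the original suite): extra_setup() as a no-op extension point
# for subclasses, and a tearDown() that removes the FileCreator's temp files.
def extra_setup(self):
    pass

def tearDown(self):
    self.files.remove_all()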
def _create_google_default_config():
config = type('', (), {})()
# Use botocore session API to get defaults
session = botocore.session.Session()
# region: The default AWS region that this script will connect
# to for all API calls
config.region = session.get_config_variable('region') or 'eu-central-1'
# aws cli profile to store config and access keys into
config.profile = session.profile or None
# output format: The AWS CLI output format that will be configured in the
# adf profile (affects subsequent CLI calls)
config.output_format = session.get_config_variable('format') or 'json'
# aws credential location: The file where this script will store the temp
# credentials under the configured profile
config.aws_credentials_location = os.path.expanduser(session.get_config_variable('credentials_file'))
config.aws_config_location = os.path.expanduser(session.get_config_variable('config_file'))
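# Illustrative follow-on (assumed, not part of the original script): persist
# the resolved defaults into the AWS CLI config file using the CLI's
# "[profile <name>]" section convention.
import configparser

def _write_profile_defaults(config):
    parser = configparser.ConfigParser()
    parser.read(config.aws_config_location)
    section = 'profile {}'.format(config.profile) if config.profile else 'default'
    if not parser.has_section(section):
        parser.add_section(section)
    parser.set(section, 'region', config.region)
    parser.set(section, 'output', config.output_format)
    with open(config.aws_config_location, 'w') as f:
        parser.write(f)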
def env_from_profile(ctx, param, value):
if not value:
return
session = botocore.session.Session(profile=value)
options = session.get_scoped_config()
for opt in options:
env_opt = opt.upper()
if env_opt.startswith(ctx.auto_envvar_prefix):
os.environ.setdefault(env_opt, options[opt])
return value
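# Wiring sketch (the option and prefix names below are assumptions):
# env_from_profile is a click option callback, so an eager --profile option
# can seed environment defaults before other options are resolved via
# auto_envvar_prefix.
import click

@click.command()
@click.option('--profile', callback=env_from_profile, is_eager=True,
              expose_value=False, help='AWS profile whose scoped config seeds env vars.')
@click.option('--region')
def cli(region):
    click.echo(region or 'no region configured')

if __name__ == '__main__':
    cli(auto_envvar_prefix='MYTOOL')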