@mock_s3
def test_follow_symlink_omitted(self):
    """Same as test_follow_symlink_false, but default behavior."""
    with self.temp_directory_with_files() as temp_dir1:
        root1 = temp_dir1.path
        with self.temp_directory_with_files() as temp_dir2:
            root2 = temp_dir2.path
            os.symlink(root1 + "/f1", root2 + "/f3")
            results = self.run_hook(functions={
                'MyFunction': {
                    'path': root2}
            })
            self.assertIsNotNone(results)

            code = results.get('MyFunction')
            self.assertIsInstance(code, Code)
            self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, [

@moto.mock_s3
def test_remote_no_push_managed_s3():
    api.delete_context(TEST_CONTEXT)
    api.context(context_name=TEST_CONTEXT)

    # Setup moto s3 resources
    s3_client = boto3.client('s3')
    s3_resource = boto3.resource('s3')
    s3_resource.create_bucket(Bucket=TEST_BUCKET)

    # Make sure bucket is empty
    objects = s3_client.list_objects(Bucket=TEST_BUCKET)
    assert 'Contents' not in objects, 'Bucket should be empty'

    # Bind remote context
    api.remote(TEST_CONTEXT, TEST_REMOTE, TEST_BUCKET_URL)

@mock_s3
def test_load_aws_s3_error():
    source = "s3://ryft-public-sample-data/"
    sink = "ignore.deb"
    with pytest.raises(ValueError):
        cfg_load.remote.load(source, sink)

@mock_s3
def test_path_missing(self):
    """Test path missing."""
    msg = "missing required property 'path' in function 'MyFunction'"
    with ShouldRaise(ValueError(msg)):
        self.run_hook(functions={
            'MyFunction': {
            }
        })

@moto.mock_s3
@test_context
def test_get_batch_found(client):
    batch = mkbatch()
    batch['files']['two'] = {
        'algorithm': 'sha512',
        'size': len(TWO),
        'digest': TWO_DIGEST,
        'visibility': 'public',
    }
    with set_time():
        resp = upload_batch(client, batch)
        eq_(resp.status_code, 200, resp.data)
    resp = client.get('/tooltool/upload/1')
    eq_(resp.status_code, 200, resp.data)
    eq_(json.loads(resp.data)['result'], {
        "author": "me",

@mock_s3
@mock_s3_deprecated
def test_post_to_bucket():
    conn = boto.connect_s3("the_key", "the_secret")
    bucket = conn.create_bucket("foobar")

    requests.post(
        "https://foobar.s3.amazonaws.com/", {"key": "the-key", "file": "nothing"}
    )

    bucket.get_key("the-key").get_contents_as_string().should.equal(b"nothing")

@mock_s3
def test_can_initialize(self):
    # if key doesn't exist, initialization should still succeed in case key
    # is written after daemon is started
    b = S3BackupConfig(
        "s3://fake-bucket/fake-backup-conf.yml")

@mock_s3
@pytest.mark.slow
def test_log_time_delta(self):
    _setup_module()
    # 0.00025 hours is ~0.9 s, so the second log() call after the 1 s sleep
    # should roll over into a new S3 object.
    logger = S3StreamLogger(bucket_name, prefix, hours=.00025)
    logger.log('testtest')
    time.sleep(1)
    logger.log('testtest')
    logger.close()
    objs = list(bucket.objects.filter(Prefix=prefix))
    assert len(objs) == 2, 'Should be two objects'

@mock_s3
def test_s3_storage_class_standard():
    s3 = boto3.client("s3")
    s3.create_bucket(Bucket="Bucket")

    # add an object to the bucket with standard storage
    s3.put_object(Bucket="Bucket", Key="my_key", Body="my_value")

    list_of_objects = s3.list_objects(Bucket="Bucket")
    list_of_objects["Contents"][0]["StorageClass"].should.equal("STANDARD")

def setUpMock(self):
    """
    Let Moto take over all socket communications
    """
    self.ec2mock = mock_ec2()
    self.ec2mock.start()
    self.s3mock = mock_s3()
    self.s3mock.start()
    self.route53mock = mock_route53()
    self.route53mock.start()
    responses.add(
        responses.GET,
        self.AWS_INSTANCE_DATA_DEFAULT_URL,
        body=u"""
[
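setUpMock only starts the mocks; a matching cleanup hook should stop them so later tests reach real sockets again. A sketch, assuming the same attribute names and a unittest-style teardown:

def tearDownMock(self):
    """Undo setUpMock: clear registered responses and stop the Moto mocks."""
    responses.reset()
    self.route53mock.stop()
    self.s3mock.stop()
    self.ec2mock.stop()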