@mock_s3_deprecated
def test_bucket_key_listing_order():
    conn = boto.connect_s3()
    bucket = conn.create_bucket("test_bucket")
    prefix = "toplevel/"

    def store(name):
        k = Key(bucket, prefix + name)
        k.set_contents_from_string("somedata")

    names = ["x/key", "y.key1", "y.key2", "y.key3", "x/y/key", "x/y/z/key"]
    for name in names:
        store(name)

    delimiter = None
    keys = [x.name for x in bucket.list(prefix, delimiter)]
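    # Hypothetical continuation, not from the original snippet: with no
    # delimiter, bucket.list() returns every key stored under the "toplevel/"
    # prefix, so one plausible assertion is a simple set comparison.
    expected = {prefix + name for name in names}
    assert set(keys) == expected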
@mock_s3_deprecated
def test_list_versions():
    conn = boto.connect_s3("the_key", "the_secret")
    bucket = conn.create_bucket("foobar")
    bucket.configure_versioning(versioning=True)

    key_versions = []

    key = Key(bucket, "the-key")
    key.version_id.should.be.none
    key.set_contents_from_string("Version 1")
    key_versions.append(key.version_id)
    key.set_contents_from_string("Version 2")
    key_versions.append(key.version_id)
    key_versions.should.have.length_of(2)

    versions = list(bucket.list_versions())
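    # Hypothetical continuation, not from the original snippet: the two stored
    # versions should come back from list_versions() with the version ids that
    # were recorded above.
    assert len(versions) == 2
    assert {v.version_id for v in versions} == set(key_versions)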
@mock_s3_deprecated
def test_key_with_reduced_redundancy():
    conn = boto.connect_s3()
    bucket = conn.create_bucket("test_bucket_name")

    key = Key(bucket, "test_rr_key")
    key.set_contents_from_string("value1", reduced_redundancy=True)
    # we use the bucket iterator because of:
    # https://github.com/boto/boto/issues/1173
    list(bucket)[0].storage_class.should.equal("REDUCED_REDUNDANCY")
@mock_s3_deprecated
def test_get_raw_dump_not_found(self, boto_helper):
    boto_helper.get_or_create_bucket("crashstats")

    boto_s3_store = self.get_s3_store()

    with pytest.raises(CrashIDNotFound):
        boto_s3_store.get(
            uuid="0bba929f-8721-460c-dead-a43c20071027", datatype="raw"
        )
@mock_s3_deprecated
def test_missing_key_urllib2():
    conn = boto.connect_s3("the_key", "the_secret")
    conn.create_bucket("foobar")

    urlopen.when.called_with("http://foobar.s3.amazonaws.com/the-key").should.throw(
        HTTPError
    )
@mock_s3_deprecated
def test_no_crashes(self, mock_futures, boto_helper):
    """Verify no crashes in bucket result in no missing crashes."""
    boto_helper.get_or_create_bucket('crashstats')

    with self.get_app() as app:
        missing = app.find_missing(TODAY)
        assert missing == []
@mock_s3_deprecated
def test_no_missing_crashes(self, mock_futures, boto_helper):
    """Verify raw crashes with processed crashes result in no missing crashes."""
    boto_helper.get_or_create_bucket('crashstats')

    # Create a couple raw and processed crashes
    crashids = [
        create_new_ooid(),
        create_new_ooid(),
        create_new_ooid(),
    ]
    for crashid in crashids:
        boto_helper.set_contents_from_string(
            bucket_name='crashstats',
            key='/v2/raw_crash/%s/%s/%s' % (crashid[0:3], TODAY, crashid),
            value='test'
        )
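    # Hypothetical continuation, not from the original snippet: the docstring
    # implies a matching processed crash is written for each raw crash (the
    # '/v1/processed_crash/<crashid>' key layout here is an assumption), after
    # which the app should report nothing missing, as in test_no_crashes above.
    for crashid in crashids:
        boto_helper.set_contents_from_string(
            bucket_name='crashstats',
            key='/v1/processed_crash/%s' % crashid,
            value='test'
        )

    with self.get_app() as app:
        missing = app.find_missing(TODAY)
        assert missing == []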
@mock_s3_deprecated
@reduced_min_part_size
def test_multipart_upload_cancel():
    conn = boto.connect_s3("the_key", "the_secret")
    bucket = conn.create_bucket("foobar")

    multipart = bucket.initiate_multipart_upload("the-key")
    part1 = b"0" * REDUCED_PART_SIZE
    multipart.upload_part_from_file(BytesIO(part1), 1)
    multipart.cancel_upload()
    # TODO we really need some sort of assertion here, but we don't currently
    # have the ability to list multipart uploads for a bucket.
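    # Hypothetical check, not from the original snippet: since the multipart
    # upload was cancelled before completion, at minimum the key should never
    # have been created in the bucket.
    assert bucket.get_key("the-key") is None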
@mock_s3_deprecated
def test_post_to_bucket():
    conn = create_connection("the_key", "the_secret")
    bucket = conn.create_bucket("foobar")

    requests.post(
        "https://s3.amazonaws.com/foobar", {"key": "the-key", "file": "nothing"}
    )

    bucket.get_key("the-key").get_contents_as_string().should.equal(b"nothing")
@mock_s3_deprecated
def test_post_with_metadata_to_bucket():
    conn = create_connection("the_key", "the_secret")
    bucket = conn.create_bucket("foobar")

    requests.post(
        "https://s3.amazonaws.com/foobar",
        {"key": "the-key", "file": "nothing", "x-amz-meta-test": "metadata"},
    )

    bucket.get_key("the-key").get_metadata("test").should.equal("metadata")