        cfg.readfp(f)
    global prefix
+    global location
    try:
        template = cfg.get('fixtures', 'bucket prefix')
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        template = 'test-{random}-'
    prefix = choose_bucket_prefix(template=template)
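+    # optional region name for bucket creation; an empty string keeps
+    # boto's default behaviour (no LocationConstraint is sent)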
+    try:
+        location = cfg.get('region main', 'name')
+    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
+        location = ''
+
    s3.clear()
    config.clear()
    calling_formats = dict(
    return name
-def get_new_bucket(connection=None):
+def get_new_bucket(connection=None, name=None, headers=None):
    """
    Get a bucket that exists and is empty.
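+
+    If a name is given it is used as-is instead of a generated one;
+    headers, when given, are passed through to create_bucket.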
"""
if connection is None:
connection = s3.main
- name = get_new_bucket_name()
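+    # a caller-supplied name is used verbatim, so tests can pass
+    # deliberately invalid names and check the error response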
+    if name is None:
+        name = get_new_bucket_name()
    # the only way for this to fail with a pre-existing bucket is if
    # someone raced us between setup nuke_prefixed_buckets and here;
    # ignore that as astronomically unlikely
-    bucket = connection.create_bucket(name)
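+    # location is the module-level global read from the config in setup()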
+    bucket = connection.create_bucket(name, location=location, headers=headers)
    return bucket
def test_bucket_create_delete():
    name = '{prefix}foo'.format(prefix=get_prefix())
    print 'Trying bucket {name!r}'.format(name=name)
-    bucket = s3.main.create_bucket(name)
+    bucket = get_new_bucket(s3.main, name)
    # make sure it's actually there
    s3.main.get_bucket(bucket.name)
    bucket.delete()
    Attempt to create a bucket with a specified name, and confirm
    that the request fails because of an invalid bucket name.
    """
-    e = assert_raises(boto.exception.S3ResponseError, s3.main.create_bucket, name)
+    e = assert_raises(boto.exception.S3ResponseError, get_new_bucket, s3.main, name)
    eq(e.status, 400)
    eq(e.reason, 'Bad Request')
    eq(e.error_code, 'InvalidBucketName')
def test_bucket_create_naming_bad_short_empty():
    # bucket creates where name is empty look like PUTs to the parent
    # resource (with slash), hence their error response is different
-    e = assert_raises(boto.exception.S3ResponseError, s3.main.create_bucket, '')
+    e = assert_raises(boto.exception.S3ResponseError, get_new_bucket, s3.main, '')
    eq(e.status, 405)
    eq(e.reason, 'Method Not Allowed')
    eq(e.error_code, 'MethodNotAllowed')
    # should be very rare
    if _prefix is None:
        _prefix = get_prefix()
-    s3.main.create_bucket('{prefix}{name}'.format(
+    get_new_bucket(s3.main, '{prefix}{name}'.format(
        prefix=_prefix,
        name=name,
        ))
    prefix = get_prefix()
    assert len(prefix) < 255
    num = length - len(prefix)
-    s3.main.create_bucket('{prefix}{name}'.format(
+    get_new_bucket(s3.main, '{prefix}{name}'.format(
        prefix=prefix,
        name=num*'a',
        ))
    prefix = get_prefix()
    length = 251
    num = length - len(prefix)
-    bucket = s3.main.create_bucket('{prefix}{name}'.format(
+    bucket = get_new_bucket(s3.main, '{prefix}{name}'.format(
        prefix=prefix,
        name=num*'a',
        ))
@attr(operation='re-create')
@attr(assertion='idempotent success')
def test_bucket_create_exists():
-    bucket = get_new_bucket()
+    bucket = get_new_bucket(s3.main)
    # REST idempotency means this should be a nop
-    s3.main.create_bucket(bucket.name)
+    get_new_bucket(s3.main, bucket.name)
@attr(resource='bucket')
    # Names are shared across a global namespace. As such, no two
    # users can create a bucket with that same name.
    bucket = get_new_bucket()
-    e = assert_raises(boto.exception.S3CreateError, s3.alt.create_bucket, bucket.name)
+    e = assert_raises(boto.exception.S3CreateError, get_new_bucket, s3.alt, bucket.name)
    eq(e.status, 409)
    eq(e.reason, 'Conflict')
    eq(e.error_code, 'BucketAlreadyExists')
@attr('fails_on_dho')
def test_bucket_header_acl_grants():
    headers = _get_acl_header()
-    bucket = s3.main.create_bucket(get_prefix(), headers=headers)
+    bucket = get_new_bucket(s3.main, get_prefix(), headers)
    policy = bucket.get_acl()
    check_grants(
@attr('fails_on_rgw')
def test_logging_toggle():
    bucket = get_new_bucket()
-    log_bucket = s3.main.create_bucket(bucket.name + '-log')
+    log_bucket = get_new_bucket(s3.main, bucket.name + '-log')
    log_bucket.set_as_logging_target()
    bucket.enable_logging(target_bucket=log_bucket, target_prefix=bucket.name)
    bucket.disable_logging()
    names = [e.name for e in list(li)]
    eq(names, key_names)
-    bucket2 = s3.main.create_bucket(bucket.name)
+    bucket2 = get_new_bucket(s3.main, bucket.name)
    li = bucket.list()