import botocore.session
from botocore.exceptions import ClientError
from botocore.exceptions import ParamValidationError
-from nose.tools import eq_ as eq
from nose.plugins.attrib import attr
import isodate
import email.utils
def test_bucket_list_empty():
bucket = get_new_bucket_resource()
is_empty = _bucket_is_empty(bucket)
- eq(is_empty, True)
+ assert is_empty == True
@attr(resource='bucket')
@attr(method='get')
bucket2 = get_new_bucket_resource()
obj = bucket1.put_object(Body='str', Key='asdf')
is_empty = _bucket_is_empty(bucket2)
- eq(is_empty, True)
+ assert is_empty == True
def _create_objects(bucket=None, bucket_name=None, keys=[]):
"""
response = client.list_objects(Bucket=bucket_name, MaxKeys=2)
keys = _get_keys(response)
- eq(len(keys), 2)
- eq(keys, ['bar', 'baz'])
- eq(response['IsTruncated'], True)
+ assert len(keys) == 2
+ assert keys == ['bar', 'baz']
+ assert response['IsTruncated'] == True
response = client.list_objects(Bucket=bucket_name, Marker='baz',MaxKeys=2)
keys = _get_keys(response)
- eq(len(keys), 1)
- eq(response['IsTruncated'], False)
- eq(keys, ['foo'])
+ assert len(keys) == 1
+ assert response['IsTruncated'] == False
+ assert keys == ['foo']
@attr(resource='bucket')
@attr(method='get')
response = client.list_objects_v2(Bucket=bucket_name, MaxKeys=2)
keys = _get_keys(response)
- eq(len(keys), 2)
- eq(keys, ['bar', 'baz'])
- eq(response['IsTruncated'], True)
+ assert len(keys) == 2
+ assert keys == ['bar', 'baz']
+ assert response['IsTruncated'] == True
response = client.list_objects_v2(Bucket=bucket_name, StartAfter='baz',MaxKeys=2)
keys = _get_keys(response)
- eq(len(keys), 1)
- eq(response['IsTruncated'], False)
- eq(keys, ['foo'])
+ assert len(keys) == 1
+ assert response['IsTruncated'] == False
+ assert keys == ['foo']
@attr(resource='bucket')
@attr(method='get')
for j in range(5):
client.put_object(Bucket=bucket_name, Key=str(j))
response1 = client.list_objects_v2(Bucket=bucket_name)
- eq(response1['KeyCount'], 5)
+ assert response1['KeyCount'] == 5
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects(Bucket=bucket_name, Delimiter='/')
- eq(response['Delimiter'], '/')
+ assert response['Delimiter'] == '/'
keys = _get_keys(response)
- eq(keys, ['asdf'])
+ assert keys == ['asdf']
prefixes = _get_prefixes(response)
- eq(len(prefixes), 2)
- eq(prefixes, ['foo/', 'quux/'])
+ assert len(prefixes) == 2
+ assert prefixes == ['foo/', 'quux/']
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects_v2(Bucket=bucket_name, Delimiter='/')
- eq(response['Delimiter'], '/')
+ assert response['Delimiter'] == '/'
keys = _get_keys(response)
- eq(keys, ['asdf'])
+ assert keys == ['asdf']
prefixes = _get_prefixes(response)
- eq(len(prefixes), 2)
- eq(prefixes, ['foo/', 'quux/'])
- eq(response['KeyCount'], len(prefixes) + len(keys))
+ assert len(prefixes) == 2
+ assert prefixes == ['foo/', 'quux/']
+ assert response['KeyCount'] == len(prefixes) + len(keys)
@attr(resource='bucket')
client = get_client()
response = client.list_objects_v2(Bucket=bucket_name, Delimiter='/', EncodingType='url')
- eq(response['Delimiter'], '/')
+ assert response['Delimiter'] == '/'
keys = _get_keys(response)
- eq(keys, ['asdf%2Bb'])
+ assert keys == ['asdf%2Bb']
prefixes = _get_prefixes(response)
- eq(len(prefixes), 3)
- eq(prefixes, ['foo%2B1/', 'foo/', 'quux%20ab/'])
+ assert len(prefixes) == 3
+ assert prefixes == ['foo%2B1/', 'foo/', 'quux%20ab/']
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects(Bucket=bucket_name, Delimiter='/', EncodingType='url')
- eq(response['Delimiter'], '/')
+ assert response['Delimiter'] == '/'
keys = _get_keys(response)
- eq(keys, ['asdf%2Bb'])
+ assert keys == ['asdf%2Bb']
prefixes = _get_prefixes(response)
- eq(len(prefixes), 3)
- eq(prefixes, ['foo%2B1/', 'foo/', 'quux%20ab/'])
+ assert len(prefixes) == 3
+ assert prefixes == ['foo%2B1/', 'foo/', 'quux%20ab/']
def validate_bucket_list(bucket_name, prefix, delimiter, marker, max_keys,
client = get_client()
response = client.list_objects(Bucket=bucket_name, Delimiter=delimiter, Marker=marker, MaxKeys=max_keys, Prefix=prefix)
- eq(response['IsTruncated'], is_truncated)
+ assert response['IsTruncated'] == is_truncated
if 'NextMarker' not in response:
response['NextMarker'] = None
- eq(response['NextMarker'], next_marker)
+ assert response['NextMarker'] == next_marker
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(len(keys), len(check_objs))
- eq(len(prefixes), len(check_prefixes))
- eq(keys, check_objs)
- eq(prefixes, check_prefixes)
+ assert len(keys) == len(check_objs)
+ assert len(prefixes) == len(check_prefixes)
+ assert keys == check_objs
+ assert prefixes == check_prefixes
return response['NextMarker']
else:
params['StartAfter'] = ''
response = client.list_objects_v2(**params)
- eq(response['IsTruncated'], is_truncated)
+ assert response['IsTruncated'] == is_truncated
if 'NextContinuationToken' not in response:
response['NextContinuationToken'] = None
if last:
- eq(response['NextContinuationToken'], None)
+ assert response['NextContinuationToken'] is None
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(len(keys), len(check_objs))
- eq(len(prefixes), len(check_prefixes))
- eq(keys, check_objs)
- eq(prefixes, check_prefixes)
+ assert len(keys) == len(check_objs)
+ assert len(prefixes) == len(check_prefixes)
+ assert keys == check_objs
+ assert prefixes == check_prefixes
return response['NextContinuationToken']
client = get_client()
response = client.list_objects(Bucket=bucket_name, Delimiter='a')
- eq(response['Delimiter'], 'a')
+ assert response['Delimiter'] == 'a'
keys = _get_keys(response)
# foo contains no 'a' and so is a complete key
- eq(keys, ['foo'])
+ assert keys == ['foo']
# bar, baz, and cab should be broken up by the 'a' delimiters
prefixes = _get_prefixes(response)
- eq(len(prefixes), 2)
- eq(prefixes, ['ba', 'ca'])
+ assert len(prefixes) == 2
+ assert prefixes == ['ba', 'ca']
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects_v2(Bucket=bucket_name, Delimiter='a')
- eq(response['Delimiter'], 'a')
+ assert response['Delimiter'] == 'a'
keys = _get_keys(response)
# foo contains no 'a' and so is a complete key
- eq(keys, ['foo'])
+ assert keys == ['foo']
# bar, baz, and cab should be broken up by the 'a' delimiters
prefixes = _get_prefixes(response)
- eq(len(prefixes), 2)
- eq(prefixes, ['ba', 'ca'])
+ assert len(prefixes) == 2
+ assert prefixes == ['ba', 'ca']
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects(Bucket=bucket_name, Delimiter='%')
- eq(response['Delimiter'], '%')
+ assert response['Delimiter'] == '%'
keys = _get_keys(response)
# foo contains no 'a' and so is a complete key
- eq(keys, ['foo'])
+ assert keys == ['foo']
prefixes = _get_prefixes(response)
- eq(len(prefixes), 2)
+ assert len(prefixes) == 2
# bar, baz, and cab should be broken up by the 'a' delimiters
- eq(prefixes, ['b%', 'c%'])
+ assert prefixes == ['b%', 'c%']
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects_v2(Bucket=bucket_name, Delimiter='%')
- eq(response['Delimiter'], '%')
+ assert response['Delimiter'] == '%'
keys = _get_keys(response)
# foo contains no 'a' and so is a complete key
- eq(keys, ['foo'])
+ assert keys == ['foo']
prefixes = _get_prefixes(response)
- eq(len(prefixes), 2)
+ assert len(prefixes) == 2
# bar, baz, and cab should be broken up by the 'a' delimiters
- eq(prefixes, ['b%', 'c%'])
+ assert prefixes == ['b%', 'c%']
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects(Bucket=bucket_name, Delimiter=' ')
- eq(response['Delimiter'], ' ')
+ assert response['Delimiter'] == ' '
keys = _get_keys(response)
# foo contains no 'a' and so is a complete key
- eq(keys, ['foo'])
+ assert keys == ['foo']
prefixes = _get_prefixes(response)
- eq(len(prefixes), 2)
+ assert len(prefixes) == 2
# bar, baz, and cab should be broken up by the 'a' delimiters
- eq(prefixes, ['b ', 'c '])
+ assert prefixes == ['b ', 'c ']
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects_v2(Bucket=bucket_name, Delimiter=' ')
- eq(response['Delimiter'], ' ')
+ assert response['Delimiter'] == ' '
keys = _get_keys(response)
# foo contains no 'a' and so is a complete key
- eq(keys, ['foo'])
+ assert keys == ['foo']
prefixes = _get_prefixes(response)
- eq(len(prefixes), 2)
+ assert len(prefixes) == 2
# bar, baz, and cab should be broken up by the 'a' delimiters
- eq(prefixes, ['b ', 'c '])
+ assert prefixes == ['b ', 'c ']
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects(Bucket=bucket_name, Delimiter='.')
- eq(response['Delimiter'], '.')
+ assert response['Delimiter'] == '.'
keys = _get_keys(response)
# foo contains no 'a' and so is a complete key
- eq(keys, ['foo'])
+ assert keys == ['foo']
prefixes = _get_prefixes(response)
- eq(len(prefixes), 2)
+ assert len(prefixes) == 2
# bar, baz, and cab should be broken up by the 'a' delimiters
- eq(prefixes, ['b.', 'c.'])
+ assert prefixes == ['b.', 'c.']
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects_v2(Bucket=bucket_name, Delimiter='.')
- eq(response['Delimiter'], '.')
+ assert response['Delimiter'] == '.'
keys = _get_keys(response)
# foo contains no 'a' and so is a complete key
- eq(keys, ['foo'])
+ assert keys == ['foo']
prefixes = _get_prefixes(response)
- eq(len(prefixes), 2)
+ assert len(prefixes) == 2
# bar, baz, and cab should be broken up by the 'a' delimiters
- eq(prefixes, ['b.', 'c.'])
+ assert prefixes == ['b.', 'c.']
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects(Bucket=bucket_name, Delimiter='\x0a')
- eq(response['Delimiter'], '\x0a')
+ assert response['Delimiter'] == '\x0a'
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, key_names)
- eq(prefixes, [])
+ assert keys == key_names
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects_v2(Bucket=bucket_name, Delimiter='\x0a')
- eq(response['Delimiter'], '\x0a')
+ assert response['Delimiter'] == '\x0a'
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, key_names)
- eq(prefixes, [])
+ assert keys == key_names
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
response = client.list_objects(Bucket=bucket_name, Delimiter='')
# putting an empty value into Delimiter will not return a value in the response
- eq('Delimiter' in response, False)
+ assert 'Delimiter' not in response
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, key_names)
- eq(prefixes, [])
+ assert keys == key_names
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
response = client.list_objects_v2(Bucket=bucket_name, Delimiter='')
# putting an empty value into Delimiter will not return a value in the response
- eq('Delimiter' in response, False)
+ assert 'Delimiter' not in response
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, key_names)
- eq(prefixes, [])
+ assert keys == key_names
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
response = client.list_objects(Bucket=bucket_name)
# putting an empty value into Delimiter will not return a value in the response
- eq('Delimiter' in response, False)
+ assert 'Delimiter' not in response
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, key_names)
- eq(prefixes, [])
+ assert keys == key_names
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
response = client.list_objects_v2(Bucket=bucket_name)
# putting an empty value into Delimiter will not return a value in the response
- eq('Delimiter' in response, False)
+ assert 'Delimiter' not in response
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, key_names)
- eq(prefixes, [])
+ assert keys == key_names
+ assert prefixes == []
@attr('list-objects-v2')
@pytest.mark.list_objects_v2
response = client.list_objects_v2(Bucket=bucket_name, FetchOwner=True)
objs_list = response['Contents']
- eq('Owner' in objs_list[0], True)
+ assert 'Owner' in objs_list[0]
@attr('list-objects-v2')
@pytest.mark.list_objects_v2
response = client.list_objects_v2(Bucket=bucket_name)
objs_list = response['Contents']
- eq('Owner' in objs_list[0], False)
+ assert 'Owner' not in objs_list[0]
@attr('list-objects-v2')
@pytest.mark.list_objects_v2
response = client.list_objects_v2(Bucket=bucket_name, FetchOwner= False)
objs_list = response['Contents']
- eq('Owner' in objs_list[0], False)
-
-
-
+ assert 'Owner' not in objs_list[0]
@attr(resource='bucket')
@attr(method='get')
response = client.list_objects(Bucket=bucket_name, Delimiter='/')
# putting an empty value into Delimiter will not return a value in the response
- eq(response['Delimiter'], '/')
+ assert response['Delimiter'] == '/'
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, key_names)
- eq(prefixes, [])
+ assert keys == key_names
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
response = client.list_objects_v2(Bucket=bucket_name, Delimiter='/')
# putting an empty value into Delimiter will not return a value in the response
- eq(response['Delimiter'], '/')
+ assert response['Delimiter'] == '/'
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, key_names)
- eq(prefixes, [])
+ assert keys == key_names
+ assert prefixes == []
@attr(resource='bucket')
client = get_client()
response = client.list_objects(Bucket=bucket_name, Delimiter='/')
- eq(response['Delimiter'], '/')
+ assert response['Delimiter'] == '/'
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, key_names2)
- eq(prefixes, ['0/'])
+ assert keys == key_names2
+ assert prefixes == ['0/']
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects(Bucket=bucket_name, Prefix='foo/')
- eq(response['Prefix'], 'foo/')
+ assert response['Prefix'] == 'foo/'
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, ['foo/bar', 'foo/baz'])
- eq(prefixes, [])
+ assert keys == ['foo/bar', 'foo/baz']
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects_v2(Bucket=bucket_name, Prefix='foo/')
- eq(response['Prefix'], 'foo/')
+ assert response['Prefix'] == 'foo/'
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, ['foo/bar', 'foo/baz'])
- eq(prefixes, [])
+ assert keys == ['foo/bar', 'foo/baz']
+ assert prefixes == []
# just testing that we can do the delimeter and prefix logic on non-slashes
@attr(resource='bucket')
client = get_client()
response = client.list_objects(Bucket=bucket_name, Prefix='ba')
- eq(response['Prefix'], 'ba')
+ assert response['Prefix'] == 'ba'
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, ['bar', 'baz'])
- eq(prefixes, [])
+ assert keys == ['bar', 'baz']
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects_v2(Bucket=bucket_name, Prefix='ba')
- eq(response['Prefix'], 'ba')
+ assert response['Prefix'] == 'ba'
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, ['bar', 'baz'])
- eq(prefixes, [])
+ assert keys == ['bar', 'baz']
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects(Bucket=bucket_name, Prefix='')
- eq(response['Prefix'], '')
+ assert response['Prefix'] == ''
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, key_names)
- eq(prefixes, [])
+ assert keys == key_names
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects_v2(Bucket=bucket_name, Prefix='')
- eq(response['Prefix'], '')
+ assert response['Prefix'] == ''
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, key_names)
- eq(prefixes, [])
+ assert keys == key_names
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects(Bucket=bucket_name, Prefix='')
- eq(response['Prefix'], '')
+ assert response['Prefix'] == ''
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, key_names)
- eq(prefixes, [])
+ assert keys == key_names
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects_v2(Bucket=bucket_name, Prefix='')
- eq(response['Prefix'], '')
+ assert response['Prefix'] == ''
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, key_names)
- eq(prefixes, [])
+ assert keys == key_names
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects(Bucket=bucket_name, Prefix='d')
- eq(response['Prefix'], 'd')
+ assert response['Prefix'] == 'd'
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, [])
- eq(prefixes, [])
+ assert keys == []
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects_v2(Bucket=bucket_name, Prefix='d')
- eq(response['Prefix'], 'd')
+ assert response['Prefix'] == 'd'
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, [])
- eq(prefixes, [])
+ assert keys == []
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects(Bucket=bucket_name, Prefix='\x0a')
- eq(response['Prefix'], '\x0a')
+ assert response['Prefix'] == '\x0a'
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, [])
- eq(prefixes, [])
+ assert keys == []
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects_v2(Bucket=bucket_name, Prefix='\x0a')
- eq(response['Prefix'], '\x0a')
+ assert response['Prefix'] == '\x0a'
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, [])
- eq(prefixes, [])
+ assert keys == []
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects(Bucket=bucket_name, Delimiter='/', Prefix='foo/')
- eq(response['Prefix'], 'foo/')
- eq(response['Delimiter'], '/')
+ assert response['Prefix'] == 'foo/'
+ assert response['Delimiter'] == '/'
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, ['foo/bar'])
- eq(prefixes, ['foo/baz/'])
+ assert keys == ['foo/bar']
+ assert prefixes == ['foo/baz/']
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects_v2(Bucket=bucket_name, Delimiter='/', Prefix='foo/')
- eq(response['Prefix'], 'foo/')
- eq(response['Delimiter'], '/')
+ assert response['Prefix'] == 'foo/'
+ assert response['Delimiter'] == '/'
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, ['foo/bar'])
- eq(prefixes, ['foo/baz/'])
+ assert keys == ['foo/bar']
+ assert prefixes == ['foo/baz/']
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects(Bucket=bucket_name, Delimiter='a', Prefix='ba')
- eq(response['Prefix'], 'ba')
- eq(response['Delimiter'], 'a')
+ assert response['Prefix'] == 'ba'
+ assert response['Delimiter'] == 'a'
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, ['bar'])
- eq(prefixes, ['baza'])
+ assert keys == ['bar']
+ assert prefixes == ['baza']
@attr('list-objects-v2')
@pytest.mark.list_objects_v2
client = get_client()
response = client.list_objects_v2(Bucket=bucket_name, Delimiter='a', Prefix='ba')
- eq(response['Prefix'], 'ba')
- eq(response['Delimiter'], 'a')
+ assert response['Prefix'] == 'ba'
+ assert response['Delimiter'] == 'a'
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, ['bar'])
- eq(prefixes, ['baza'])
+ assert keys == ['bar']
+ assert prefixes == ['baza']
@attr(resource='bucket')
@attr(method='get')
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, [])
- eq(prefixes, [])
+ assert keys == []
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, [])
- eq(prefixes, [])
+ assert keys == []
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, ['b/a/c', 'b/a/g', 'b/a/r'])
- eq(prefixes, [])
+ assert keys == ['b/a/c', 'b/a/g', 'b/a/r']
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, ['b/a/c', 'b/a/g', 'b/a/r'])
- eq(prefixes, [])
+ assert keys == ['b/a/c', 'b/a/g', 'b/a/r']
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, [])
- eq(prefixes, [])
+ assert keys == []
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
keys = _get_keys(response)
prefixes = _get_prefixes(response)
- eq(keys, [])
- eq(prefixes, [])
+ assert keys == []
+ assert prefixes == []
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects(Bucket=bucket_name, MaxKeys=1)
- eq(response['IsTruncated'], True)
+ assert response['IsTruncated'] == True
keys = _get_keys(response)
- eq(keys, key_names[0:1])
+ assert keys == key_names[0:1]
response = client.list_objects(Bucket=bucket_name, Marker=key_names[0])
- eq(response['IsTruncated'], False)
+ assert response['IsTruncated'] == False
keys = _get_keys(response)
- eq(keys, key_names[1:])
+ assert keys == key_names[1:]
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects_v2(Bucket=bucket_name, MaxKeys=1)
- eq(response['IsTruncated'], True)
+ assert response['IsTruncated'] == True
keys = _get_keys(response)
- eq(keys, key_names[0:1])
+ assert keys == key_names[0:1]
response = client.list_objects_v2(Bucket=bucket_name, StartAfter=key_names[0])
- eq(response['IsTruncated'], False)
+ assert response['IsTruncated'] == False
keys = _get_keys(response)
- eq(keys, key_names[1:])
+ assert keys == key_names[1:]
@attr(resource='bucket')
@attr(method='get')
response = client.list_objects(Bucket=bucket_name, MaxKeys=0)
- eq(response['IsTruncated'], False)
+ assert response['IsTruncated'] == False
keys = _get_keys(response)
- eq(keys, [])
+ assert keys == []
@attr(resource='bucket')
@attr(method='get')
response = client.list_objects_v2(Bucket=bucket_name, MaxKeys=0)
- eq(response['IsTruncated'], False)
+ assert response['IsTruncated'] == False
keys = _get_keys(response)
- eq(keys, [])
+ assert keys == []
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects(Bucket=bucket_name)
- eq(response['IsTruncated'], False)
+ assert response['IsTruncated'] == False
keys = _get_keys(response)
- eq(keys, key_names)
- eq(response['MaxKeys'], 1000)
+ assert keys == key_names
+ assert response['MaxKeys'] == 1000
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects_v2(Bucket=bucket_name)
- eq(response['IsTruncated'], False)
+ assert response['IsTruncated'] == False
keys = _get_keys(response)
- eq(keys, key_names)
- eq(response['MaxKeys'], 1000)
+ assert keys == key_names
+ assert response['MaxKeys'] == 1000
def get_http_response_body(**kwargs):
global http_response_body
xml = ET.fromstring(http_response_body.decode('utf-8'))
parsed = parseXmlToJson(xml)
summary = parsed['Summary']
- eq(summary['QuotaMaxBytes'], '-1')
- eq(summary['QuotaMaxBuckets'], '1000')
- eq(summary['QuotaMaxObjCount'], '-1')
- eq(summary['QuotaMaxBytesPerBucket'], '-1')
- eq(summary['QuotaMaxObjCountPerBucket'], '-1')
+ assert summary['QuotaMaxBytes'] == '-1'
+ assert summary['QuotaMaxBuckets'] == '1000'
+ assert summary['QuotaMaxObjCount'] == '-1'
+ assert summary['QuotaMaxBytesPerBucket'] == '-1'
+ assert summary['QuotaMaxObjCountPerBucket'] == '-1'
@attr(resource='bucket')
@attr(method='head')
client.meta.events.register('after-call.s3.HeadBucket', get_http_response)
client.head_bucket(Bucket=bucket_name)
hdrs = http_response['headers']
- eq(hdrs['X-RGW-Object-Count'], '1')
- eq(hdrs['X-RGW-Bytes-Used'], '3')
- eq(hdrs['X-RGW-Quota-User-Size'], '-1')
- eq(hdrs['X-RGW-Quota-User-Objects'], '-1')
- eq(hdrs['X-RGW-Quota-Max-Buckets'], '1000')
- eq(hdrs['X-RGW-Quota-Bucket-Size'], '-1')
- eq(hdrs['X-RGW-Quota-Bucket-Objects'], '-1')
+ assert hdrs['X-RGW-Object-Count'] == '1'
+ assert hdrs['X-RGW-Bytes-Used'] == '3'
+ assert hdrs['X-RGW-Quota-User-Size'] == '-1'
+ assert hdrs['X-RGW-Quota-User-Objects'] == '-1'
+ assert hdrs['X-RGW-Quota-Max-Buckets'] == '1000'
+ assert hdrs['X-RGW-Quota-Bucket-Size'] == '-1'
+ assert hdrs['X-RGW-Quota-Bucket-Objects'] == '-1'
@attr(resource='bucket')
@attr(method='get')
# test simple retrieval
response = client.list_objects(Bucket=bucket_name, MaxKeys=1000)
unordered_keys_out = _get_keys(response)
- eq(len(keys_in), len(unordered_keys_out))
- eq(keys_in.sort(), unordered_keys_out.sort())
+ assert len(keys_in) == len(unordered_keys_out)
+ assert sorted(keys_in) == sorted(unordered_keys_out)
# test retrieval with prefix
response = client.list_objects(Bucket=bucket_name,
MaxKeys=1000,
Prefix="abc/")
unordered_keys_out = _get_keys(response)
- eq(5, len(unordered_keys_out))
+ assert 5 == len(unordered_keys_out)
# test incremental retrieval with marker
response = client.list_objects(Bucket=bucket_name, MaxKeys=6)
unordered_keys_out = _get_keys(response)
- eq(6, len(unordered_keys_out))
+ assert 6 == len(unordered_keys_out)
# now get the next bunch
response = client.list_objects(Bucket=bucket_name,
MaxKeys=6,
Marker=unordered_keys_out[-1])
unordered_keys_out2 = _get_keys(response)
- eq(6, len(unordered_keys_out2))
+ assert 6 == len(unordered_keys_out2)
# make sure there's no overlap between the incremental retrievals
intersect = set(unordered_keys_out).intersection(unordered_keys_out2)
- eq(0, len(intersect))
+ assert 0 == len(intersect)
# verify that unordered used with delimiter results in error
e = assert_raises(ClientError,
client.list_objects, Bucket=bucket_name, Delimiter="/")
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidArgument')
+ assert status == 400
+ assert error_code == 'InvalidArgument'
@attr(resource='bucket')
@attr(method='get')
# test simple retrieval
response = client.list_objects_v2(Bucket=bucket_name, MaxKeys=1000)
unordered_keys_out = _get_keys(response)
- eq(len(keys_in), len(unordered_keys_out))
- eq(keys_in.sort(), unordered_keys_out.sort())
+ assert len(keys_in) == len(unordered_keys_out)
+ assert sorted(keys_in) == sorted(unordered_keys_out)
# test retrieval with prefix
response = client.list_objects_v2(Bucket=bucket_name,
MaxKeys=1000,
Prefix="abc/")
unordered_keys_out = _get_keys(response)
- eq(5, len(unordered_keys_out))
+ assert 5 == len(unordered_keys_out)
# test incremental retrieval with marker
response = client.list_objects_v2(Bucket=bucket_name, MaxKeys=6)
unordered_keys_out = _get_keys(response)
- eq(6, len(unordered_keys_out))
+ assert 6 == len(unordered_keys_out)
# now get the next bunch
response = client.list_objects_v2(Bucket=bucket_name,
MaxKeys=6,
StartAfter=unordered_keys_out[-1])
unordered_keys_out2 = _get_keys(response)
- eq(6, len(unordered_keys_out2))
+ assert 6 == len(unordered_keys_out2)
# make sure there's no overlap between the incremental retrievals
intersect = set(unordered_keys_out).intersection(unordered_keys_out2)
- eq(0, len(intersect))
+ assert 0 == len(intersect)
# verify that unordered used with delimiter results in error
e = assert_raises(ClientError,
client.list_objects, Bucket=bucket_name, Delimiter="/")
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidArgument')
+ assert status == 400
+ assert error_code == 'InvalidArgument'
@attr(resource='bucket')
e = assert_raises(ClientError, client.list_objects, Bucket=bucket_name)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidArgument')
+ assert status == 400
+ assert error_code == 'InvalidArgument'
client = get_client()
response = client.list_objects(Bucket=bucket_name)
- eq(response['Marker'], '')
+ assert response['Marker'] == ''
@attr(resource='bucket')
client = get_client()
response = client.list_objects(Bucket=bucket_name, Marker='')
- eq(response['Marker'], '')
- eq(response['IsTruncated'], False)
+ assert response['Marker'] == ''
+ assert response['IsTruncated'] == False
keys = _get_keys(response)
- eq(keys, key_names)
+ assert keys == key_names
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects_v2(Bucket=bucket_name, ContinuationToken='')
- eq(response['ContinuationToken'], '')
- eq(response['IsTruncated'], False)
+ assert response['ContinuationToken'] == ''
+ assert response['IsTruncated'] == False
keys = _get_keys(response)
- eq(keys, key_names)
+ assert keys == key_names
@attr(resource='bucket')
@attr(method='get')
next_continuation_token = response1['NextContinuationToken']
response2 = client.list_objects_v2(Bucket=bucket_name, ContinuationToken=next_continuation_token)
- eq(response2['ContinuationToken'], next_continuation_token)
- eq(response2['IsTruncated'], False)
+ assert response2['ContinuationToken'] == next_continuation_token
+ assert response2['IsTruncated'] == False
key_names2 = ['baz', 'foo', 'quxx']
keys = _get_keys(response2)
- eq(keys, key_names2)
+ assert keys == key_names2
@attr(resource='bucket')
@attr(method='get')
next_continuation_token = response1['NextContinuationToken']
response2 = client.list_objects_v2(Bucket=bucket_name, StartAfter='bar', ContinuationToken=next_continuation_token)
- eq(response2['ContinuationToken'], next_continuation_token)
- eq(response2['StartAfter'], 'bar')
- eq(response2['IsTruncated'], False)
+ assert response2['ContinuationToken'] == next_continuation_token
+ assert response2['StartAfter'] == 'bar'
+ assert response2['IsTruncated'] == False
key_names2 = ['foo', 'quxx']
keys = _get_keys(response2)
- eq(keys, key_names2)
+ assert keys == key_names2
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects(Bucket=bucket_name, Marker='\x0a')
- eq(response['Marker'], '\x0a')
- eq(response['IsTruncated'], False)
+ assert response['Marker'] == '\x0a'
+ assert response['IsTruncated'] == False
keys = _get_keys(response)
- eq(keys, key_names)
+ assert keys == key_names
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects_v2(Bucket=bucket_name, StartAfter='\x0a')
- eq(response['StartAfter'], '\x0a')
- eq(response['IsTruncated'], False)
+ assert response['StartAfter'] == '\x0a'
+ assert response['IsTruncated'] == False
keys = _get_keys(response)
- eq(keys, key_names)
+ assert keys == key_names
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects(Bucket=bucket_name, Marker='blah')
- eq(response['Marker'], 'blah')
+ assert response['Marker'] == 'blah'
keys = _get_keys(response)
- eq(keys, [ 'foo','quxx'])
+ assert keys == [ 'foo','quxx']
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects_v2(Bucket=bucket_name, StartAfter='blah')
- eq(response['StartAfter'], 'blah')
+ assert response['StartAfter'] == 'blah'
keys = _get_keys(response)
- eq(keys, ['foo', 'quxx'])
+ assert keys == ['foo', 'quxx']
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects(Bucket=bucket_name, Marker='zzz')
- eq(response['Marker'], 'zzz')
+ assert response['Marker'] == 'zzz'
keys = _get_keys(response)
- eq(response['IsTruncated'], False)
- eq(keys, [])
+ assert response['IsTruncated'] == False
+ assert keys == []
@attr(resource='bucket')
@attr(method='get')
client = get_client()
response = client.list_objects_v2(Bucket=bucket_name, StartAfter='zzz')
- eq(response['StartAfter'], 'zzz')
+ assert response['StartAfter'] == 'zzz'
keys = _get_keys(response)
- eq(response['IsTruncated'], False)
- eq(keys, [])
+ assert response['IsTruncated'] == False
+ assert keys == []
def _compare_dates(datetime1, datetime2):
"""
# both times are in datetime format but datetime1 has
# microseconds and datetime2 does not
datetime1 = datetime1.replace(microsecond=0)
- eq(datetime1, datetime2)
+ assert datetime1 == datetime2
@attr(resource='object')
@attr(method='head')
for obj in objs_list:
key_name = obj['Key']
key_data = data[key_name]
- eq(obj['ETag'],key_data['ETag'])
- eq(obj['Size'],key_data['ContentLength'])
- eq(obj['Owner']['DisplayName'],key_data['DisplayName'])
- eq(obj['Owner']['ID'],key_data['ID'])
+ assert obj['ETag'] == key_data['ETag']
+ assert obj['Size'] == key_data['ContentLength']
+ assert obj['Owner']['DisplayName'] == key_data['DisplayName']
+ assert obj['Owner']['ID'] == key_data['ID']
_compare_dates(obj['LastModified'],key_data['LastModified'])
for obj in objs_list:
key_name = obj['Key']
key_data = data[key_name]
- eq(obj['Owner']['DisplayName'],key_data['DisplayName'])
- eq(obj['ETag'],key_data['ETag'])
- eq(obj['Size'],key_data['ContentLength'])
- eq(obj['Owner']['ID'],key_data['ID'])
- eq(obj['VersionId'], key_data['VersionId'])
+ assert obj['Owner']['DisplayName'] == key_data['DisplayName']
+ assert obj['ETag'] == key_data['ETag']
+ assert obj['Size'] == key_data['ContentLength']
+ assert obj['Owner']['ID'] == key_data['ID']
+ assert obj['VersionId'] == key_data['VersionId']
_compare_dates(obj['LastModified'],key_data['LastModified'])
@attr(resource='bucket')
e = assert_raises(ClientError, unauthenticated_client.list_objects, Bucket=bucket_name)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
- eq(error_code, 'AccessDenied')
+ assert status == 403
+ assert error_code == 'AccessDenied'
@attr(resource='bucket')
@attr(method='get')
e = assert_raises(ClientError, unauthenticated_client.list_objects_v2, Bucket=bucket_name)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
- eq(error_code, 'AccessDenied')
+ assert status == 403
+ assert error_code == 'AccessDenied'
@attr(resource='bucket')
@attr(method='get')
e = assert_raises(ClientError, client.list_objects, Bucket=bucket_name)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 404)
- eq(error_code, 'NoSuchBucket')
+ assert status == 404
+ assert error_code == 'NoSuchBucket'
@attr(resource='bucket')
@attr(method='get')
e = assert_raises(ClientError, client.list_objects_v2, Bucket=bucket_name)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 404)
- eq(error_code, 'NoSuchBucket')
+ assert status == 404
+ assert error_code == 'NoSuchBucket'
@attr(resource='bucket')
@attr(method='delete')
e = assert_raises(ClientError, client.delete_bucket, Bucket=bucket_name)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 404)
- eq(error_code, 'NoSuchBucket')
+ assert status == 404
+ assert error_code == 'NoSuchBucket'
@attr(resource='bucket')
@attr(method='delete')
e = assert_raises(ClientError, client.delete_bucket, Bucket=bucket_name)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 409)
- eq(error_code, 'BucketNotEmpty')
+ assert status == 409
+ assert error_code == 'BucketNotEmpty'
def _do_set_bucket_canned_acl(client, bucket_name, canned_acl, i, results):
try:
_do_wait_completion(t)
for r in results:
- eq(r, True)
+ assert r == True
@attr(resource='object')
@attr(method='put')
e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key='foo', Body='foo')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 404)
- eq(error_code, 'NoSuchBucket')
+ assert status == 404
+ assert error_code == 'NoSuchBucket'
@attr(resource='bucket')
e = assert_raises(ClientError, client.delete_bucket, Bucket=bucket_name)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 404)
- eq(error_code, 'NoSuchBucket')
+ assert status == 404
+ assert error_code == 'NoSuchBucket'
@attr(resource='object')
@attr(method='get')
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key='bar')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 404)
- eq(error_code, 'NoSuchKey')
+ assert status == 404
+ assert error_code == 'NoSuchKey'
http_response = None
request_id = resp_body_xml.find('.//RequestId').text
assert request_id is not None
- eq(request_id, e.response['ResponseMetadata']['RequestId'])
+ assert request_id == e.response['ResponseMetadata']['RequestId']
def _make_objs_dict(key_names):
objs_list = []
bucket_name = _create_objects(keys=key_names)
client = get_client()
response = client.list_objects(Bucket=bucket_name)
- eq(len(response['Contents']), 3)
+ assert len(response['Contents']) == 3
objs_dict = _make_objs_dict(key_names=key_names)
response = client.delete_objects(Bucket=bucket_name, Delete=objs_dict)
- eq(len(response['Deleted']), 3)
+ assert len(response['Deleted']) == 3
assert 'Errors' not in response
response = client.list_objects(Bucket=bucket_name)
assert 'Contents' not in response
response = client.delete_objects(Bucket=bucket_name, Delete=objs_dict)
- eq(len(response['Deleted']), 3)
+ assert len(response['Deleted']) == 3
assert 'Errors' not in response
response = client.list_objects(Bucket=bucket_name)
assert 'Contents' not in response
bucket_name = _create_objects(keys=key_names)
client = get_client()
response = client.list_objects_v2(Bucket=bucket_name)
- eq(len(response['Contents']), 3)
+ assert len(response['Contents']) == 3
objs_dict = _make_objs_dict(key_names=key_names)
response = client.delete_objects(Bucket=bucket_name, Delete=objs_dict)
- eq(len(response['Deleted']), 3)
+ assert len(response['Deleted']) == 3
assert 'Errors' not in response
response = client.list_objects_v2(Bucket=bucket_name)
assert 'Contents' not in response
response = client.delete_objects(Bucket=bucket_name, Delete=objs_dict)
- eq(len(response['Deleted']), 3)
+ assert len(response['Deleted']) == 3
assert 'Errors' not in response
response = client.list_objects_v2(Bucket=bucket_name)
assert 'Contents' not in response
numKeys = 0
for page in pages:
numKeys += len(page['Contents'])
- eq(numKeys, 1001)
+ assert numKeys == 1001
objs_dict = _make_objs_dict(key_names=key_names)
e = assert_raises(ClientError,client.delete_objects,Bucket=bucket_name,Delete=objs_dict)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
+ assert status == 400
@attr(resource='object')
@attr(method='post')
numKeys = 0
for page in pages:
numKeys += len(page['Contents'])
- eq(numKeys, 1001)
+ assert numKeys == 1001
objs_dict = _make_objs_dict(key_names=key_names)
e = assert_raises(ClientError,client.delete_objects,Bucket=bucket_name,Delete=objs_dict)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
+ assert status == 400
@attr(resource='object')
@attr(method='put')
client.put_object(Bucket=bucket_name, Key='foo', Body='')
response = client.head_object(Bucket=bucket_name, Key='foo')
- eq(response['ContentLength'], 0)
+ assert response['ContentLength'] == 0
@attr(resource='object')
@attr(method='put')
bucket_name = get_new_bucket()
client = get_client()
response = client.put_object(Bucket=bucket_name, Key='foo', Body='bar')
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
- eq(response['ETag'], '"37b51d194a7513e45b56f6524f2d51f2"')
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+ assert response['ETag'] == '"37b51d194a7513e45b56f6524f2d51f2"'
@attr(resource='object')
@attr(method='put')
client.put_object(Bucket=bucket_name, Key='foo', Body='bar', CacheControl=cache_control)
response = client.head_object(Bucket=bucket_name, Key='foo')
- eq(response['ResponseMetadata']['HTTPHeaders']['cache-control'], cache_control)
+ assert response['ResponseMetadata']['HTTPHeaders']['cache-control'] == cache_control
@attr(resource='object')
@attr(method='put')
# Read
response = client.get_object(Bucket=bucket_name, Key='foo')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
# Update
client.put_object(Bucket=bucket_name, Key='foo', Body='soup')
# Read
response = client.get_object(Bucket=bucket_name, Key='foo')
body = _get_body(response)
- eq(body, 'soup')
+ assert body == 'soup'
# Delete
client.delete_object(Bucket=bucket_name, Key='foo')
@attr(assertion='reread what we wrote')
def test_object_set_get_metadata_none_to_good():
got = _set_get_metadata('mymeta')
- eq(got, 'mymeta')
+ assert got == 'mymeta'
@attr(resource='object.metadata')
@attr(method='put')
@attr(assertion='write empty value, returns empty value')
def test_object_set_get_metadata_none_to_empty():
got = _set_get_metadata('')
- eq(got, '')
+ assert got == ''
@attr(resource='object.metadata')
@attr(method='put')
def test_object_set_get_metadata_overwrite_to_empty():
bucket_name = get_new_bucket()
got = _set_get_metadata('oldmeta', bucket_name)
- eq(got, 'oldmeta')
+ assert got == 'oldmeta'
got = _set_get_metadata('', bucket_name)
- eq(got, '')
+ assert got == ''
@attr(resource='object.metadata')
@attr(method='put')
got = response['Metadata']['meta1']
print(got)
print(u"Hello World\xe9")
- eq(got, u"Hello World\xe9")
+ assert got == u"Hello World\xe9"
def _set_get_metadata_unreadable(metadata, bucket_name=None):
"""
response = client.get_object(Bucket=bucket_name, Key='foo')
got = response['Metadata']
- eq(got, {})
+ assert got == {}
@attr(resource='object')
@attr(method='put')
client.put_object(Bucket=bucket_name, Key='foo', Body=data)
response = client.get_object(Bucket=bucket_name, Key='foo')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
def _get_post_url(bucket_name):
endpoint = get_config_endpoint()
client.create_bucket(ACL='public-read-write', Bucket=bucket_name)
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 204)
+ assert r.status_code == 204
response = client.get_object(Bucket=bucket_name, Key='foo.txt')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 204)
+ assert r.status_code == 204
response = client.get_object(Bucket=bucket_name, Key='foo.txt')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
@attr(resource='object')
@attr(method='post')
('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 204)
+ assert r.status_code == 204
response = client.get_object(Bucket=bucket_name, Key="foo.txt")
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 403)
+ assert r.status_code == 403
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 201)
+ assert r.status_code == 201
message = ET.fromstring(r.content).find('Key')
- eq(message.text,'foo.txt')
+ assert message.text == 'foo.txt'
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 204)
+ assert r.status_code == 204
content = r.content.decode()
- eq(content,'')
+ assert content == ''
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', foo_string)])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 204)
+ assert r.status_code == 204
response = client.get_object(Bucket=bucket_name, Key='foo.txt')
body = _get_body(response)
- eq(body, foo_string)
+ assert body == foo_string
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('foo.txt', 'bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 204)
+ assert r.status_code == 204
response = client.get_object(Bucket=bucket_name, Key='foo.txt')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),("x-ignore-foo" , "bar"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 204)
+ assert r.status_code == 204
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 204)
+ assert r.status_code == 204
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 204)
+ assert r.status_code == 204
response = client.get_object(Bucket=bucket_name, Key='\$foo.txt')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
@attr(resource='object')
@attr(method='post')
('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 200)
+ assert r.status_code == 200
url = r.url
response = client.get_object(Bucket=bucket_name, Key='foo.txt')
- eq(url,
- '{rurl}?bucket={bucket}&key={key}&etag=%22{etag}%22'.format(rurl = redirect_url,\
- bucket = bucket_name, key = 'foo.txt', etag = response['ETag'].strip('"')))
+ assert url == '{rurl}?bucket={bucket}&key={key}&etag=%22{etag}%22'.format(\
+ rurl = redirect_url, bucket = bucket_name, key = 'foo.txt', etag = response['ETag'].strip('"'))
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 403)
+ assert r.status_code == 403
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 403)
+ assert r.status_code == 403
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 400)
+ assert r.status_code == 400
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 400)
+ assert r.status_code == 400
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 400)
+ assert r.status_code == 400
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 403)
+ assert r.status_code == 403
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('x-amz-meta-foo' , 'barclamp'),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 204)
+ assert r.status_code == 204
response = client.get_object(Bucket=bucket_name, Key='foo.txt')
- eq(response['Metadata']['foo'], 'barclamp')
+ assert response['Metadata']['foo'] == 'barclamp'
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 403)
+ assert r.status_code == 403
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 400)
+ assert r.status_code == 400
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 400)
+ assert r.status_code == 400
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 403)
+ assert r.status_code == 403
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('x-amz-meta-foo' , 'barclamp'),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 403)
+ assert r.status_code == 403
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 400)
+ assert r.status_code == 400
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 400)
+ assert r.status_code == 400
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 400)
+ assert r.status_code == 400
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 400)
+ assert r.status_code == 400
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 400)
+ assert r.status_code == 400
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 400)
+ assert r.status_code == 400
@attr(resource='object')
@attr(method='post')
("Content-Type" , "text/plain"),('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 400)
+ assert r.status_code == 400
@attr(resource='object')
@attr(method='get')
response = client.get_object(Bucket=bucket_name, Key='foo', IfMatch=etag)
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
@attr(resource='object')
@attr(method='get')
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key='foo', IfMatch='"ABCORZ"')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 412)
- eq(error_code, 'PreconditionFailed')
+ assert status == 412
+ assert error_code == 'PreconditionFailed'
@attr(resource='object')
@attr(method='get')
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key='foo', IfNoneMatch=etag)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 304)
- eq(e.response['Error']['Message'], 'Not Modified')
+ assert status == 304
+ assert e.response['Error']['Message'] == 'Not Modified'
@attr(resource='object')
@attr(method='get')
response = client.get_object(Bucket=bucket_name, Key='foo', IfNoneMatch='ABCORZ')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
@attr(resource='object')
@attr(method='get')
response = client.get_object(Bucket=bucket_name, Key='foo', IfModifiedSince='Sat, 29 Oct 1994 19:43:31 GMT')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
@attr(resource='object')
@attr(method='get')
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key='foo', IfModifiedSince=after_str)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 304)
- eq(e.response['Error']['Message'], 'Not Modified')
+ assert status == 304
+ assert e.response['Error']['Message'] == 'Not Modified'
@attr(resource='object')
@attr(method='get')
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key='foo', IfUnmodifiedSince='Sat, 29 Oct 1994 19:43:31 GMT')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 412)
- eq(error_code, 'PreconditionFailed')
+ assert status == 412
+ assert error_code == 'PreconditionFailed'
@attr(resource='object')
@attr(method='get')
response = client.get_object(Bucket=bucket_name, Key='foo', IfUnmodifiedSince='Sat, 29 Oct 2100 19:43:31 GMT')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
@attr(resource='object')
response = client.get_object(Bucket=bucket_name, Key='foo')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
etag = response['ETag'].replace('"', '')
response = client.get_object(Bucket=bucket_name, Key='foo')
body = _get_body(response)
- eq(body, 'zar')
+ assert body == 'zar'
@attr(resource='object')
@attr(method='get')
client.put_object(Bucket=bucket_name, Key='foo', Body='bar')
response = client.get_object(Bucket=bucket_name, Key='foo')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
# pass in custom header 'If-Match' before PutObject call
lf = (lambda **kwargs: kwargs['params']['headers'].update({'If-Match': '"ABCORZ"'}))
e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key='foo', Body='zar')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 412)
- eq(error_code, 'PreconditionFailed')
+ assert status == 412
+ assert error_code == 'PreconditionFailed'
response = client.get_object(Bucket=bucket_name, Key='foo')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
@attr(resource='object')
@attr(method='put')
client.put_object(Bucket=bucket_name, Key='foo', Body='bar')
response = client.get_object(Bucket=bucket_name, Key='foo')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
lf = (lambda **kwargs: kwargs['params']['headers'].update({'If-Match': '*'}))
client.meta.events.register('before-call.s3.PutObject', lf)
response = client.get_object(Bucket=bucket_name, Key='foo')
body = _get_body(response)
- eq(body, 'zar')
+ assert body == 'zar'
@attr(resource='object')
@attr(method='put')
client.meta.events.register('before-call.s3.PutObject', lf)
e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key='foo', Body='bar')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 412)
- eq(error_code, 'PreconditionFailed')
+ assert status == 412
+ assert error_code == 'PreconditionFailed'
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key='foo')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 404)
- eq(error_code, 'NoSuchKey')
+ assert status == 404
+ assert error_code == 'NoSuchKey'
@attr(resource='object')
@attr(method='put')
client.put_object(Bucket=bucket_name, Key='foo', Body='bar')
response = client.get_object(Bucket=bucket_name, Key='foo')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
lf = (lambda **kwargs: kwargs['params']['headers'].update({'If-None-Match': 'ABCORZ'}))
client.meta.events.register('before-call.s3.PutObject', lf)
response = client.get_object(Bucket=bucket_name, Key='foo')
body = _get_body(response)
- eq(body, 'zar')
+ assert body == 'zar'
@attr(resource='object')
@attr(method='put')
response = client.get_object(Bucket=bucket_name, Key='foo')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
etag = response['ETag'].replace('"', '')
e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key='foo', Body='zar')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 412)
- eq(error_code, 'PreconditionFailed')
+ assert status == 412
+ assert error_code == 'PreconditionFailed'
response = client.get_object(Bucket=bucket_name, Key='foo')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
@attr(resource='object')
@attr(method='put')
response = client.get_object(Bucket=bucket_name, Key='foo')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
@attr(resource='object')
@attr(method='put')
response = client.get_object(Bucket=bucket_name, Key='foo')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
lf = (lambda **kwargs: kwargs['params']['headers'].update({'If-None-Match': '*'}))
client.meta.events.register('before-call.s3.PutObject', lf)
e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key='foo', Body='zar')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 412)
- eq(error_code, 'PreconditionFailed')
+ assert status == 412
+ assert error_code == 'PreconditionFailed'
response = client.get_object(Bucket=bucket_name, Key='foo')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
def _setup_bucket_object_acl(bucket_acl, object_acl):
"""
unauthenticated_client = get_unauthenticated_client()
response = unauthenticated_client.get_object(Bucket=bucket_name, Key='foo')
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
@attr(resource='object')
@attr(method='get')
e = assert_raises(ClientError, unauthenticated_client.get_object, Bucket=bucket_name, Key='foo')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 404)
- eq(error_code, 'NoSuchBucket')
+ assert status == 404
+ assert error_code == 'NoSuchBucket'
@attr(resource='object')
@attr(method='get')
e = assert_raises(ClientError, unauthenticated_client.delete_object, Bucket=bucket_name, Key='foo')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 404)
- eq(error_code, 'NoSuchBucket')
+ assert status == 404
+ assert error_code == 'NoSuchBucket'
@attr(resource='object')
@attr(method='get')
e = assert_raises(ClientError, unauthenticated_client.get_object, Bucket=bucket_name, Key='foo')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 404)
- eq(error_code, 'NoSuchKey')
+ assert status == 404
+ assert error_code == 'NoSuchKey'
@attr(resource='bucket')
@attr(method='head')
client = get_client()
response = client.head_bucket(Bucket=bucket_name)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
@attr(resource='bucket')
@attr(method='head')
e = assert_raises(ClientError, client.head_bucket, Bucket=bucket_name)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 404)
+ assert status == 404
# n.b., RGW does not send a response document for this operation,
# which seems consistent with
# https://docs.aws.amazon.com/AmazonS3/latest/API/API_HeadBucket.html
- #eq(error_code, 'NoSuchKey')
+ #assert error_code == 'NoSuchKey'
@attr('fails_on_aws')
@pytest.mark.fails_on_aws
client = get_client()
response = client.head_bucket(Bucket=bucket_name)
- eq(int(response['ResponseMetadata']['HTTPHeaders']['x-rgw-object-count']), 0)
- eq(int(response['ResponseMetadata']['HTTPHeaders']['x-rgw-bytes-used']), 0)
+ assert int(response['ResponseMetadata']['HTTPHeaders']['x-rgw-object-count']) == 0
+ assert int(response['ResponseMetadata']['HTTPHeaders']['x-rgw-bytes-used']) == 0
_create_objects(bucket_name=bucket_name, keys=['foo','bar','baz'])
response = client.head_bucket(Bucket=bucket_name)
- eq(int(response['ResponseMetadata']['HTTPHeaders']['x-rgw-object-count']), 3)
- eq(int(response['ResponseMetadata']['HTTPHeaders']['x-rgw-bytes-used']), 9)
+ assert int(response['ResponseMetadata']['HTTPHeaders']['x-rgw-object-count']) == 3
+ assert int(response['ResponseMetadata']['HTTPHeaders']['x-rgw-bytes-used']) == 9
@attr(resource='bucket.acl')
@attr(method='get')
unauthenticated_client = get_unauthenticated_client()
response = unauthenticated_client.get_object(Bucket=bucket_name, Key='foo')
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
@attr(resource='object.acl')
@attr(method='get')
unauthenticated_client = get_unauthenticated_client()
e = assert_raises(ClientError, unauthenticated_client.get_object, Bucket=bucket_name, Key='foo')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
- eq(error_code, 'AccessDenied')
+ assert status == 403
+ assert error_code == 'AccessDenied'
@attr(resource='object')
@attr(method='ACLs')
client = get_client()
response = client.get_object(Bucket=bucket_name, Key='foo')
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
@attr(resource='object')
@attr(method='get')
client = get_client()
response = client.get_object(Bucket=bucket_name, Key='foo', ResponseCacheControl='no-cache', ResponseContentDisposition='bla', ResponseContentEncoding='aaa', ResponseContentLanguage='esperanto', ResponseContentType='foo/bar', ResponseExpires='123')
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
- eq(response['ResponseMetadata']['HTTPHeaders']['content-type'], 'foo/bar')
- eq(response['ResponseMetadata']['HTTPHeaders']['content-disposition'], 'bla')
- eq(response['ResponseMetadata']['HTTPHeaders']['content-language'], 'esperanto')
- eq(response['ResponseMetadata']['HTTPHeaders']['content-encoding'], 'aaa')
- eq(response['ResponseMetadata']['HTTPHeaders']['cache-control'], 'no-cache')
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+ assert response['ResponseMetadata']['HTTPHeaders']['content-type'] == 'foo/bar'
+ assert response['ResponseMetadata']['HTTPHeaders']['content-disposition'] == 'bla'
+ assert response['ResponseMetadata']['HTTPHeaders']['content-language'] == 'esperanto'
+ assert response['ResponseMetadata']['HTTPHeaders']['content-encoding'] == 'aaa'
+ assert response['ResponseMetadata']['HTTPHeaders']['cache-control'] == 'no-cache'
@attr(resource='object')
@attr(method='ACLs')
client = get_client()
response = client.get_object(Bucket=bucket_name, Key='foo')
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
@attr(resource='object')
@attr(method='ACLs')
client = get_client()
response = client.get_object(Bucket=bucket_name, Key='foo')
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
@attr(resource='object')
@attr(method='get')
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key='foo')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 404)
- eq(error_code, 'NoSuchBucket')
+ assert status == 404
+ assert error_code == 'NoSuchBucket'
@attr(resource='object')
@attr(method='get')
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key='foo')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 404)
- eq(error_code, 'NoSuchKey')
+ assert status == 404
+ assert error_code == 'NoSuchKey'
@attr(resource='object')
@attr(method='get')
url = client.generate_presigned_url(ClientMethod='get_object', Params=params, ExpiresIn=100000, HttpMethod='GET')
res = requests.get(url, verify=get_config_ssl_verify()).__dict__
- eq(res['status_code'], 200)
+ assert res['status_code'] == 200
@attr(resource='object')
@attr(method='get')
url = client.generate_presigned_url(ClientMethod='get_object', Params=params, ExpiresIn=0, HttpMethod='GET')
res = requests.get(url, verify=get_config_ssl_verify()).__dict__
- eq(res['status_code'], 403)
+ assert res['status_code'] == 403
@attr(resource='object')
@attr(method='get')
url = client.generate_presigned_url(ClientMethod='get_object', Params=params, ExpiresIn=609901, HttpMethod='GET')
res = requests.get(url, verify=get_config_ssl_verify()).__dict__
- eq(res['status_code'], 403)
+ assert res['status_code'] == 403
@attr(resource='object')
@attr(method='get')
url = client.generate_presigned_url(ClientMethod='get_object', Params=params, ExpiresIn=-7, HttpMethod='GET')
res = requests.get(url, verify=get_config_ssl_verify()).__dict__
- eq(res['status_code'], 403)
+ assert res['status_code'] == 403
@attr(resource='object')
e = assert_raises(ClientError, unauthenticated_client.put_object, Bucket=bucket_name, Key='foo', Body='foo')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
- eq(error_code, 'AccessDenied')
+ assert status == 403
+ assert error_code == 'AccessDenied'
@attr(resource='object')
@attr(method='put')
unauthenticated_client = get_unauthenticated_client()
response = unauthenticated_client.put_object(Bucket=bucket_name, Key='foo', Body='foo')
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
@attr(resource='object')
@attr(method='put')
client = get_client()
response = client.put_object(Bucket=bucket_name, Key='foo', Body='foo')
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
@attr(resource='object')
@attr(method='put')
# params wouldn't take a 'Body' parameter so we're passing it in here
res = requests.put(url, data="foo", verify=get_config_ssl_verify()).__dict__
- eq(res['status_code'], 403)
+ assert res['status_code'] == 403
def check_bad_bucket_name(bucket_name):
"""
client = get_client()
e = assert_raises(ClientError, client.create_bucket, Bucket=bucket_name)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidBucketName')
+ assert status == 400
+ assert error_code == 'InvalidBucketName'
# AWS does not enforce all documented bucket restrictions.
)
client = get_client()
response = client.create_bucket(Bucket=bucket_name)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
def _test_bucket_create_naming_good_long(length):
"""
)
client = get_client()
response = client.create_bucket(Bucket=bucket_name)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
# Breaks DNS with SubdomainCallingFormat
@attr('fails_with_subdomain')
)
bucket = get_new_bucket_resource(name=bucket_name)
is_empty = _bucket_is_empty(bucket)
- eq(is_empty, True)
+ assert is_empty == True
# AWS does not enforce all documented bucket restrictions.
# http://docs.amazonwebservices.com/AmazonS3/2006-03-01/dev/index.html?BucketRestrictions.html
def test_bucket_create_naming_dns_underscore():
invalid_bucketname = 'foo_bar'
status, error_code = check_invalid_bucketname(invalid_bucketname)
- eq(status, 400)
- eq(error_code, 'InvalidBucketName')
+ assert status == 400
+ assert error_code == 'InvalidBucketName'
# Breaks DNS with SubdomainCallingFormat
@attr('fails_with_subdomain')
def test_bucket_create_naming_dns_dash_at_end():
invalid_bucketname = 'foo-'
status, error_code = check_invalid_bucketname(invalid_bucketname)
- eq(status, 400)
- eq(error_code, 'InvalidBucketName')
+ assert status == 400
+ assert error_code == 'InvalidBucketName'
# Breaks DNS with SubdomainCallingFormat
def test_bucket_create_naming_dns_dot_dot():
invalid_bucketname = 'foo..bar'
status, error_code = check_invalid_bucketname(invalid_bucketname)
- eq(status, 400)
- eq(error_code, 'InvalidBucketName')
+ assert status == 400
+ assert error_code == 'InvalidBucketName'
# Breaks DNS with SubdomainCallingFormat
def test_bucket_create_naming_dns_dot_dash():
invalid_bucketname = 'foo.-bar'
status, error_code = check_invalid_bucketname(invalid_bucketname)
- eq(status, 400)
- eq(error_code, 'InvalidBucketName')
+ assert status == 400
+ assert error_code == 'InvalidBucketName'
# Breaks DNS with SubdomainCallingFormat
def test_bucket_create_naming_dns_dash_dot():
invalid_bucketname = 'foo-.bar'
status, error_code = check_invalid_bucketname(invalid_bucketname)
- eq(status, 400)
- eq(error_code, 'InvalidBucketName')
+ assert status == 400
+ assert error_code == 'InvalidBucketName'
@attr(resource='bucket')
@attr(method='put')
response = client.create_bucket(Bucket=bucket_name)
except ClientError as e:
status, error_code = _get_status_and_error_code(e.response)
- eq(e.status, 409)
- eq(e.error_code, 'BucketAlreadyOwnedByYou')
+ assert status == 409
+ assert error_code == 'BucketAlreadyOwnedByYou'
@attr(resource='bucket')
@attr(method='get')
response = client.get_bucket_location(Bucket=bucket_name)
if location_constraint == "":
location_constraint = None
- eq(response['LocationConstraint'], location_constraint)
+ assert response['LocationConstraint'] == location_constraint
@attr(resource='bucket')
@attr(method='put')
client.create_bucket(Bucket=bucket_name)
e = assert_raises(ClientError, alt_client.create_bucket, Bucket=bucket_name)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 409)
- eq(error_code, 'BucketAlreadyExists')
+ assert status == 409
+ assert error_code == 'BucketAlreadyExists'
@attr(resource='bucket')
@attr(method='put')
client.create_bucket(Bucket=bucket_name, ACL='public-read')
e = assert_raises(ClientError, client.create_bucket, Bucket=bucket_name)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 409)
- eq(error_code, 'BucketAlreadyExists')
+ assert status == 409
+ assert error_code == 'BucketAlreadyExists'
@attr(resource='bucket')
@attr(method='put')
client.create_bucket(Bucket=bucket_name)
e = assert_raises(ClientError, client.create_bucket, Bucket=bucket_name, ACL='public-read')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 409)
- eq(error_code, 'BucketAlreadyExists')
+ assert status == 409
+ assert error_code == 'BucketAlreadyExists'
def check_access_denied(fn, *args, **kwargs):
e = assert_raises(ClientError, fn, *args, **kwargs)
status = _get_status(e.response)
- eq(status, 403)
+ assert status == 403
def check_grants(got, want):
Check that grants list in got matches the dictionaries in want,
in any order.
"""
- eq(len(got), len(want))
+ assert len(got) == len(want)
# There are instances when got does not match due the order of item.
if got[0]["Grantee"].get("DisplayName"):
for g, w in zip(got, want):
w = dict(w)
g = dict(g)
- eq(g.pop('Permission', None), w['Permission'])
- eq(g['Grantee'].pop('DisplayName', None), w['DisplayName'])
- eq(g['Grantee'].pop('ID', None), w['ID'])
- eq(g['Grantee'].pop('Type', None), w['Type'])
- eq(g['Grantee'].pop('URI', None), w['URI'])
- eq(g['Grantee'].pop('EmailAddress', None), w['EmailAddress'])
- eq(g, {'Grantee': {}})
+ assert g.pop('Permission', None) == w['Permission']
+ assert g['Grantee'].pop('DisplayName', None) == w['DisplayName']
+ assert g['Grantee'].pop('ID', None) == w['ID']
+ assert g['Grantee'].pop('Type', None) == w['Type']
+ assert g['Grantee'].pop('URI', None) == w['URI']
+ assert g['Grantee'].pop('EmailAddress', None) == w['EmailAddress']
+ assert g == {'Grantee': {}}
@attr(resource='bucket')
display_name = get_main_display_name()
user_id = get_main_user_id()
- eq(response['Owner']['DisplayName'], display_name)
- eq(response['Owner']['ID'], user_id)
+ assert response['Owner']['DisplayName'] == display_name
+ assert response['Owner']['ID'] == user_id
grants = response['Grants']
check_grants(
alt_client.put_object_acl(Bucket=bucket_name, Key='foo', AccessControlPolicy=grant)
response = alt_client.get_object_acl(Bucket=bucket_name, Key='foo')
- eq(response['Owner']['ID'], main_user_id)
+ assert response['Owner']['ID'] == main_user_id
def add_obj_user_grant(bucket_name, key, grant):
"""
main_client.put_object_acl(Bucket=bucket_name, Key='foo', AccessControlPolicy=grants)
response = main_client.get_object(Bucket=bucket_name, Key='foo')
- eq(content_type, response['ContentType'])
- eq(etag, response['ETag'])
+ assert content_type == response['ContentType']
+ assert etag == response['ETag']
@attr(resource='bucket')
@attr(method='ACLs')
client = get_client()
response = client.put_bucket_acl(Bucket=bucket_name, ACL='private')
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
def add_bucket_user_grant(bucket_name, grant):
"""
main_display_name = get_main_display_name()
main_user_id = get_main_user_id()
- eq(owner_id, main_user_id)
- eq(owner_display_name, main_display_name)
+ assert owner_id == main_user_id
+ assert owner_display_name == main_display_name
@attr(resource='bucket')
@attr(method='ACLs')
e = assert_raises(ClientError, client.put_bucket_acl, Bucket=bucket_name, AccessControlPolicy=grant)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidArgument')
+ assert status == 400
+ assert error_code == 'InvalidArgument'
@attr(resource='bucket')
@attr(method='ACLs')
e = assert_raises(ClientError, client.put_bucket_acl, Bucket=bucket_name, AccessControlPolicy = grant)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'UnresolvableGrantByEmailAddress')
+ assert status == 400
+ assert error_code == 'UnresolvableGrantByEmailAddress'
@attr(resource='bucket')
@attr(method='ACLs')
response = client.get_bucket_acl(Bucket=bucket_name)
- eq(len(response['Grants']), 0)
+ assert len(response['Grants']) == 0
# set policy back to original so that bucket can be cleaned up
policy['Grants'] = old_grants
body = _get_body(response)
# a should be public-read, b gets default (private)
- eq(body, 'foocontent')
+ assert body == 'foocontent'
check_access_denied(alt_client.put_object, Bucket=bucket_name, Key=key1, Body='foooverwrite')
alt_client2 = get_alt_client()
body = _get_body(response)
# a should be public-read, b gets default (private)
- eq(body, 'foocontent')
+ assert body == 'foocontent'
check_access_denied(alt_client.put_object, Bucket=bucket_name, Key=key1, Body='foooverwrite')
alt_client2 = get_alt_client()
# a should be public-read-only ... because it is in a private bucket
# b gets default (private)
- eq(body, 'foocontent')
+ assert body == 'foocontent'
check_access_denied(alt_client.put_object, Bucket=bucket_name, Key=key1, Body='foooverwrite')
alt_client2 = get_alt_client()
# a should be public-read-only ... because it is in a private bucket
# b gets default (private)
- eq(body, 'foocontent')
+ assert body == 'foocontent'
check_access_denied(alt_client.put_object, Bucket=bucket_name, Key=key1, Body='foooverwrite')
alt_client2 = get_alt_client()
objs = get_objects_list(bucket=bucket_name, client=alt_client3)
- eq(objs, ['bar', 'foo'])
+ assert objs == ['bar', 'foo']
check_access_denied(alt_client3.put_object, Bucket=bucket_name, Key=newkey, Body='newcontent')
@attr(resource='object')
# a should be public-read, b gets default (private)
body = _get_body(response)
- eq(body, 'foocontent')
+ assert body == 'foocontent'
check_access_denied(alt_client.put_object, Bucket=bucket_name, Key=key1, Body='foooverwrite')
objs = get_objects_list(bucket=bucket_name, client=alt_client3)
- eq(objs, ['bar', 'foo'])
+ assert objs == ['bar', 'foo']
check_access_denied(alt_client3.put_object, Bucket=bucket_name, Key=newkey, Body='newcontent')
# a should be public-read-only ... because it is in a r/o bucket
# b gets default (private)
- eq(body, 'foocontent')
+ assert body == 'foocontent'
check_access_denied(alt_client.put_object, Bucket=bucket_name, Key=key1, Body='foooverwrite')
objs = get_objects_list(bucket=bucket_name, client=alt_client3)
- eq(objs, ['bar', 'foo'])
+ assert objs == ['bar', 'foo']
check_access_denied(alt_client3.put_object, Bucket=bucket_name, Key=newkey, Body='newcontent')
alt_client.put_object(Bucket=bucket_name, Key=key2, Body='baroverwrite')
objs = get_objects_list(bucket=bucket_name, client=alt_client)
- eq(objs, ['bar', 'foo'])
+ assert objs == ['bar', 'foo']
alt_client.put_object(Bucket=bucket_name, Key=newkey, Body='newcontent')
@attr(resource='object')
response = alt_client.get_object(Bucket=bucket_name, Key=key1)
body = _get_body(response)
- eq(body, 'foocontent')
+ assert body == 'foocontent'
alt_client.put_object(Bucket=bucket_name, Key=key1, Body='barcontent')
check_access_denied(alt_client.get_object, Bucket=bucket_name, Key=key2)
alt_client.put_object(Bucket=bucket_name, Key=key2, Body='baroverwrite')
objs = get_objects_list(bucket=bucket_name, client=alt_client)
- eq(objs, ['bar', 'foo'])
+ assert objs == ['bar', 'foo']
alt_client.put_object(Bucket=bucket_name, Key=newkey, Body='newcontent')
@attr(resource='object')
body = _get_body(response)
# a should be public-read-write, b gets default (private)
- eq(body, 'foocontent')
+ assert body == 'foocontent'
alt_client.put_object(Bucket=bucket_name, Key=key1, Body='foooverwrite')
check_access_denied(alt_client.get_object, Bucket=bucket_name, Key=key2)
alt_client.put_object(Bucket=bucket_name, Key=key2, Body='baroverwrite')
objs = get_objects_list(bucket=bucket_name, client=alt_client)
- eq(objs, ['bar', 'foo'])
+ assert objs == ['bar', 'foo']
alt_client.put_object(Bucket=bucket_name, Key=newkey, Body='newcontent')
@attr(resource='bucket')
# allowing us to vary the calling format in testing.
unauthenticated_client = get_unauthenticated_client()
response = unauthenticated_client.list_buckets()
- eq(len(response['Buckets']), 0)
+ assert len(response['Buckets']) == 0
@attr(resource='bucket')
@attr(method='get')
bad_auth_client = get_bad_auth_client()
e = assert_raises(ClientError, bad_auth_client.list_buckets)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
- eq(error_code, 'InvalidAccessKeyId')
+ assert status == 403
+ assert error_code == 'InvalidAccessKeyId'
@attr(resource='bucket')
@attr(method='get')
bad_auth_client = get_bad_auth_client(aws_access_key_id=main_access_key)
e = assert_raises(ClientError, bad_auth_client.list_buckets)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
- eq(error_code, 'SignatureDoesNotMatch')
+ assert status == 403
+ assert error_code == 'SignatureDoesNotMatch'
@pytest.fixture
def override_prefix_a():
bucket_name = _create_objects(keys=key_names)
objs_list = get_objects_list(bucket_name)
- eq(key_names, objs_list)
+ assert key_names == objs_list
client = get_client()
client.create_bucket(Bucket=bucket_name)
objs_list = get_objects_list(bucket_name)
- eq(key_names, objs_list)
+ assert key_names == objs_list
@attr(resource='object')
@attr(method='put')
bucket_name = _create_objects(keys=key_names)
objs_list = get_objects_list(bucket_name)
- eq(key_names, objs_list)
+ assert key_names == objs_list
client = get_client()
for name in key_names:
- eq((name in objs_list), True)
+ assert name in objs_list
response = client.get_object(Bucket=bucket_name, Key=name)
body = _get_body(response)
- eq(name, body)
+ assert name == body
client.put_object_acl(Bucket=bucket_name, Key=name, ACL='private')
@attr(resource='bucket')
objs_list = get_objects_list(bucket_name)
- eq(len(objs_list), 5)
+ assert len(objs_list) == 5
objs_list = get_objects_list(bucket_name, prefix='_bla/')
- eq(len(objs_list), 4)
+ assert len(objs_list) == 4
@attr(resource='object')
@attr(method='put')
client.copy(copy_source, bucket_name, 'bar321foo')
response = client.get_object(Bucket=bucket_name, Key='bar321foo')
- eq(response['ContentLength'], 0)
+ assert response['ContentLength'] == 0
@attr(resource='object')
@attr(method='put')
key2 = 'obj2'
client.copy_object(Bucket=bucket_name, Key=key2, CopySource=copy_source)
response = client.get_object(Bucket=bucket_name, Key=key2)
- eq(response['ContentLength'], 16*1024*1024)
+ assert response['ContentLength'] == 16*1024*1024
@attr(resource='object')
@attr(method='put')
response = client.get_object(Bucket=bucket_name, Key='bar321foo')
body = _get_body(response)
- eq('foo', body)
+ assert 'foo' == body
@attr(resource='object')
@attr(method='put')
response = client.get_object(Bucket=bucket_name, Key='bar321foo')
body = _get_body(response)
- eq('foo', body)
+ assert 'foo' == body
response_content_type = response['ContentType']
- eq(response_content_type, content_type)
+ assert response_content_type == content_type
@attr(resource='object')
@attr(method='put')
e = assert_raises(ClientError, client.copy, copy_source, bucket_name, 'foo123bar')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidRequest')
+ assert status == 400
+ assert error_code == 'InvalidRequest'
@attr(resource='object')
@attr(method='put')
client.copy_object(Bucket=bucket_name, CopySource=copy_source, Key='foo123bar', Metadata=metadata, MetadataDirective='REPLACE')
response = client.get_object(Bucket=bucket_name, Key='foo123bar')
- eq(response['Metadata'], metadata)
+ assert response['Metadata'] == metadata
@attr(resource='object')
@attr(method='put')
response = client.get_object(Bucket=bucket_name2, Key='bar321foo')
body = _get_body(response)
- eq('foo', body)
+ assert 'foo' == body
@attr(resource='object')
@attr(method='put')
e = assert_raises(ClientError, alt_client.copy, copy_source, bucket_name2, 'bar321foo')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
+ assert status == 403
@attr(resource='object')
@attr(method='put')
client.copy_object(Bucket=bucket_name, CopySource=copy_source, Key='bar321foo')
response = client.get_object(Bucket=bucket_name, Key='bar321foo')
- eq(content_type, response['ContentType'])
- eq(metadata, response['Metadata'])
+ assert content_type == response['ContentType']
+ assert metadata == response['Metadata']
body = _get_body(response)
- eq(size, response['ContentLength'])
+ assert size == response['ContentLength']
@attr(resource='object')
@attr(method='put')
client.copy_object(Bucket=bucket_name, CopySource=copy_source, Key='bar321foo', Metadata=metadata, MetadataDirective='REPLACE', ContentType=content_type)
response = client.get_object(Bucket=bucket_name, Key='bar321foo')
- eq(content_type, response['ContentType'])
- eq(metadata, response['Metadata'])
- eq(size, response['ContentLength'])
+ assert content_type == response['ContentType']
+ assert metadata == response['Metadata']
+ assert size == response['ContentLength']
@attr(resource='object')
@attr(method='put')
copy_source = {'Bucket': bucket_name + "-fake", 'Key': 'foo123bar'}
e = assert_raises(ClientError, client.copy, copy_source, bucket_name, 'bar321foo')
status = _get_status(e.response)
- eq(status, 404)
+ assert status == 404
@attr(resource='object')
@attr(method='put')
copy_source = {'Bucket': bucket_name, 'Key': 'foo123bar'}
e = assert_raises(ClientError, client.copy, copy_source, bucket_name, 'bar321foo')
status = _get_status(e.response)
- eq(status, 404)
+ assert status == 404
@attr(resource='object')
@attr(method='put')
client.copy_object(Bucket=bucket_name, CopySource=copy_source, Key=key2)
response = client.get_object(Bucket=bucket_name, Key=key2)
body = _get_body(response)
- eq(data_str, body)
- eq(size, response['ContentLength'])
+ assert data_str == body
+ assert size == response['ContentLength']
# second copy
client.copy_object(Bucket=bucket_name, CopySource=copy_source, Key=key3)
response = client.get_object(Bucket=bucket_name, Key=key3)
body = _get_body(response)
- eq(data_str, body)
- eq(size, response['ContentLength'])
+ assert data_str == body
+ assert size == response['ContentLength']
# copy to another versioned bucket
bucket_name2 = get_new_bucket()
client.copy_object(Bucket=bucket_name2, CopySource=copy_source, Key=key4)
response = client.get_object(Bucket=bucket_name2, Key=key4)
body = _get_body(response)
- eq(data_str, body)
- eq(size, response['ContentLength'])
+ assert data_str == body
+ assert size == response['ContentLength']
# copy to another non versioned bucket
bucket_name3 = get_new_bucket()
client.copy_object(Bucket=bucket_name3, CopySource=copy_source, Key=key5)
response = client.get_object(Bucket=bucket_name3, Key=key5)
body = _get_body(response)
- eq(data_str, body)
- eq(size, response['ContentLength'])
+ assert data_str == body
+ assert size == response['ContentLength']
# copy from a non versioned bucket
copy_source = {'Bucket': bucket_name3, 'Key': key5}
client.copy_object(Bucket=bucket_name, CopySource=copy_source, Key=key6)
response = client.get_object(Bucket=bucket_name, Key=key6)
body = _get_body(response)
- eq(data_str, body)
- eq(size, response['ContentLength'])
+ assert data_str == body
+ assert size == response['ContentLength']
@attr(resource='object')
@attr(method='put')
response = client.get_object(Bucket=bucket_name, Key=key2)
version_id2 = response['VersionId']
body = _get_body(response)
- eq(data, body)
- eq(key1_size, response['ContentLength'])
- eq(key1_metadata, response['Metadata'])
- eq(content_type, response['ContentType'])
+ assert data == body
+ assert key1_size == response['ContentLength']
+ assert key1_metadata == response['Metadata']
+ assert content_type == response['ContentType']
# second copy
copy_source = {'Bucket': bucket_name, 'Key': key2, 'VersionId': version_id2}
client.copy_object(Bucket=bucket_name, CopySource=copy_source, Key=key3)
response = client.get_object(Bucket=bucket_name, Key=key3)
body = _get_body(response)
- eq(data, body)
- eq(key1_size, response['ContentLength'])
- eq(key1_metadata, response['Metadata'])
- eq(content_type, response['ContentType'])
+ assert data == body
+ assert key1_size == response['ContentLength']
+ assert key1_metadata == response['Metadata']
+ assert content_type == response['ContentType']
# copy to another versioned bucket
bucket_name2 = get_new_bucket()
client.copy_object(Bucket=bucket_name2, CopySource=copy_source, Key=key4)
response = client.get_object(Bucket=bucket_name2, Key=key4)
body = _get_body(response)
- eq(data, body)
- eq(key1_size, response['ContentLength'])
- eq(key1_metadata, response['Metadata'])
- eq(content_type, response['ContentType'])
+ assert data == body
+ assert key1_size == response['ContentLength']
+ assert key1_metadata == response['Metadata']
+ assert content_type == response['ContentType']
# copy to another non versioned bucket
bucket_name3 = get_new_bucket()
client.copy_object(Bucket=bucket_name3, CopySource=copy_source, Key=key5)
response = client.get_object(Bucket=bucket_name3, Key=key5)
body = _get_body(response)
- eq(data, body)
- eq(key1_size, response['ContentLength'])
- eq(key1_metadata, response['Metadata'])
- eq(content_type, response['ContentType'])
+ assert data == body
+ assert key1_size == response['ContentLength']
+ assert key1_metadata == response['Metadata']
+ assert content_type == response['ContentType']
# copy from a non versioned bucket
copy_source = {'Bucket': bucket_name3, 'Key': key5}
client.copy_object(Bucket=bucket_name3, CopySource=copy_source, Key=key6)
response = client.get_object(Bucket=bucket_name3, Key=key6)
body = _get_body(response)
- eq(data, body)
- eq(key1_size, response['ContentLength'])
- eq(key1_metadata, response['Metadata'])
- eq(content_type, response['ContentType'])
+ assert data == body
+ assert key1_size == response['ContentLength']
+ assert key1_metadata == response['Metadata']
+ assert content_type == response['ContentType']
@attr(resource='object')
@attr(method='put')
(upload_id, data, parts) = _multipart_upload(bucket_name=bucket_name, key=key1, size=objlen)
e = assert_raises(ClientError, client.complete_multipart_upload,Bucket=bucket_name, Key=key1, UploadId=upload_id)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'MalformedXML')
+ assert status == 400
+ assert error_code == 'MalformedXML'
@attr(resource='object')
@attr(method='put')
(upload_id, data, parts) = _multipart_upload(bucket_name=bucket_name, key=key1, size=objlen)
response = client.complete_multipart_upload(Bucket=bucket_name, Key=key1, UploadId=upload_id, MultipartUpload={'Parts': parts})
response = client.get_object(Bucket=bucket_name, Key=key1)
- eq(response['ContentLength'], objlen)
+ assert response['ContentLength'] == objlen
# check extra client.complete_multipart_upload
response = client.complete_multipart_upload(Bucket=bucket_name, Key=key1, UploadId=upload_id, MultipartUpload={'Parts': parts})
else:
response = client.get_object(Bucket=src_bucket_name, Key=src_key, Range=r, VersionId=version_id)
src_data = _get_body(response)
- eq(src_data, dest_data)
+ assert src_data == dest_data
@attr(resource='object')
@attr(method='put')
client.complete_multipart_upload(Bucket=dest_bucket_name, Key=dest_key, UploadId=upload_id, MultipartUpload={'Parts': parts})
response = client.get_object(Bucket=dest_bucket_name, Key=dest_key)
- eq(size, response['ContentLength'])
+ assert size == response['ContentLength']
_check_key_content(src_key, src_bucket_name, dest_key, dest_bucket_name)
@attr(resource='object')
valid_status = [400, 416]
if not status in valid_status:
raise AssertionError("Invalid response " + str(status))
- eq(error_code, 'InvalidRange')
+ assert error_code == 'InvalidRange'
@attr(resource='object')
CopySourceRange=test_range,
PartNumber=1)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidArgument')
+ assert status == 400
+ assert error_code == 'InvalidArgument'
@attr(resource='object')
client.complete_multipart_upload(Bucket=dest_bucket_name, Key=dest_key, UploadId=upload_id, MultipartUpload={'Parts': parts})
response = client.get_object(Bucket=dest_bucket_name, Key=dest_key)
- eq(response['ContentLength'], 10)
+ assert response['ContentLength'] == 10
_check_key_content(src_key, src_bucket_name, dest_key, dest_bucket_name)
@attr(resource='object')
(upload_id, parts) = _multipart_copy(src_bucket_name, src_key, dest_bucket_name, dest_key, size)
response = client.complete_multipart_upload(Bucket=dest_bucket_name, Key=dest_key, UploadId=upload_id, MultipartUpload={'Parts': parts})
response = client.get_object(Bucket=dest_bucket_name, Key=dest_key)
- eq(size, response['ContentLength'])
+ assert size == response['ContentLength']
_check_key_content(src_key, src_bucket_name, dest_key, dest_bucket_name)
def _check_content_using_range(key, bucket_name, data, step):
end = ofs + toread - 1
r = 'bytes={s}-{e}'.format(s=ofs, e=end)
response = client.get_object(Bucket=bucket_name, Key=key, Range=r)
- eq(response['ContentLength'], toread)
+ assert response['ContentLength'] == toread
body = _get_body(response)
- eq(body, data[ofs:end+1])
+ assert body == data[ofs:end+1]
@attr(resource='object')
@attr(method='put')
response = client.head_bucket(Bucket=bucket_name)
rgw_bytes_used = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-bytes-used', objlen))
- eq(rgw_bytes_used, objlen)
+ assert rgw_bytes_used == objlen
rgw_object_count = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-object-count', 1))
- eq(rgw_object_count, 1)
+ assert rgw_object_count == 1
response = client.get_object(Bucket=bucket_name, Key=key)
- eq(response['ContentType'], content_type)
- eq(response['Metadata'], metadata)
+ assert response['ContentType'] == content_type
+ assert response['Metadata'] == metadata
body = _get_body(response)
- eq(len(body), response['ContentLength'])
- eq(body, data)
+ assert len(body) == response['ContentLength']
+ assert body == data
_check_content_using_range(key, bucket_name, data, 1000000)
_check_content_using_range(key, bucket_name, data, 10000000)
try:
response = client.get_bucket_versioning(Bucket=bucket_name)
- eq(response['Status'], status)
+ assert response['Status'] == status
except KeyError:
- eq(status, None)
+ assert status is None
# amazon is eventual consistent, retry a bit if failed
def check_configure_versioning_retry(bucket_name, status, expected_string):
time.sleep(1)
- eq(expected_string, read_status)
+ assert expected_string == read_status
@attr(resource='object')
@attr(method='put')
(upload_id, parts) = _multipart_copy(src_bucket_name, src_key, dest_bucket_name, dest_key, size, version_id=vid)
response = client.complete_multipart_upload(Bucket=dest_bucket_name, Key=dest_key, UploadId=upload_id, MultipartUpload={'Parts': parts})
response = client.get_object(Bucket=dest_bucket_name, Key=dest_key)
- eq(size, response['ContentLength'])
+ assert size == response['ContentLength']
_check_key_content(src_key, src_bucket_name, dest_key, dest_bucket_name, version_id=vid)
def _check_upload_multipart_resend(bucket_name, key, objlen, resend_parts):
client.complete_multipart_upload(Bucket=bucket_name, Key=key, UploadId=upload_id, MultipartUpload={'Parts': parts})
response = client.get_object(Bucket=bucket_name, Key=key)
- eq(response['ContentType'], content_type)
- eq(response['Metadata'], metadata)
+ assert response['ContentType'] == content_type
+ assert response['Metadata'] == metadata
body = _get_body(response)
- eq(len(body), response['ContentLength'])
- eq(body, data)
+ assert len(body) == response['ContentLength']
+ assert body == data
_check_content_using_range(key, bucket_name, data, 1000000)
_check_content_using_range(key, bucket_name, data, 10000000)
(upload_id, data, parts) = _multipart_upload(bucket_name=bucket_name, key=key, size=size, part_size=10*1024)
e = assert_raises(ClientError, client.complete_multipart_upload, Bucket=bucket_name, Key=key, UploadId=upload_id, MultipartUpload={'Parts': parts})
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'EntityTooSmall')
+ assert status == 400
+ assert error_code == 'EntityTooSmall'
def gen_rand_string(size, chars=string.ascii_uppercase + string.digits):
return ''.join(random.choice(chars) for _ in range(size))
response = client.head_bucket(Bucket=bucket_name)
rgw_bytes_used = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-bytes-used', 0))
- eq(rgw_bytes_used, 0)
+ assert rgw_bytes_used == 0
rgw_object_count = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-object-count', 0))
- eq(rgw_object_count, 0)
+ assert rgw_object_count == 0
@attr(resource='object')
@attr(method='put')
e = assert_raises(ClientError, client.abort_multipart_upload, Bucket=bucket_name, Key=key, UploadId='56788')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 404)
- eq(error_code, 'NoSuchUpload')
+ assert status == 404
+ assert error_code == 'NoSuchUpload'
@attr(resource='object')
@attr(method='put')
resp_uploadids.append(uploads[i]['UploadId'])
for i in range(0, len(upload_ids)):
- eq(True, (upload_ids[i] in resp_uploadids))
+ assert upload_ids[i] in resp_uploadids
client.abort_multipart_upload(Bucket=bucket_name, Key=key, UploadId=upload_id1)
client.abort_multipart_upload(Bucket=bucket_name, Key=key, UploadId=upload_id2)
try:
# match fields of an Upload from ListMultipartUploadsResult
def match(upload, key, uploadid, userid, username):
- eq(upload['Key'], key)
- eq(upload['UploadId'], uploadid)
- eq(upload['Initiator']['ID'], userid)
- eq(upload['Initiator']['DisplayName'], username)
- eq(upload['Owner']['ID'], userid)
- eq(upload['Owner']['DisplayName'], username)
+ assert upload['Key'] == key
+ assert upload['UploadId'] == uploadid
+ assert upload['Initiator']['ID'] == userid
+ assert upload['Initiator']['DisplayName'] == username
+ assert upload['Owner']['ID'] == userid
+ assert upload['Owner']['DisplayName'] == username
# list uploads with client1
uploads1 = client1.list_multipart_uploads(Bucket=bucket_name)['Uploads']
- eq(len(uploads1), 2)
+ assert len(uploads1) == 2
match(uploads1[0], key1, upload1, user1, name1)
match(uploads1[1], key2, upload2, user2, name2)
# list uploads with client2
uploads2 = client2.list_multipart_uploads(Bucket=bucket_name)['Uploads']
- eq(len(uploads2), 2)
+ assert len(uploads2) == 2
match(uploads2[0], key1, upload1, user1, name1)
match(uploads2[1], key2, upload2, user2, name2)
finally:
e = assert_raises(ClientError, client.complete_multipart_upload, Bucket=bucket_name, Key=key, UploadId=upload_id, MultipartUpload={'Parts': parts})
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidPart')
+ assert status == 400
+ assert error_code == 'InvalidPart'
@attr(resource='object')
@attr(method='put')
e = assert_raises(ClientError, client.complete_multipart_upload, Bucket=bucket_name, Key=key, UploadId=upload_id, MultipartUpload={'Parts': parts})
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidPart')
+ assert status == 400
+ assert error_code == 'InvalidPart'
def _simple_http_req_100_cont(host, port, is_secure, method, resource):
"""
#NOTES: this test needs to be tested when is_secure is True
status = _simple_http_req_100_cont(host, port, is_secure, 'PUT', resource)
- eq(status, '403')
+ assert status == '403'
client.put_bucket_acl(Bucket=bucket_name, ACL='public-read-write')
status = _simple_http_req_100_cont(host, port, is_secure, 'PUT', resource)
- eq(status, '100')
+ assert status == '100'
@attr(resource='bucket')
@attr(method='put')
e = assert_raises(ClientError, client.get_bucket_cors, Bucket=bucket_name)
status = _get_status(e.response)
- eq(status, 404)
+ assert status == 404
client.put_bucket_cors(Bucket=bucket_name, CORSConfiguration=cors_config)
response = client.get_bucket_cors(Bucket=bucket_name)
- eq(response['CORSRules'][0]['AllowedMethods'], allowed_methods)
- eq(response['CORSRules'][0]['AllowedOrigins'], allowed_origins)
+ assert response['CORSRules'][0]['AllowedMethods'] == allowed_methods
+ assert response['CORSRules'][0]['AllowedOrigins'] == allowed_origins
client.delete_bucket_cors(Bucket=bucket_name)
e = assert_raises(ClientError, client.get_bucket_cors, Bucket=bucket_name)
status = _get_status(e.response)
- eq(status, 404)
+ assert status == 404
def _cors_request_and_check(func, url, headers, expect_status, expect_allow_origin, expect_allow_methods):
r = func(url, headers=headers, verify=get_config_ssl_verify())
- eq(r.status_code, expect_status)
+ assert r.status_code == expect_status
assert r.headers.get('access-control-allow-origin', None) == expect_allow_origin
assert r.headers.get('access-control-allow-methods', None) == expect_allow_methods
e = assert_raises(ClientError, client.get_bucket_cors, Bucket=bucket_name)
status = _get_status(e.response)
- eq(status, 404)
+ assert status == 404
client.put_bucket_cors(Bucket=bucket_name, CORSConfiguration=cors_config)
e = assert_raises(ClientError, client.get_bucket_cors, Bucket=bucket_name)
status = _get_status(e.response)
- eq(status, 404)
+ assert status == 404
client.put_bucket_cors(Bucket=bucket_name, CORSConfiguration=cors_config)
e = assert_raises(ClientError, client.get_bucket_cors, Bucket=bucket_name)
status = _get_status(e.response)
- eq(status, 404)
+ assert status == 404
client.put_bucket_cors(Bucket=bucket_name, CORSConfiguration=cors_config)
e = assert_raises(ClientError, client.get_bucket_tagging, Bucket=bucket_name)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 404)
- eq(error_code, 'NoSuchTagSet')
+ assert status == 404
+ assert error_code == 'NoSuchTagSet'
client.put_bucket_tagging(Bucket=bucket_name, Tagging=tags)
response = client.get_bucket_tagging(Bucket=bucket_name)
- eq(len(response['TagSet']), 1)
- eq(response['TagSet'][0]['Key'], 'Hello')
- eq(response['TagSet'][0]['Value'], 'World')
+ assert len(response['TagSet']) == 1
+ assert response['TagSet'][0]['Key'] == 'Hello'
+ assert response['TagSet'][0]['Value'] == 'World'
response = client.delete_bucket_tagging(Bucket=bucket_name)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 204)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 204
e = assert_raises(ClientError, client.get_bucket_tagging, Bucket=bucket_name)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 404)
- eq(error_code, 'NoSuchTagSet')
+ assert status == 404
+ assert error_code == 'NoSuchTagSet'
class FakeFile(object):
self.expected_size = size
def write(self, chars):
- eq(chars, self.char*len(chars))
+ assert chars == self.char*len(chars)
self.offset += len(chars)
self.size += len(chars)
self.interrupted = True
def close(self):
- eq(self.size, self.expected_size)
+ assert self.size == self.expected_size
class FakeFileVerifier(object):
"""
if self.char == None:
self.char = data[0]
self.size += size
- eq(data.decode(), self.char*size)
+ assert data.decode() == self.char*size
def _verify_atomic_key_data(bucket_name, key, size=-1, char=None):
"""
client = get_client()
client.download_fileobj(bucket_name, key, fp_verify)
if size >= 0:
- eq(fp_verify.size, size)
+ assert fp_verify.size == size
def _test_atomic_read(file_size):
"""
e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=objname, Body=fp_c)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 412)
- eq(error_code, 'PreconditionFailed')
+ assert status == 412
+ assert error_code == 'PreconditionFailed'
# verify the file
_verify_atomic_key_data(bucket_name, objname, file_size, 'B')
e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=objname, Body=fp_a)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 404)
- eq(error_code, 'NoSuchBucket')
+ assert status == 404
+ assert error_code == 'NoSuchBucket'
@attr(resource='object')
@attr(method='put')
response = client.get_object(Bucket=bucket_name, Key='foo')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
client.abort_multipart_upload(Bucket=bucket_name, Key='foo', UploadId=upload_id)
response = client.get_object(Bucket=bucket_name, Key='foo')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
class Counter:
def __init__(self, default_val):
response = client.get_object(Bucket=bucket_name, Key='testobj', Range='bytes=4-7')
fetched_content = _get_body(response)
- eq(fetched_content, content[4:8])
- eq(response['ResponseMetadata']['HTTPHeaders']['content-range'], 'bytes 4-7/11')
- eq(response['ResponseMetadata']['HTTPStatusCode'], 206)
+ assert fetched_content == content[4:8]
+ assert response['ResponseMetadata']['HTTPHeaders']['content-range'] == 'bytes 4-7/11'
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 206
def _generate_random_string(size):
return ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(size))
response = client.get_object(Bucket=bucket_name, Key='testobj', Range='bytes=3145728-5242880')
fetched_content = _get_body(response)
- eq(fetched_content, content[3145728:5242881])
- eq(response['ResponseMetadata']['HTTPHeaders']['content-range'], 'bytes 3145728-5242880/8388608')
- eq(response['ResponseMetadata']['HTTPStatusCode'], 206)
+ assert fetched_content == content[3145728:5242881]
+ assert response['ResponseMetadata']['HTTPHeaders']['content-range'] == 'bytes 3145728-5242880/8388608'
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 206
@attr(resource='object')
@attr(method='get')
response = client.get_object(Bucket=bucket_name, Key='testobj', Range='bytes=4-')
fetched_content = _get_body(response)
- eq(fetched_content, content[4:])
- eq(response['ResponseMetadata']['HTTPHeaders']['content-range'], 'bytes 4-10/11')
- eq(response['ResponseMetadata']['HTTPStatusCode'], 206)
+ assert fetched_content == content[4:]
+ assert response['ResponseMetadata']['HTTPHeaders']['content-range'] == 'bytes 4-10/11'
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 206
@attr(resource='object')
@attr(method='get')
response = client.get_object(Bucket=bucket_name, Key='testobj', Range='bytes=-7')
fetched_content = _get_body(response)
- eq(fetched_content, content[-7:])
- eq(response['ResponseMetadata']['HTTPHeaders']['content-range'], 'bytes 4-10/11')
- eq(response['ResponseMetadata']['HTTPStatusCode'], 206)
+ assert fetched_content == content[-7:]
+ assert response['ResponseMetadata']['HTTPHeaders']['content-range'] == 'bytes 4-10/11'
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 206
@attr(resource='object')
@attr(method='get')
# test invalid range
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key='testobj', Range='bytes=40-50')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 416)
- eq(error_code, 'InvalidRange')
+ assert status == 416
+ assert error_code == 'InvalidRange'
@attr(resource='object')
@attr(method='get')
# test invalid range
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key='testobj', Range='bytes=40-50')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 416)
- eq(error_code, 'InvalidRange')
+ assert status == 416
+ assert error_code == 'InvalidRange'
@attr(resource='bucket')
@attr(method='create')
response = client.get_object(Bucket=bucket_name, Key=key, VersionId=version_id)
if content is not None:
body = _get_body(response)
- eq(body, content)
+ assert body == content
else:
- eq(response['DeleteMarker'], True)
+ assert response['DeleteMarker'] == True
def check_obj_versions(client, bucket_name, key, version_ids, contents):
# check to see if objects is pointing at correct version
i = 0
for version in versions:
- eq(version['VersionId'], version_ids[i])
- eq(version['Key'], key)
+ assert version['VersionId'] == version_ids[i]
+ assert version['Key'] == key
check_obj_content(client, bucket_name, key, version['VersionId'], contents[i])
i += 1
return (version_ids, contents)
def remove_obj_version(client, bucket_name, key, version_ids, contents, index):
- eq(len(version_ids), len(contents))
+ assert len(version_ids) == len(contents)
index = index % len(version_ids)
rm_version_id = version_ids.pop(index)
rm_content = contents.pop(index)
response = client.delete_object(Bucket=bucket_name, Key=key, VersionId=removed_version_id)
response = client.get_object(Bucket=bucket_name, Key=key)
body = _get_body(response)
- eq(body, contents[-1])
+ assert body == contents[-1]
# add a delete marker
response = client.delete_object(Bucket=bucket_name, Key=key)
- eq(response['DeleteMarker'], True)
+ assert response['DeleteMarker'] == True
delete_marker_version_id = response['VersionId']
version_ids.append(delete_marker_version_id)
response = client.list_object_versions(Bucket=bucket_name)
- eq(len(response['Versions']), num_versions)
- eq(len(response['DeleteMarkers']), 1)
- eq(response['DeleteMarkers'][0]['VersionId'], delete_marker_version_id)
+ assert len(response['Versions']) == num_versions
+ assert len(response['DeleteMarkers']) == 1
+ assert response['DeleteMarkers'][0]['VersionId'] == delete_marker_version_id
clean_up_bucket(client, bucket_name, key, version_ids)
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=key)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 404)
- eq(error_code, 'NoSuchKey')
+ assert status == 404
+ assert error_code == 'NoSuchKey'
response = client.list_object_versions(Bucket=bucket_name)
- eq(('Versions' in response), False)
+ assert not 'Versions' in response
@attr(resource='object')
@attr(method='create')
response = client.put_object(Bucket=bucket_name, Key=key, Body=content2)
response = client.get_object(Bucket=bucket_name, Key=key)
body = _get_body(response)
- eq(body, content2)
+ assert body == content2
version_id = response['VersionId']
client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id)
response = client.get_object(Bucket=bucket_name, Key=key)
body = _get_body(response)
- eq(body, content)
+ assert body == content
client.delete_object(Bucket=bucket_name, Key=key, VersionId='null')
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=key)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 404)
- eq(error_code, 'NoSuchKey')
+ assert status == 404
+ assert error_code == 'NoSuchKey'
response = client.list_object_versions(Bucket=bucket_name)
- eq(('Versions' in response), False)
+ assert not 'Versions' in response
@attr(resource='object')
@attr(method='create')
response = client.put_object(Bucket=bucket_name, Key=key, Body=content2)
response = client.get_object(Bucket=bucket_name, Key=key)
body = _get_body(response)
- eq(body, content2)
+ assert body == content2
response = client.list_object_versions(Bucket=bucket_name)
# original object with 'null' version id still counts as a version
- eq(len(response['Versions']), 1)
+ assert len(response['Versions']) == 1
client.delete_object(Bucket=bucket_name, Key=key, VersionId='null')
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=key)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 404)
- eq(error_code, 'NoSuchKey')
+ assert status == 404
+ assert error_code == 'NoSuchKey'
response = client.list_object_versions(Bucket=bucket_name)
- eq(('Versions' in response), False)
+ assert not 'Versions' in response
def delete_suspended_versioning_obj(client, bucket_name, key, version_ids, contents):
client.delete_object(Bucket=bucket_name, Key=key)
# clear out old null objects in lists since they will get overwritten
- eq(len(version_ids), len(contents))
+ assert len(version_ids) == len(contents)
i = 0
for version_id in version_ids:
if version_id == 'null':
client.put_object(Bucket=bucket_name, Key=key, Body=content)
# clear out old null objects in lists since they will get overwritten
- eq(len(version_ids), len(contents))
+ assert len(version_ids) == len(contents)
i = 0
for version_id in version_ids:
if version_id == 'null':
for idx in range(num_versions):
remove_obj_version(client, bucket_name, key, version_ids, contents, idx)
- eq(len(version_ids), 0)
- eq(len(version_ids), len(contents))
+ assert len(version_ids) == 0
+ assert len(version_ids) == len(contents)
@attr(resource='object')
@attr(method='remove')
for idx in range(num_versions):
remove_obj_version(client, bucket_name, key, version_ids, contents, idx)
- eq(len(version_ids), 0)
- eq(len(version_ids), len(contents))
+ assert len(version_ids) == 0
+ assert len(version_ids) == len(contents)
@attr(resource='object')
@attr(method='remove')
for idx in range(num_versions):
remove_obj_version(client, bucket_name, key, version_ids, contents, idx)
- eq(len(version_ids), 0)
- eq(len(version_ids), len(contents))
+ assert len(version_ids) == 0
+ assert len(version_ids) == len(contents)
@attr(resource='object')
@attr(method='multipart')
for idx in range(num_versions):
remove_obj_version(client, bucket_name, key, version_ids, contents, idx)
- eq(len(version_ids), 0)
- eq(len(version_ids), len(contents))
+ assert len(version_ids) == 0
+ assert len(version_ids) == len(contents)
@attr(resource='object')
@attr(method='multipart')
# test the last 5 created objects first
for i in range(5):
version = versions[i]
- eq(version['VersionId'], version_ids2[i])
- eq(version['Key'], key2)
+ assert version['VersionId'] == version_ids2[i]
+ assert version['Key'] == key2
check_obj_content(client, bucket_name, key2, version['VersionId'], contents2[i])
i += 1
# then the first 5
for j in range(5):
version = versions[i]
- eq(version['VersionId'], version_ids[j])
- eq(version['Key'], key)
+ assert version['VersionId'] == version_ids[j]
+ assert version['Key'] == key
check_obj_content(client, bucket_name, key, version['VersionId'], contents[j])
i += 1
client.copy_object(Bucket=bucket_name, CopySource=copy_source, Key=new_key_name)
response = client.get_object(Bucket=bucket_name, Key=new_key_name)
body = _get_body(response)
- eq(body, contents[i])
+ assert body == contents[i]
another_bucket_name = get_new_bucket()
client.copy_object(Bucket=another_bucket_name, CopySource=copy_source, Key=new_key_name)
response = client.get_object(Bucket=another_bucket_name, Key=new_key_name)
body = _get_body(response)
- eq(body, contents[i])
+ assert body == contents[i]
new_key_name = 'new_key'
copy_source = {'Bucket': bucket_name, 'Key': key}
response = client.get_object(Bucket=another_bucket_name, Key=new_key_name)
body = _get_body(response)
- eq(body, contents[-1])
+ assert body == contents[-1]
@attr(resource='object')
@attr(method='delete')
client.delete_object(Bucket=bucket_name, Key=key, VersionId=version['VersionId'])
response = client.list_object_versions(Bucket=bucket_name)
- eq(('Versions' in response), False)
+ assert not 'Versions' in response
# now remove again, should all succeed due to idempotency
for version in versions:
client.delete_object(Bucket=bucket_name, Key=key, VersionId=version['VersionId'])
response = client.list_object_versions(Bucket=bucket_name)
- eq(('Versions' in response), False)
+ assert not 'Versions' in response
@attr(resource='object')
@attr(method='delete')
delete_markers = response['DeleteMarkers']
version_ids.append(delete_markers[0]['VersionId'])
- eq(len(version_ids), 3)
- eq(len(delete_markers), 1)
+ assert len(version_ids) == 3
+ assert len(delete_markers) == 1
for version in versions:
client.delete_object(Bucket=bucket_name, Key=key, VersionId=version['VersionId'])
client.delete_object(Bucket=bucket_name, Key=key, VersionId=delete_marker['VersionId'])
response = client.list_object_versions(Bucket=bucket_name)
- eq(('Versions' in response), False)
- eq(('DeleteMarkers' in response), False)
+ assert not 'Versions' in response
+ assert not 'DeleteMarkers' in response
for version in versions:
client.delete_object(Bucket=bucket_name, Key=key, VersionId=version['VersionId'])
# now remove again, should all succeed due to idempotency
response = client.list_object_versions(Bucket=bucket_name)
- eq(('Versions' in response), False)
- eq(('DeleteMarkers' in response), False)
+ assert not 'Versions' in response
+ assert not 'DeleteMarkers' in response
@attr(resource='object')
@attr(method='delete')
response = client.list_object_versions(Bucket=bucket_name)
delete_markers = response['DeleteMarkers']
- eq(len(delete_markers), 1)
- eq(delete_marker_version_id, delete_markers[0]['VersionId'])
- eq(key, delete_markers[0]['Key'])
+ assert len(delete_markers) == 1
+ assert delete_marker_version_id == delete_markers[0]['VersionId']
+ assert key == delete_markers[0]['Key']
@attr(resource='object')
@attr(method='put')
display_name = get_main_display_name()
user_id = get_main_user_id()
- eq(response['Owner']['DisplayName'], display_name)
- eq(response['Owner']['ID'], user_id)
+ assert response['Owner']['DisplayName'] == display_name
+ assert response['Owner']['ID'] == user_id
grants = response['Grants']
default_policy = [
display_name = get_main_display_name()
user_id = get_main_user_id()
- eq(response['Owner']['DisplayName'], display_name)
- eq(response['Owner']['ID'], user_id)
+ assert response['Owner']['DisplayName'] == display_name
+ assert response['Owner']['ID'] == user_id
grants = response['Grants']
default_policy = [
response = client.list_object_versions(Bucket=bucket_name)
versions = response['Versions']
- eq(len(versions), num_versions)
+ assert len(versions) == num_versions
t = _do_clear_versioned_bucket_concurrent(client, bucket_name)
_do_wait_completion(t)
response = client.list_object_versions(Bucket=bucket_name)
- eq(('Versions' in response), False)
+ assert not 'Versions' in response
@attr(resource='object')
@attr(method='put')
_do_wait_completion(t)
response = client.list_object_versions(Bucket=bucket_name)
- eq(('Versions' in response), False)
+ assert not 'Versions' in response
@attr(resource='bucket')
@attr(method='put')
{'ID': 'rule2', 'Expiration': {'Days': 2}, 'Prefix': 'test2/', 'Status':'Disabled'}]
lifecycle = {'Rules': rules}
response = client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
@attr(resource='bucket')
@attr(method='get')
lifecycle = {'Rules': rules}
client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle)
response = client.get_bucket_lifecycle_configuration(Bucket=bucket_name)
- eq(response['Rules'], rules)
+ assert response['Rules'] == rules
@attr(resource='bucket')
@attr(method='get')
for lc_rule in current_lc:
if lc_rule['Prefix'] == rules['rule1'].prefix:
- eq(lc_rule['Expiration']['Days'], rules['rule1'].days)
- eq(lc_rule['Status'], rules['rule1'].status)
+ assert lc_rule['Expiration']['Days'] == rules['rule1'].days
+ assert lc_rule['Status'] == rules['rule1'].status
assert 'ID' in lc_rule
elif lc_rule['Prefix'] == rules['rule2'].prefix:
- eq(lc_rule['Expiration']['Days'], rules['rule2'].days)
- eq(lc_rule['Status'], rules['rule2'].status)
+ assert lc_rule['Expiration']['Days'] == rules['rule2'].days
+ assert lc_rule['Status'] == rules['rule2'].status
assert 'ID' in lc_rule
else:
# neither of the rules we supplied was returned, something wrong
response = client.list_objects(Bucket=bucket_name)
expire3_objects = response['Contents']
- eq(len(init_objects), 6)
- eq(len(expire1_objects), 4)
- eq(len(keep2_objects), 4)
- eq(len(expire3_objects), 2)
+ assert len(init_objects) == 6
+ assert len(expire1_objects) == 4
+ assert len(keep2_objects) == 4
+ assert len(expire3_objects) == 2
@attr(resource='bucket')
@attr(method='put')
response = client.list_objects_v2(Bucket=bucket_name)
expire3_objects = response['Contents']
- eq(len(init_objects), 6)
- eq(len(expire1_objects), 4)
- eq(len(keep2_objects), 4)
- eq(len(expire3_objects), 2)
+ assert len(init_objects) == 6
+ assert len(expire1_objects) == 4
+ assert len(keep2_objects) == 4
+ assert len(expire3_objects) == 2
@attr(resource='bucket')
@attr(method='put')
response = client.list_object_versions(Bucket=bucket_name)
versions = response['Versions']
delete_markers = response['DeleteMarkers']
- eq(len(versions), 1)
- eq(len(delete_markers), 1)
+ assert len(versions) == 1
+ assert len(delete_markers) == 1
@attr(resource='bucket')
@attr(method='put')
response = client.put_object_tagging(Bucket=bucket_name, Key=tom_key,
Tagging=tom_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
lifecycle_config = {
'Rules': [
response = client.put_bucket_lifecycle_configuration(
Bucket=bucket_name, LifecycleConfiguration=lifecycle_config)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
lc_interval = get_lc_debug_interval()
except KeyError:
expire_objects = []
- eq(len(expire_objects), 0)
+ assert len(expire_objects) == 0
# factor out common setup code
def setup_lifecycle_tags2(client, bucket_name):
response = client.put_object_tagging(Bucket=bucket_name, Key=tom_key,
Tagging=tom_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
huck_key = 'days1/huck'
huck_tagset = {
response = client.put_object_tagging(Bucket=bucket_name, Key=huck_key,
Tagging=huck_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
lifecycle_config = {
'Rules': [
response = client.put_bucket_lifecycle_configuration(
Bucket=bucket_name, LifecycleConfiguration=lifecycle_config)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
return response
@attr(resource='bucket')
response = client.list_objects(Bucket=bucket_name)
expire1_objects = response['Contents']
- eq(len(expire1_objects), 1)
+ assert len(expire1_objects) == 1
@attr(resource='bucket')
@attr(method='put')
response = client.list_objects(Bucket=bucket_name)
expire1_objects = response['Contents']
- eq(len(expire1_objects), 1)
+ assert len(expire1_objects) == 1
# setup for scenario based on vidushi mishra's in rhbz#1877737
def setup_lifecycle_noncur_tags(client, bucket_name, days):
for ix in range(10):
body = "%s v%d" % (key, ix)
response = client.put_object(Bucket=bucket_name, Key=key, Body=body)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
response = client.put_object_tagging(Bucket=bucket_name, Key=key,
Tagging=tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
lifecycle_config = {
'Rules': [
response = client.put_bucket_lifecycle_configuration(
Bucket=bucket_name, LifecycleConfiguration=lifecycle_config)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
return response
def verify_lifecycle_expiration_noncur_tags(client, bucket_name, secs):
client, bucket_name, 2*lc_interval)
# at T+20, 10 objects should exist
- eq(num_objs, 10)
+ assert num_objs == 10
num_objs = verify_lifecycle_expiration_noncur_tags(
client, bucket_name, 5*lc_interval)
# at T+60, only the current object version should exist
- eq(num_objs, 1)
+ assert num_objs == 1
@attr(resource='bucket')
@attr(method='put')
e = assert_raises(ClientError, client.put_bucket_lifecycle_configuration, Bucket=bucket_name, LifecycleConfiguration=lifecycle)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidArgument')
+ assert status == 400
+ assert error_code == 'InvalidArgument'
@attr(resource='bucket')
@attr(method='put')
e = assert_raises(ClientError, client.put_bucket_lifecycle_configuration, Bucket=bucket_name, LifecycleConfiguration=lifecycle)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidArgument')
+ assert status == 400
+ assert error_code == 'InvalidArgument'
@attr(resource='bucket')
@attr(method='put')
e = assert_raises(ClientError, client.put_bucket_lifecycle_configuration, Bucket=bucket_name, LifecycleConfiguration=lifecycle)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'MalformedXML')
+ assert status == 400
+ assert error_code == 'MalformedXML'
rules=[{'ID': 'rule1', 'Expiration': {'Days': 2}, 'Prefix': 'test1/', 'Status':'disabled'}]
lifecycle = {'Rules': rules}
e = assert_raises(ClientError, client.put_bucket_lifecycle, Bucket=bucket_name, LifecycleConfiguration=lifecycle)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'MalformedXML')
+ assert status == 400
+ assert error_code == 'MalformedXML'
rules=[{'ID': 'rule1', 'Expiration': {'Days': 2}, 'Prefix': 'test1/', 'Status':'invalid'}]
lifecycle = {'Rules': rules}
e = assert_raises(ClientError, client.put_bucket_lifecycle_configuration, Bucket=bucket_name, LifecycleConfiguration=lifecycle)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'MalformedXML')
+ assert status == 400
+ assert error_code == 'MalformedXML'
@attr(resource='bucket')
@attr(method='put')
lifecycle = {'Rules': rules}
response = client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
@attr(resource='bucket')
@attr(method='put')
e = assert_raises(ClientError, client.put_bucket_lifecycle_configuration, Bucket=bucket_name, LifecycleConfiguration=lifecycle)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
+ assert status == 400
@attr(resource='bucket')
@attr(method='put')
response = client.list_objects(Bucket=bucket_name)
expire_objects = response['Contents']
- eq(len(init_objects), 2)
- eq(len(expire_objects), 1)
+ assert len(init_objects) == 2
+ assert len(expire_objects) == 1
@attr(resource='bucket')
@attr(method='put')
except botocore.exceptions.ClientError as e:
response_code = e.response['Error']['Code']
- eq(response_code, 'InvalidArgument')
+ assert response_code == 'InvalidArgument'
def setup_lifecycle_expiration(client, bucket_name, rule_id, delta_days,
lifecycle = {'Rules': rules}
response = client.put_bucket_lifecycle_configuration(
Bucket=bucket_name, LifecycleConfiguration=lifecycle)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
key = rule_prefix + 'foo'
body = 'bar'
response = client.put_object(Bucket=bucket_name, Key=key, Body=body)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
return response
def check_lifecycle_expiration_header(response, start_time, rule_id,
now = datetime.datetime.now(None)
response = setup_lifecycle_expiration(
client, bucket_name, 'rule1', 1, 'days1/')
- eq(check_lifecycle_expiration_header(response, now, 'rule1', 1), True)
+ assert check_lifecycle_expiration_header(response, now, 'rule1', 1)
@attr(resource='bucket')
@attr(method='head')
# stat the object, check header
response = client.head_object(Bucket=bucket_name, Key=key)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
- eq(check_lifecycle_expiration_header(response, now, 'rule1', 1), True)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+ assert check_lifecycle_expiration_header(response, now, 'rule1', 1)
@attr(resource='bucket')
@attr(method='head')
# stat the object, check header
response = client.head_object(Bucket=bucket_name, Key=key1)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
- eq(check_lifecycle_expiration_header(response, datetime.datetime.now(None), 'rule1', 1), True)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+ assert check_lifecycle_expiration_header(response, datetime.datetime.now(None), 'rule1', 1)
# test that header is not returning when it should not
lifecycle={
Bucket=bucket_name, LifecycleConfiguration=lifecycle)
# stat the object, check header
response = client.head_object(Bucket=bucket_name, Key=key1)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
- eq(check_lifecycle_expiration_header(response, datetime.datetime.now(None), 'rule1', 1), False)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+ assert check_lifecycle_expiration_header(response, datetime.datetime.now(None), 'rule1', 1) == False
@attr(resource='bucket')
@attr(method='head')
# stat the object, check header
response = client.head_object(Bucket=bucket_name, Key=key1)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
- eq(check_lifecycle_expiration_header(response, datetime.datetime.now(None), 'rule1', 1), False)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+ assert check_lifecycle_expiration_header(response, datetime.datetime.now(None), 'rule1', 1) == False
@attr(resource='bucket')
@attr(method='put')
{'ID': 'rule2', 'NoncurrentVersionExpiration': {'NoncurrentDays': 3}, 'Prefix': 'future/', 'Status':'Enabled'}]
lifecycle = {'Rules': rules}
response = client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
@attr(resource='bucket')
@attr(method='put')
response = client.list_object_versions(Bucket=bucket_name)
expire_versions = response['Versions']
- eq(len(init_versions), 6)
- eq(len(expire_versions), 4)
+ assert len(init_versions) == 6
+ assert len(expire_versions) == 4
@attr(resource='bucket')
@attr(method='put')
rules=[{'ID': 'rule1', 'Expiration': {'ExpiredObjectDeleteMarker': True}, 'Prefix': 'test1/', 'Status':'Enabled'}]
lifecycle = {'Rules': rules}
response = client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
@attr(resource='bucket')
@attr(method='put')
rules=[{'ID': 'rule1', 'Expiration': {'ExpiredObjectDeleteMarker': True}, 'Filter': {'Prefix': 'foo'}, 'Status':'Enabled'}]
lifecycle = {'Rules': rules}
response = client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
@attr(resource='bucket')
@attr(method='put')
rules=[{'ID': 'rule1', 'Expiration': {'ExpiredObjectDeleteMarker': True}, 'Filter': {}, 'Status':'Enabled'}]
lifecycle = {'Rules': rules}
response = client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
@attr(resource='bucket')
@attr(method='put')
deleted_versions = response['DeleteMarkers']
total_expire_versions = init_versions + deleted_versions
- eq(len(total_init_versions), 4)
- eq(len(total_expire_versions), 2)
+ assert len(total_init_versions) == 4
+ assert len(total_expire_versions) == 2
@attr(resource='bucket')
@attr(method='put')
]
lifecycle = {'Rules': rules}
response = client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
@attr(resource='bucket')
@attr(method='put')
response = client.list_multipart_uploads(Bucket=bucket_name)
expired_uploads = response['Uploads']
- eq(len(init_uploads), 2)
- eq(len(expired_uploads), 1)
+ assert len(init_uploads) == 2
+ assert len(expired_uploads) == 1
@attr(resource='bucket')
@attr(method='put')
lifecycle = {'Rules': rules}
e = assert_raises(ClientError, client.put_bucket_lifecycle_configuration, Bucket=bucket_name, LifecycleConfiguration=lifecycle)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
+ assert status == 400
def _test_encryption_sse_customer_write(file_size):
"""
client.meta.events.register('before-call.s3.GetObject', lf)
response = client.get_object(Bucket=bucket_name, Key=key)
body = _get_body(response)
- eq(body, data)
+ assert body == data
# The test harness for lifecycle is configured to treat days as 10 second intervals.
@attr(resource='bucket')
# Get list of all keys
response = client.list_objects(Bucket=bucket_name)
init_keys = _get_keys(response)
- eq(len(init_keys), 6)
+ assert len(init_keys) == 6
lc_interval = get_lc_debug_interval()
# Wait for first expiration (plus fudge to handle the timer window)
time.sleep(4*lc_interval)
expire1_keys = list_bucket_storage_class(client, bucket_name)
- eq(len(expire1_keys['STANDARD']), 4)
- eq(len(expire1_keys[sc[1]]), 2)
- eq(len(expire1_keys[sc[2]]), 0)
+ assert len(expire1_keys['STANDARD']) == 4
+ assert len(expire1_keys[sc[1]]) == 2
+ assert len(expire1_keys[sc[2]]) == 0
# Wait for next expiration cycle
time.sleep(lc_interval)
keep2_keys = list_bucket_storage_class(client, bucket_name)
- eq(len(keep2_keys['STANDARD']), 4)
- eq(len(keep2_keys[sc[1]]), 2)
- eq(len(keep2_keys[sc[2]]), 0)
+ assert len(keep2_keys['STANDARD']) == 4
+ assert len(keep2_keys[sc[1]]) == 2
+ assert len(keep2_keys[sc[2]]) == 0
# Wait for final expiration cycle
time.sleep(5*lc_interval)
expire3_keys = list_bucket_storage_class(client, bucket_name)
- eq(len(expire3_keys['STANDARD']), 2)
- eq(len(expire3_keys[sc[1]]), 2)
- eq(len(expire3_keys[sc[2]]), 2)
+ assert len(expire3_keys['STANDARD']) == 2
+ assert len(expire3_keys[sc[1]]) == 2
+ assert len(expire3_keys[sc[2]]) == 2
# The test harness for lifecycle is configured to treat days as 10 second intervals.
@attr(resource='bucket')
# Get list of all keys
response = client.list_objects(Bucket=bucket_name)
init_keys = _get_keys(response)
- eq(len(init_keys), 6)
+ assert len(init_keys) == 6
lc_interval = get_lc_debug_interval()
# Wait for first expiration (plus fudge to handle the timer window)
time.sleep(5*lc_interval)
expire1_keys = list_bucket_storage_class(client, bucket_name)
- eq(len(expire1_keys['STANDARD']), 4)
- eq(len(expire1_keys[sc[1]]), 2)
- eq(len(expire1_keys[sc[2]]), 0)
+ assert len(expire1_keys['STANDARD']) == 4
+ assert len(expire1_keys[sc[1]]) == 2
+ assert len(expire1_keys[sc[2]]) == 0
# Wait for next expiration cycle
time.sleep(lc_interval)
keep2_keys = list_bucket_storage_class(client, bucket_name)
- eq(len(keep2_keys['STANDARD']), 4)
- eq(len(keep2_keys[sc[1]]), 2)
- eq(len(keep2_keys[sc[2]]), 0)
+ assert len(keep2_keys['STANDARD']) == 4
+ assert len(keep2_keys[sc[1]]) == 2
+ assert len(keep2_keys[sc[2]]) == 0
# Wait for final expiration cycle
time.sleep(6*lc_interval)
expire3_keys = list_bucket_storage_class(client, bucket_name)
- eq(len(expire3_keys['STANDARD']), 4)
- eq(len(expire3_keys[sc[1]]), 0)
- eq(len(expire3_keys[sc[2]]), 2)
+ assert len(expire3_keys['STANDARD']) == 4
+ assert len(expire3_keys[sc[1]]) == 0
+ assert len(expire3_keys[sc[2]]) == 2
@attr(resource='bucket')
@attr(method='put')
lifecycle = {'Rules': rules}
response = client.put_bucket_lifecycle_configuration(Bucket=bucket, LifecycleConfiguration=lifecycle)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
@attr(resource='bucket')
create_multiple_versions(client, bucket, "test1/b", 3)
init_keys = list_bucket_storage_class(client, bucket)
- eq(len(init_keys['STANDARD']), 6)
+ assert len(init_keys['STANDARD']) == 6
lc_interval = get_lc_debug_interval()
time.sleep(4*lc_interval)
expire1_keys = list_bucket_storage_class(client, bucket)
- eq(len(expire1_keys['STANDARD']), 2)
- eq(len(expire1_keys[sc[1]]), 4)
- eq(len(expire1_keys[sc[2]]), 0)
+ assert len(expire1_keys['STANDARD']) == 2
+ assert len(expire1_keys[sc[1]]) == 4
+ assert len(expire1_keys[sc[2]]) == 0
time.sleep(4*lc_interval)
expire1_keys = list_bucket_storage_class(client, bucket)
- eq(len(expire1_keys['STANDARD']), 2)
- eq(len(expire1_keys[sc[1]]), 0)
- eq(len(expire1_keys[sc[2]]), 4)
+ assert len(expire1_keys['STANDARD']) == 2
+ assert len(expire1_keys[sc[1]]) == 0
+ assert len(expire1_keys[sc[2]]) == 4
time.sleep(6*lc_interval)
expire1_keys = list_bucket_storage_class(client, bucket)
- eq(len(expire1_keys['STANDARD']), 2)
- eq(len(expire1_keys[sc[1]]), 0)
- eq(len(expire1_keys[sc[2]]), 0)
+ assert len(expire1_keys['STANDARD']) == 2
+ assert len(expire1_keys[sc[1]]) == 0
+ assert len(expire1_keys[sc[2]]) == 0
def verify_object(client, bucket, key, content=None, sc=None):
response = client.get_object(Bucket=bucket, Key=key)
sc = 'STANDARD'
if ('StorageClass' in response):
- eq(response['StorageClass'], sc)
+ assert response['StorageClass'] == sc
else: #storage class should be STANDARD
- eq('STANDARD', sc)
+ assert 'STANDARD' == sc
if (content != None):
body = _get_body(response)
- eq(body, content)
+ assert body == content
# The test harness for lifecycle is configured to treat days as 10 second intervals.
@attr(resource='bucket')
# Get list of all keys
response = client.list_objects(Bucket=bucket_name)
init_keys = _get_keys(response)
- eq(len(init_keys), 4)
+ assert len(init_keys) == 4
lc_interval = get_lc_debug_interval()
# Wait for first expiration (plus fudge to handle the timer window)
time.sleep(10*lc_interval)
expire1_keys = list_bucket_storage_class(client, bucket_name)
- eq(len(expire1_keys['STANDARD']), 2)
+ assert len(expire1_keys['STANDARD']) == 2
if (retain_head_object != None and retain_head_object == "true"):
- eq(len(expire1_keys[cloud_sc]), 2)
+ assert len(expire1_keys[cloud_sc]) == 2
else:
- eq(len(expire1_keys[cloud_sc]), 0)
+ assert len(expire1_keys[cloud_sc]) == 0
time.sleep(2*lc_interval)
# Check if objects copied to target path
if (retain_head_object != None and retain_head_object == "true"):
# verify HEAD response
response = client.head_object(Bucket=bucket_name, Key=keys[0])
- eq(0, response['ContentLength'])
- eq(cloud_sc, response['StorageClass'])
+ assert 0 == response['ContentLength']
+ assert cloud_sc == response['StorageClass']
# GET should return InvalidObjectState error
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=src_key)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
- eq(error_code, 'InvalidObjectState')
+ assert status == 403
+ assert error_code == 'InvalidObjectState'
# COPY of object should return InvalidObjectState error
copy_source = {'Bucket': bucket_name, 'Key': src_key}
e = assert_raises(ClientError, client.copy, CopySource=copy_source, Bucket=bucket_name, Key='copy_obj')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
- eq(error_code, 'InvalidObjectState')
+ assert status == 403
+ assert error_code == 'InvalidObjectState'
# DELETE should succeed
response = client.delete_object(Bucket=bucket_name, Key=src_key)
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=src_key)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 404)
- eq(error_code, 'NoSuchKey')
+ assert status == 404
+ assert error_code == 'NoSuchKey'
# Similar to 'test_lifecycle_transition' but for cloud transition
@attr(resource='bucket')
# Get list of all keys
response = client.list_objects(Bucket=bucket_name)
init_keys = _get_keys(response)
- eq(len(init_keys), 4)
+ assert len(init_keys) == 4
lc_interval = get_lc_debug_interval()
# Wait for first expiration (plus fudge to handle the timer window)
time.sleep(4*lc_interval)
expire1_keys = list_bucket_storage_class(client, bucket_name)
- eq(len(expire1_keys['STANDARD']), 2)
- eq(len(expire1_keys[sc[1]]), 2)
- eq(len(expire1_keys[sc[2]]), 0)
+ assert len(expire1_keys['STANDARD']) == 2
+ assert len(expire1_keys[sc[1]]) == 2
+ assert len(expire1_keys[sc[2]]) == 0
# Wait for next expiration cycle
time.sleep(7*lc_interval)
expire1_keys = list_bucket_storage_class(client, bucket_name)
- eq(len(expire1_keys['STANDARD']), 2)
- eq(len(expire1_keys[sc[1]]), 0)
+ assert len(expire1_keys['STANDARD']) == 2
+ assert len(expire1_keys[sc[1]]) == 0
if (retain_head_object != None and retain_head_object == "true"):
- eq(len(expire1_keys[sc[2]]), 2)
+ assert len(expire1_keys[sc[2]]) == 2
else:
- eq(len(expire1_keys[sc[2]]), 0)
+ assert len(expire1_keys[sc[2]]) == 0
# Wait for final expiration cycle
time.sleep(12*lc_interval)
expire3_keys = list_bucket_storage_class(client, bucket_name)
- eq(len(expire3_keys['STANDARD']), 2)
- eq(len(expire3_keys[sc[1]]), 0)
- eq(len(expire3_keys[sc[2]]), 0)
+ assert len(expire3_keys['STANDARD']) == 2
+ assert len(expire3_keys[sc[1]]) == 0
+ assert len(expire3_keys[sc[2]]) == 0
# Noncurrent objects for cloud transition
@attr(resource='bucket')
create_multiple_versions(client, bucket, k, 3)
init_keys = list_bucket_storage_class(client, bucket)
- eq(len(init_keys['STANDARD']), 6)
+ assert len(init_keys['STANDARD']) == 6
response = client.list_object_versions(Bucket=bucket)
time.sleep(4*lc_interval)
expire1_keys = list_bucket_storage_class(client, bucket)
- eq(len(expire1_keys['STANDARD']), 2)
- eq(len(expire1_keys[sc[1]]), 4)
- eq(len(expire1_keys[sc[2]]), 0)
+ assert len(expire1_keys['STANDARD']) == 2
+ assert len(expire1_keys[sc[1]]) == 4
+ assert len(expire1_keys[sc[2]]) == 0
time.sleep(10*lc_interval)
expire1_keys = list_bucket_storage_class(client, bucket)
- eq(len(expire1_keys['STANDARD']), 2)
- eq(len(expire1_keys[sc[1]]), 0)
+ assert len(expire1_keys['STANDARD']) == 2
+ assert len(expire1_keys[sc[1]]) == 0
if (retain_head_object == None or retain_head_object == "false"):
- eq(len(expire1_keys[sc[2]]), 0)
+ assert len(expire1_keys[sc[2]]) == 0
else:
- eq(len(expire1_keys[sc[2]]), 4)
+ assert len(expire1_keys[sc[2]]) == 4
#check if versioned object exists on cloud endpoint
if target_path == None:
# Wait for first expiration (plus fudge to handle the timer window)
time.sleep(8*lc_interval)
expire1_keys = list_bucket_storage_class(client, bucket)
- eq(len(expire1_keys['STANDARD']), 1)
+ assert len(expire1_keys['STANDARD']) == 1
if (retain_head_object != None and retain_head_object == "true"):
- eq(len(expire1_keys[cloud_sc]), 1)
+ assert len(expire1_keys[cloud_sc]) == 1
else:
- eq(len(expire1_keys[cloud_sc]), 0)
+ assert len(expire1_keys[cloud_sc]) == 0
# Check if objects copied to target path
if target_path == None:
e = assert_raises(ClientError, client.head_object, Bucket=bucket_name, Key=key)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
+ assert status == 400
lf = (lambda **kwargs: kwargs['params']['headers'].update(sse_client_headers))
client.meta.events.register('before-call.s3.HeadObject', lf)
response = client.head_object(Bucket=bucket_name, Key=key)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
@attr(resource='object')
@attr(method='put')
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=key)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
+ assert status == 400
@attr(resource='object')
@attr(method='put')
client.meta.events.register('before-call.s3.GetObject', lf)
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=key)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
+ assert status == 400
@attr(resource='object')
@attr(method='put')
client.meta.events.register('before-call.s3.PutObject', lf)
e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=key, Body=data)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
+ assert status == 400
@attr(resource='object')
@attr(method='put')
client.meta.events.register('before-call.s3.PutObject', lf)
e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=key, Body=data)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
+ assert status == 400
def _multipart_upload_enc(client, bucket_name, key, size, part_size, init_headers, part_headers, metadata, resend_parts):
"""
response = client.get_object(Bucket=bucket_name, Key=key, Range=r)
read_range = response['ContentLength']
body = _get_body(response)
- eq(read_range, toread)
- eq(body, data[ofs:end+1])
+ assert read_range == toread
+ assert body == data[ofs:end+1]
@attr(resource='object')
@attr(method='put')
response = client.head_bucket(Bucket=bucket_name)
rgw_object_count = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-object-count', 1))
- eq(rgw_object_count, 1)
+ assert rgw_object_count == 1
rgw_bytes_used = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-bytes-used', objlen))
- eq(rgw_bytes_used, objlen)
+ assert rgw_bytes_used == objlen
lf = (lambda **kwargs: kwargs['params']['headers'].update(enc_headers))
client.meta.events.register('before-call.s3.GetObject', lf)
response = client.get_object(Bucket=bucket_name, Key=key)
- eq(response['Metadata'], metadata)
- eq(response['ResponseMetadata']['HTTPHeaders']['content-type'], content_type)
+ assert response['Metadata'] == metadata
+ assert response['ResponseMetadata']['HTTPHeaders']['content-type'] == content_type
body = _get_body(response)
- eq(body, data)
+ assert body == data
size = response['ContentLength']
- eq(len(body), size)
+ assert len(body) == size
_check_content_using_range_enc(client, bucket_name, key, data, 1000000, enc_headers=enc_headers)
_check_content_using_range_enc(client, bucket_name, key, data, 10000000, enc_headers=enc_headers)
e = assert_raises(ClientError, _multipart_upload_enc, client=client, bucket_name=bucket_name,
key=key, size=objlen, part_size=5*1024*1024, init_headers=init_headers, part_headers=part_headers, metadata=metadata, resend_parts=resend_parts)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
+ assert status == 400
@attr(resource='object')
@attr(method='put')
e = assert_raises(ClientError, _multipart_upload_enc, client=client, bucket_name=bucket_name,
key=key, size=objlen, part_size=5*1024*1024, init_headers=init_headers, part_headers=part_headers, metadata=metadata, resend_parts=resend_parts)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
+ assert status == 400
@attr(resource='object')
@attr(method='put')
response = client.head_bucket(Bucket=bucket_name)
rgw_object_count = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-object-count', 1))
- eq(rgw_object_count, 1)
+ assert rgw_object_count == 1
rgw_bytes_used = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-bytes-used', objlen))
- eq(rgw_bytes_used, objlen)
+ assert rgw_bytes_used == objlen
lf = (lambda **kwargs: kwargs['params']['headers'].update(put_headers))
client.meta.events.register('before-call.s3.GetObject', lf)
response = client.get_object(Bucket=bucket_name, Key=key)
- eq(response['Metadata'], metadata)
- eq(response['ResponseMetadata']['HTTPHeaders']['content-type'], content_type)
+ assert response['Metadata'] == metadata
+ assert response['ResponseMetadata']['HTTPHeaders']['content-type'] == content_type
lf = (lambda **kwargs: kwargs['params']['headers'].update(get_headers))
client.meta.events.register('before-call.s3.GetObject', lf)
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=key)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
+ assert status == 400
@attr(resource='object')
('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 204)
+ assert r.status_code == 204
get_headers = {
'x-amz-server-side-encryption-customer-algorithm': 'AES256',
client.meta.events.register('before-call.s3.GetObject', lf)
response = client.get_object(Bucket=bucket_name, Key='foo.txt')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
@attr(assertion='success')
@attr('encryption')
response = client.get_object(Bucket=bucket_name, Key='testobj')
body = _get_body(response)
- eq(body, data)
+ assert body == data
client.put_object(Bucket=bucket_name, Key=key, Body=data)
response = client.head_object(Bucket=bucket_name, Key=key)
- eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'aws:kms')
- eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption-aws-kms-key-id'], kms_keyid)
+ assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'aws:kms'
+ assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption-aws-kms-key-id'] == kms_keyid
lf = (lambda **kwargs: kwargs['params']['headers'].update(sse_kms_client_headers))
client.meta.events.register('before-call.s3.HeadObject', lf)
e = assert_raises(ClientError, client.head_object, Bucket=bucket_name, Key=key)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
+ assert status == 400
@attr(resource='object')
@attr(method='put')
response = client.get_object(Bucket=bucket_name, Key=key)
body = _get_body(response)
- eq(body, data)
+ assert body == data
@attr(resource='object')
@attr(method='put')
e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=key, Body=data)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
+ assert status == 400
@attr(resource='object')
@attr(method='put')
response = client.head_bucket(Bucket=bucket_name)
rgw_object_count = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-object-count', 1))
- eq(rgw_object_count, 1)
+ assert rgw_object_count == 1
rgw_bytes_used = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-bytes-used', objlen))
- eq(rgw_bytes_used, objlen)
+ assert rgw_bytes_used == objlen
lf = (lambda **kwargs: kwargs['params']['headers'].update(part_headers))
client.meta.events.register('before-call.s3.UploadPart', lf)
response = client.get_object(Bucket=bucket_name, Key=key)
- eq(response['Metadata'], metadata)
- eq(response['ResponseMetadata']['HTTPHeaders']['content-type'], content_type)
+ assert response['Metadata'] == metadata
+ assert response['ResponseMetadata']['HTTPHeaders']['content-type'] == content_type
body = _get_body(response)
- eq(body, data)
+ assert body == data
size = response['ContentLength']
- eq(len(body), size)
+ assert len(body) == size
_check_content_using_range(key, bucket_name, data, 1000000)
_check_content_using_range(key, bucket_name, data, 10000000)
('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 204)
+ assert r.status_code == 204
response = client.get_object(Bucket=bucket_name, Key='foo.txt')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
@attr(resource='object')
@attr(method='put')
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=key)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
+ assert status == 400
@attr(resource='bucket')
@attr(method='get')
alt_client = get_alt_client()
response = alt_client.list_objects(Bucket=bucket_name)
- eq(len(response['Contents']), 1)
+ assert len(response['Contents']) == 1
@attr('bucket-policy')
@pytest.mark.bucket_policy
alt_client = get_alt_client()
response = alt_client.list_objects_v2(Bucket=bucket_name)
- eq(len(response['Contents']), 1)
+ assert len(response['Contents']) == 1
@attr(resource='bucket')
@attr(method='get')
alt_client = get_alt_client()
e = assert_raises(ClientError, alt_client.list_objects, Bucket=bucket_name)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
- eq(error_code, 'AccessDenied')
+ assert status == 403
+ assert error_code == 'AccessDenied'
client.delete_bucket_policy(Bucket=bucket_name)
client.put_bucket_acl(Bucket=bucket_name, ACL='public-read')
alt_client = get_alt_client()
e = assert_raises(ClientError, alt_client.list_objects_v2, Bucket=bucket_name)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
- eq(error_code, 'AccessDenied')
+ assert status == 403
+ assert error_code == 'AccessDenied'
client.delete_bucket_policy(Bucket=bucket_name)
client.put_bucket_acl(Bucket=bucket_name, ACL='public-read')
#alt_client = get_alt_client()
#response = alt_client.list_objects(Bucket=bucket_name)
- eq(len(response['Contents']), 1)
+ assert len(response['Contents']) == 1
@attr(resource='bucket')
@attr(method='get')
#alt_client = get_alt_client()
#response = alt_client.list_objects_v2(Bucket=bucket_name)
- eq(len(response['Contents']), 1)
+ assert len(response['Contents']) == 1
@attr(resource='bucket')
@attr(method='get')
alt_client = get_alt_client()
response = alt_client.list_objects(Bucket=bucket_name)
- eq(len(response['Contents']), 1)
+ assert len(response['Contents']) == 1
alt_client = get_alt_client()
response = alt_client.list_objects(Bucket=bucket_name2)
- eq(len(response['Contents']), 1)
+ assert len(response['Contents']) == 1
@attr(resource='bucket')
@attr(method='get')
alt_client = get_alt_client()
response = alt_client.list_objects_v2(Bucket=bucket_name)
- eq(len(response['Contents']), 1)
+ assert len(response['Contents']) == 1
alt_client = get_alt_client()
response = alt_client.list_objects_v2(Bucket=bucket_name2)
- eq(len(response['Contents']), 1)
+ assert len(response['Contents']) == 1
@attr(resource='bucket')
@attr(method='put')
client.meta.events.register('before-call.s3.GetObject', lf)
response = client.get_object(Bucket=bucket_name, Key=key)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
request_headers={'referer': 'http://www.example.com/index.html'}
client.meta.events.register('before-call.s3.GetObject', lf)
response = client.get_object(Bucket=bucket_name, Key=key)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
# the 'referer' headers need to be removed for this one
#response = client.get_object(Bucket=bucket_name, Key=key)
- #eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ #assert response['ResponseMetadata']['HTTPStatusCode'] == 200
request_headers={'referer': 'http://example.com'}
# TODO: Compare Requests sent in Boto3, Wireshark, RGW Log for both boto and boto3
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=key)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
+ assert status == 403
response = client.get_bucket_policy(Bucket=bucket_name)
print(response)
input_tagset = _create_simple_tagset(2)
response = client.put_object_tagging(Bucket=bucket_name, Key=key, Tagging=input_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
response = client.get_object_tagging(Bucket=bucket_name, Key=key)
- eq(response['TagSet'], input_tagset['TagSet'])
+ assert response['TagSet'] == input_tagset['TagSet']
@attr(resource='object')
input_tagset = _create_simple_tagset(count)
response = client.put_object_tagging(Bucket=bucket_name, Key=key, Tagging=input_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
response = client.head_object(Bucket=bucket_name, Key=key)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
- eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-tagging-count'], str(count))
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+ assert response['ResponseMetadata']['HTTPHeaders']['x-amz-tagging-count'] == str(count)
@attr(resource='object')
@attr(method='get')
input_tagset = _create_simple_tagset(10)
response = client.put_object_tagging(Bucket=bucket_name, Key=key, Tagging=input_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
response = client.get_object_tagging(Bucket=bucket_name, Key=key)
- eq(response['TagSet'], input_tagset['TagSet'])
+ assert response['TagSet'] == input_tagset['TagSet']
@attr(resource='object')
@attr(method='get')
input_tagset = _create_simple_tagset(11)
e = assert_raises(ClientError, client.put_object_tagging, Bucket=bucket_name, Key=key, Tagging=input_tagset)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidTag')
+ assert status == 400
+ assert error_code == 'InvalidTag'
response = client.get_object_tagging(Bucket=bucket_name, Key=key)
- eq(len(response['TagSet']), 0)
+ assert len(response['TagSet']) == 0
@attr(resource='object')
@attr(method='get')
input_tagset = {'TagSet': tagset}
response = client.put_object_tagging(Bucket=bucket_name, Key=key, Tagging=input_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
response = client.get_object_tagging(Bucket=bucket_name, Key=key)
for kv_pair in response['TagSet']:
- eq((kv_pair in input_tagset['TagSet']), True)
+ assert kv_pair in input_tagset['TagSet']
@attr(resource='object')
@attr(method='get')
e = assert_raises(ClientError, client.put_object_tagging, Bucket=bucket_name, Key=key, Tagging=input_tagset)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidTag')
+ assert status == 400
+ assert error_code == 'InvalidTag'
response = client.get_object_tagging(Bucket=bucket_name, Key=key)
- eq(len(response['TagSet']), 0)
+ assert len(response['TagSet']) == 0
@attr(resource='object')
@attr(method='get')
e = assert_raises(ClientError, client.put_object_tagging, Bucket=bucket_name, Key=key, Tagging=input_tagset)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidTag')
+ assert status == 400
+ assert error_code == 'InvalidTag'
response = client.get_object_tagging(Bucket=bucket_name, Key=key)
- eq(len(response['TagSet']), 0)
+ assert len(response['TagSet']) == 0
@attr(resource='object')
@attr(method='get')
input_tagset = {'TagSet': tagset}
response = client.put_object_tagging(Bucket=bucket_name, Key=key, Tagging=input_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
response = client.get_object_tagging(Bucket=bucket_name, Key=key)
- eq(response['TagSet'], input_tagset['TagSet'])
+ assert response['TagSet'] == input_tagset['TagSet']
tagset2 = []
tagset2.append({'Key': 'key3', 'Value': 'val3'})
input_tagset2 = {'TagSet': tagset2}
response = client.put_object_tagging(Bucket=bucket_name, Key=key, Tagging=input_tagset2)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
response = client.get_object_tagging(Bucket=bucket_name, Key=key)
- eq(response['TagSet'], input_tagset2['TagSet'])
+ assert response['TagSet'] == input_tagset2['TagSet']
@attr(resource='object')
@attr(method='get')
input_tagset = _create_simple_tagset(2)
response = client.put_object_tagging(Bucket=bucket_name, Key=key, Tagging=input_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
response = client.get_object_tagging(Bucket=bucket_name, Key=key)
- eq(response['TagSet'], input_tagset['TagSet'])
+ assert response['TagSet'] == input_tagset['TagSet']
response = client.delete_object_tagging(Bucket=bucket_name, Key=key)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 204)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 204
response = client.get_object_tagging(Bucket=bucket_name, Key=key)
- eq(len(response['TagSet']), 0)
+ assert len(response['TagSet']) == 0
@attr(resource='object')
@attr(method='post')
])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 204)
+ assert r.status_code == 204
response = client.get_object(Bucket=bucket_name, Key=key_name)
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
response = client.get_object_tagging(Bucket=bucket_name, Key=key_name)
- eq(response['TagSet'], input_tagset['TagSet'])
+ assert response['TagSet'] == input_tagset['TagSet']
@attr(resource='object')
@attr(method='post')
('file', ('bar'))])
r = requests.post(url, files=payload, verify=get_config_ssl_verify())
- eq(r.status_code, 204)
+ assert r.status_code == 204
response = client.get_object(Bucket=bucket_name, Key='foo.txt')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
@attr(resource='object')
client.put_object(Bucket=bucket_name, Key=key, Body=data)
response = client.get_object(Bucket=bucket_name, Key=key)
body = _get_body(response)
- eq(body, data)
+ assert body == data
response = client.get_object_tagging(Bucket=bucket_name, Key=key)
response_tagset = response['TagSet']
tagset = tagset
- eq(response_tagset, tagset)
+ assert response_tagset == tagset
def _make_arn_resource(path="*"):
return "arn:aws:s3:::{}".format(path)
input_tagset = _create_simple_tagset(10)
response = client.put_object_tagging(Bucket=bucket_name, Key=key, Tagging=input_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
alt_client = get_alt_client()
response = alt_client.get_object_tagging(Bucket=bucket_name, Key=key)
- eq(response['TagSet'], input_tagset['TagSet'])
+ assert response['TagSet'] == input_tagset['TagSet']
@attr(resource='object')
@attr(method='get')
input_tagset = _create_simple_tagset(10)
alt_client = get_alt_client()
response = alt_client.put_object_tagging(Bucket=bucket_name, Key=key, Tagging=input_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
response = client.get_object_tagging(Bucket=bucket_name, Key=key)
- eq(response['TagSet'], input_tagset['TagSet'])
+ assert response['TagSet'] == input_tagset['TagSet']
@attr(resource='object')
@attr(method='get')
input_tagset = _create_simple_tagset(10)
response = client.put_object_tagging(Bucket=bucket_name, Key=key, Tagging=input_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
alt_client = get_alt_client()
response = alt_client.delete_object_tagging(Bucket=bucket_name, Key=key)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 204)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 204
response = client.get_object_tagging(Bucket=bucket_name, Key=key)
- eq(len(response['TagSet']), 0)
+ assert len(response['TagSet']) == 0
@attr(resource='object')
@attr(method='put')
response = client.list_object_versions(Bucket=bucket_name)
versions = response['Versions']
for version in versions:
- eq(version['VersionId'], version_id)
+ assert version['VersionId'] == version_id
# for versioning-default-bucket, no version-id should return.
bucket_name = get_new_bucket()
key = 'baz'
response = client.put_object(Bucket=bucket_name, Key=key)
- eq(('VersionId' in response), False)
+ assert 'VersionId' not in response
# for versioning-suspended-bucket, no version-id should return.
bucket_name = get_new_bucket()
key = 'baz'
check_configure_versioning_retry(bucket_name, "Suspended", "Suspended")
response = client.put_object(Bucket=bucket_name, Key=key)
- eq(('VersionId' in response), False)
+ assert not 'VersionId' in response
@attr(resource='object')
@attr(method='put')
response = client.list_object_versions(Bucket=bucket_name)
versions = response['Versions']
for version in versions:
- eq(version['VersionId'], version_id)
+ assert version['VersionId'] == version_id
# for versioning-default-bucket, no version-id should return.
bucket_name = get_new_bucket()
(upload_id, data, parts) = _multipart_upload(bucket_name=bucket_name, key=key, size=objlen, client=client, content_type=content_type, metadata=metadata)
response = client.complete_multipart_upload(Bucket=bucket_name, Key=key, UploadId=upload_id, MultipartUpload={'Parts': parts})
- eq(('VersionId' in response), False)
+ assert 'VersionId' not in response
# for versioning-suspended-bucket, no version-id should return
bucket_name = get_new_bucket()
(upload_id, data, parts) = _multipart_upload(bucket_name=bucket_name, key=key, size=objlen, client=client, content_type=content_type, metadata=metadata)
response = client.complete_multipart_upload(Bucket=bucket_name, Key=key, UploadId=upload_id, MultipartUpload={'Parts': parts})
- eq(('VersionId' in response), False)
+ assert 'VersionId' not in response
@attr(resource='object')
@attr(method='get')
input_tagset = {'TagSet': tagset}
response = client.put_object_tagging(Bucket=bucket_name, Key='publictag', Tagging=input_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
tagset2 = []
tagset2.append({'Key': 'security', 'Value': 'private'})
input_tagset = {'TagSet': tagset2}
response = client.put_object_tagging(Bucket=bucket_name, Key='privatetag', Tagging=input_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
tagset3 = []
tagset3.append({'Key': 'security1', 'Value': 'public'})
input_tagset = {'TagSet': tagset3}
response = client.put_object_tagging(Bucket=bucket_name, Key='invalidtag', Tagging=input_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
alt_client = get_alt_client()
response = alt_client.get_object(Bucket=bucket_name, Key='publictag')
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
e = assert_raises(ClientError, alt_client.get_object, Bucket=bucket_name, Key='privatetag')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
+ assert status == 403
e = assert_raises(ClientError, alt_client.get_object, Bucket=bucket_name, Key='invalidtag')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
+ assert status == 403
@attr(resource='object')
@attr(method='get')
input_tagset = {'TagSet': tagset}
response = client.put_object_tagging(Bucket=bucket_name, Key='publictag', Tagging=input_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
tagset2 = []
tagset2.append({'Key': 'security', 'Value': 'private'})
input_tagset = {'TagSet': tagset2}
response = client.put_object_tagging(Bucket=bucket_name, Key='privatetag', Tagging=input_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
tagset3 = []
tagset3.append({'Key': 'security1', 'Value': 'public'})
input_tagset = {'TagSet': tagset3}
response = client.put_object_tagging(Bucket=bucket_name, Key='invalidtag', Tagging=input_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
alt_client = get_alt_client()
response = alt_client.get_object_tagging(Bucket=bucket_name, Key='publictag')
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
# A get object itself should fail since we allowed only GetObjectTagging
e = assert_raises(ClientError, alt_client.get_object, Bucket=bucket_name, Key='publictag')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
+ assert status == 403
e = assert_raises(ClientError, alt_client.get_object_tagging, Bucket=bucket_name, Key='privatetag')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
+ assert status == 403
e = assert_raises(ClientError, alt_client.get_object_tagging, Bucket=bucket_name, Key='invalidtag')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
+ assert status == 403
@attr(resource='object')
input_tagset = {'TagSet': tagset}
response = client.put_object_tagging(Bucket=bucket_name, Key='publictag', Tagging=input_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
tagset2 = []
tagset2.append({'Key': 'security', 'Value': 'private'})
input_tagset = {'TagSet': tagset2}
response = client.put_object_tagging(Bucket=bucket_name, Key='privatetag', Tagging=input_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
alt_client = get_alt_client()
# PUT requests with object tagging are a bit wierd, if you forget to put
input_tagset = {'TagSet': testtagset1}
response = alt_client.put_object_tagging(Bucket=bucket_name, Key='publictag', Tagging=input_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
e = assert_raises(ClientError, alt_client.put_object_tagging, Bucket=bucket_name, Key='privatetag', Tagging=input_tagset)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
+ assert status == 403
testtagset2 = []
testtagset2.append({'Key': 'security', 'Value': 'private'})
input_tagset = {'TagSet': testtagset2}
response = alt_client.put_object_tagging(Bucket=bucket_name, Key='publictag', Tagging=input_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
# Now try putting the original tags again, this should fail
input_tagset = {'TagSet': testtagset1}
e = assert_raises(ClientError, alt_client.put_object_tagging, Bucket=bucket_name, Key='publictag', Tagging=input_tagset)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
+ assert status == 403
@attr(resource='object')
# policy on how to do this right
response = alt_client.get_object(Bucket=bucket_name2, Key='new_foo')
body = _get_body(response)
- eq(body, 'public/foo')
+ assert body == 'public/foo'
copy_source = {'Bucket': bucket_name, 'Key': 'public/bar'}
alt_client.copy_object(Bucket=bucket_name2, CopySource=copy_source, Key='new_foo2')
response = alt_client.get_object(Bucket=bucket_name2, Key='new_foo2')
body = _get_body(response)
- eq(body, 'public/bar')
+ assert body == 'public/bar'
copy_source = {'Bucket': bucket_name, 'Key': 'private/foo'}
check_access_denied(alt_client.copy_object, Bucket=bucket_name2, CopySource=copy_source, Key='new_foo2')
# policy on how to do this right
response = alt_client.get_object(Bucket=bucket_name, Key='new_foo')
body = _get_body(response)
- eq(body, 'public/foo')
+ assert body == 'public/foo'
# remove the x-amz-metadata-directive header
def remove_header(**kwargs):
# as an ERROR anyway
response = alt_client.put_object(Bucket=bucket_name, Key=key1, Body=key1)
#response = alt_client.put_object_acl(Bucket=bucket_name, Key=key1, ACL='private')
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
key2 = 'public-key'
e = assert_raises(ClientError, alt_client.put_object, Bucket=bucket_name, Key=key2, Body=key2)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
+ assert status == 403
@attr(resource='object')
alt_client.meta.events.register('before-call.s3.PutObject', lf)
response = alt_client.put_object(Bucket=bucket_name, Key=key1, Body=key1)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
def remove_header(**kwargs):
if ("x-amz-grant-full-control" in kwargs['params']['headers']):
key2 = 'key2'
response = alt_client.put_object(Bucket=bucket_name2, Key=key2, Body=key2)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
acl1_response = client.get_object_acl(Bucket=bucket_name, Key=key1)
acl2_response = alt_client.get_object_acl(Bucket=bucket_name2, Key=key2)
- eq(acl1_response['Grants'][0]['Grantee']['ID'], main_user_id)
- eq(acl2_response['Grants'][0]['Grantee']['ID'], alt_user_id)
+ assert acl1_response['Grants'][0]['Grantee']['ID'] == main_user_id
+ assert acl2_response['Grants'][0]['Grantee']['ID'] == alt_user_id
@attr(resource='object')
client.meta.events.register('before-call.s3.PutObject', lf)
e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=key1_str)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidArgument')
+ assert status == 400
+ assert error_code == 'InvalidArgument'
@attr(resource='object')
@attr(method='put')
client.meta.events.register('before-call.s3.PutObject', lf)
e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=key1_str)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidArgument')
+ assert status == 400
+ assert error_code == 'InvalidArgument'
@attr(resource='object')
@attr(method='put')
client.meta.events.register('before-call.s3.PutObject', lf)
e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=key1_str)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidArgument')
+ assert status == 400
+ assert error_code == 'InvalidArgument'
@attr(resource='object')
@attr(method='put')
client.meta.events.register('before-call.s3.PutObject', lf)
e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=key1_str)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidArgument')
+ assert status == 400
+ assert error_code == 'InvalidArgument'
@attr(resource='object')
@attr(method='put')
# first validate that writing a sse-s3 object works
response = client.put_object(Bucket=bucket_name, Key=key1_str, ServerSideEncryption='AES256')
response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption']
- eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'AES256')
+ assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'AES256'
# then validate that a non-encrypted object fails.
# (this also breaks the connection--non-sse bug, probably because the server
# breaks next call...
response = client.put_object(Bucket=bucket_name, Key=key1_str,
ServerSideEncryption='aws:kms', SSEKMSKeyId=kms_keyid)
- eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'aws:kms')
- eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption-aws-kms-key-id'], kms_keyid)
+ assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'aws:kms'
+ assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption-aws-kms-key-id'] == kms_keyid
check_access_denied(client.put_object, Bucket=bucket_name, Key=key2_str, Body=key2_str)
input_tagset = {'TagSet': tagset}
response = client.put_object_tagging(Bucket=bucket_name, Key='publictag', Tagging=input_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
tagset2 = []
tagset2.append({'Key': 'security', 'Value': 'private'})
input_tagset = {'TagSet': tagset2}
response = client.put_object_tagging(Bucket=bucket_name, Key='privatetag', Tagging=input_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
tagset3 = []
tagset3.append({'Key': 'security1', 'Value': 'public'})
input_tagset = {'TagSet': tagset3}
response = client.put_object_tagging(Bucket=bucket_name, Key='invalidtag', Tagging=input_tagset)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
alt_client = get_alt_client()
response = alt_client.get_object_acl(Bucket=bucket_name, Key='publictag')
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
# A get object itself should fail since we allowed only GetObjectTagging
e = assert_raises(ClientError, alt_client.get_object, Bucket=bucket_name, Key='publictag')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
+ assert status == 403
e = assert_raises(ClientError, alt_client.get_object_tagging, Bucket=bucket_name, Key='privatetag')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
+ assert status == 403
e = assert_raises(ClientError, alt_client.get_object_tagging, Bucket=bucket_name, Key='invalidtag')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
+ assert status == 403
@attr(resource='bucket')
response = client.put_object_lock_configuration(
Bucket=bucket_name,
ObjectLockConfiguration=conf)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
conf = {'ObjectLockEnabled':'Enabled',
'Rule': {
response = client.put_object_lock_configuration(
Bucket=bucket_name,
ObjectLockConfiguration=conf)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
response = client.get_bucket_versioning(Bucket=bucket_name)
- eq(response['Status'], 'Enabled')
+ assert response['Status'] == 'Enabled'
@attr(resource='bucket')
}}
e = assert_raises(ClientError, client.put_object_lock_configuration, Bucket=bucket_name, ObjectLockConfiguration=conf)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 409)
- eq(error_code, 'InvalidBucketState')
+ assert status == 409
+ assert error_code == 'InvalidBucketState'
@attr(resource='bucket')
}}
e = assert_raises(ClientError, client.put_object_lock_configuration, Bucket=bucket_name, ObjectLockConfiguration=conf)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'MalformedXML')
+ assert status == 400
+ assert error_code == 'MalformedXML'
@attr(resource='bucket')
}}
e = assert_raises(ClientError, client.put_object_lock_configuration, Bucket=bucket_name, ObjectLockConfiguration=conf)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidRetentionPeriod')
+ assert status == 400
+ assert error_code == 'InvalidRetentionPeriod'
@attr(resource='bucket')
}}
e = assert_raises(ClientError, client.put_object_lock_configuration, Bucket=bucket_name, ObjectLockConfiguration=conf)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidRetentionPeriod')
+ assert status == 400
+ assert error_code == 'InvalidRetentionPeriod'
@attr(resource='bucket')
}}
e = assert_raises(ClientError, client.put_object_lock_configuration, Bucket=bucket_name, ObjectLockConfiguration=conf)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'MalformedXML')
+ assert status == 400
+ assert error_code == 'MalformedXML'
conf = {'ObjectLockEnabled':'Enabled',
'Rule': {
}}
e = assert_raises(ClientError, client.put_object_lock_configuration, Bucket=bucket_name, ObjectLockConfiguration=conf)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'MalformedXML')
+ assert status == 400
+ assert error_code == 'MalformedXML'
@attr(resource='bucket')
}}
e = assert_raises(ClientError, client.put_object_lock_configuration, Bucket=bucket_name, ObjectLockConfiguration=conf)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'MalformedXML')
+ assert status == 400
+ assert error_code == 'MalformedXML'
@attr(resource='bucket')
client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)
e = assert_raises(ClientError, client.put_bucket_versioning, Bucket=bucket_name, VersioningConfiguration={'Status': 'Suspended'})
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 409)
- eq(error_code, 'InvalidBucketState')
+ assert status == 409
+ assert error_code == 'InvalidBucketState'
@attr(resource='bucket')
Bucket=bucket_name,
ObjectLockConfiguration=conf)
response = client.get_object_lock_configuration(Bucket=bucket_name)
- eq(response['ObjectLockConfiguration'], conf)
+ assert response['ObjectLockConfiguration'] == conf
@attr(resource='bucket')
client.create_bucket(Bucket=bucket_name)
e = assert_raises(ClientError, client.get_object_lock_configuration, Bucket=bucket_name)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 404)
- eq(error_code, 'ObjectLockConfigurationNotFoundError')
+ assert status == 404
+ assert error_code == 'ObjectLockConfigurationNotFoundError'
@attr(resource='bucket')
version_id = response['VersionId']
retention = {'Mode':'GOVERNANCE', 'RetainUntilDate':datetime.datetime(2030,1,1,tzinfo=pytz.UTC)}
response = client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True)
retention = {'Mode':'GOVERNANCE', 'RetainUntilDate':datetime.datetime(2030,1,1,tzinfo=pytz.UTC)}
e = assert_raises(ClientError, client.put_object_retention, Bucket=bucket_name, Key=key, Retention=retention)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidRequest')
+ assert status == 400
+ assert error_code == 'InvalidRequest'
@attr(resource='bucket')
retention = {'Mode':'governance', 'RetainUntilDate':datetime.datetime(2030,1,1,tzinfo=pytz.UTC)}
e = assert_raises(ClientError, client.put_object_retention, Bucket=bucket_name, Key=key, Retention=retention)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'MalformedXML')
+ assert status == 400
+ assert error_code == 'MalformedXML'
retention = {'Mode':'abc', 'RetainUntilDate':datetime.datetime(2030,1,1,tzinfo=pytz.UTC)}
e = assert_raises(ClientError, client.put_object_retention, Bucket=bucket_name, Key=key, Retention=retention)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'MalformedXML')
+ assert status == 400
+ assert error_code == 'MalformedXML'
@attr(resource='bucket')
retention = {'Mode':'GOVERNANCE', 'RetainUntilDate':datetime.datetime(2030,1,1,tzinfo=pytz.UTC)}
client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention)
response = client.get_object_retention(Bucket=bucket_name, Key=key)
- eq(response['Retention'], retention)
+ assert response['Retention'] == retention
client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True)
client.put_object(Bucket=bucket_name, Body='abc', Key=key)
e = assert_raises(ClientError, client.get_object_retention, Bucket=bucket_name, Key=key)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidRequest')
+ assert status == 400
+ assert error_code == 'InvalidRequest'
@attr(resource='bucket')
retention = {'Mode':'GOVERNANCE', 'RetainUntilDate':datetime.datetime(2030,1,1,tzinfo=pytz.UTC)}
client.put_object_retention(Bucket=bucket_name, Key=key, VersionId=version_id, Retention=retention)
response = client.get_object_retention(Bucket=bucket_name, Key=key, VersionId=version_id)
- eq(response['Retention'], retention)
+ assert response['Retention'] == retention
client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True)
retention = {'Mode':'GOVERNANCE', 'RetainUntilDate':datetime.datetime(2030,1,1,tzinfo=pytz.UTC)}
client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention)
response = client.get_object_retention(Bucket=bucket_name, Key=key)
- eq(response['Retention'], retention)
+ assert response['Retention'] == retention
client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True)
retention2 = {'Mode':'GOVERNANCE', 'RetainUntilDate':datetime.datetime(2030,1,3,tzinfo=pytz.UTC)}
client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention2)
response = client.get_object_retention(Bucket=bucket_name, Key=key)
- eq(response['Retention'], retention2)
+ assert response['Retention'] == retention2
client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True)
retention = {'Mode':'GOVERNANCE', 'RetainUntilDate':datetime.datetime(2030,1,1,tzinfo=pytz.UTC)}
e = assert_raises(ClientError, client.put_object_retention, Bucket=bucket_name, Key=key, Retention=retention)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
- eq(error_code, 'AccessDenied')
+ assert status == 403
+ assert error_code == 'AccessDenied'
client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True)
retention = {'Mode':'GOVERNANCE', 'RetainUntilDate':datetime.datetime(2030,1,1,tzinfo=pytz.UTC)}
client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention, BypassGovernanceRetention=True)
response = client.get_object_retention(Bucket=bucket_name, Key=key)
- eq(response['Retention'], retention)
+ assert response['Retention'] == retention
client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True)
client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention)
e = assert_raises(ClientError, client.delete_object, Bucket=bucket_name, Key=key, VersionId=response['VersionId'])
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
- eq(error_code, 'AccessDenied')
+ assert status == 403
+ assert error_code == 'AccessDenied'
response = client.delete_object(Bucket=bucket_name, Key=key, VersionId=response['VersionId'], BypassGovernanceRetention=True)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 204)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 204
@attr(resource='bucket')
@attr(method='delete')
del_response = client.delete_object(Bucket=bucket_name, Key=key)
e = assert_raises(ClientError, client.delete_object, Bucket=bucket_name, Key=key, VersionId=response['VersionId'])
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
- eq(error_code, 'AccessDenied')
+ assert status == 403
+ assert error_code == 'AccessDenied'
client.delete_object(Bucket=bucket_name, Key=key, VersionId=del_response['VersionId'])
e = assert_raises(ClientError, client.delete_object, Bucket=bucket_name, Key=key, VersionId=response['VersionId'])
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
- eq(error_code, 'AccessDenied')
+ assert status == 403
+ assert error_code == 'AccessDenied'
response = client.delete_object(Bucket=bucket_name, Key=key, VersionId=response['VersionId'], BypassGovernanceRetention=True)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 204)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 204
@attr(resource='object')
@attr(method='delete')
}
)
- eq(len(delete_response['Deleted']), 1)
- eq(len(delete_response['Errors']), 1)
+ assert len(delete_response['Deleted']) == 1
+ assert len(delete_response['Errors']) == 1
failed_object = delete_response['Errors'][0]
- eq(failed_object['Code'], 'AccessDenied')
- eq(failed_object['Key'], key1)
- eq(failed_object['VersionId'], versionId1)
+ assert failed_object['Code'] == 'AccessDenied'
+ assert failed_object['Key'] == key1
+ assert failed_object['VersionId'] == versionId1
deleted_object = delete_response['Deleted'][0]
- eq(deleted_object['Key'], key2)
- eq(deleted_object['VersionId'], versionId2)
+ assert deleted_object['Key'] == key2
+ assert deleted_object['VersionId'] == versionId2
delete_response = client.delete_objects(
Bucket=bucket_name,
)
assert( ('Errors' not in delete_response) or (len(delete_response['Errors']) == 0) )
- eq(len(delete_response['Deleted']), 1)
+ assert len(delete_response['Deleted']) == 1
deleted_object = delete_response['Deleted'][0]
- eq(deleted_object['Key'], key1)
- eq(deleted_object['VersionId'], versionId1)
+ assert deleted_object['Key'] == key1
+ assert deleted_object['VersionId'] == versionId1
client.put_object(Bucket=bucket_name, Body='abc', Key=key)
legal_hold = {'Status': 'ON'}
response = client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold=legal_hold)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
response = client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold={'Status':'OFF'})
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
@attr(resource='bucket')
legal_hold = {'Status': 'ON'}
e = assert_raises(ClientError, client.put_object_legal_hold, Bucket=bucket_name, Key=key, LegalHold=legal_hold)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidRequest')
+ assert status == 400
+ assert error_code == 'InvalidRequest'
@attr(resource='bucket')
legal_hold = {'Status': 'abc'}
e = assert_raises(ClientError, client.put_object_legal_hold, Bucket=bucket_name, Key=key, LegalHold=legal_hold)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'MalformedXML')
+ assert status == 400
+ assert error_code == 'MalformedXML'
@attr(resource='bucket')
legal_hold = {'Status': 'ON'}
client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold=legal_hold)
response = client.get_object_legal_hold(Bucket=bucket_name, Key=key)
- eq(response['LegalHold'], legal_hold)
+ assert response['LegalHold'] == legal_hold
legal_hold_off = {'Status': 'OFF'}
client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold=legal_hold_off)
response = client.get_object_legal_hold(Bucket=bucket_name, Key=key)
- eq(response['LegalHold'], legal_hold_off)
+ assert response['LegalHold'] == legal_hold_off
@attr(resource='bucket')
client.put_object(Bucket=bucket_name, Body='abc', Key=key)
e = assert_raises(ClientError, client.get_object_legal_hold, Bucket=bucket_name, Key=key)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(error_code, 'InvalidRequest')
+ assert status == 400
+ assert error_code == 'InvalidRequest'
@attr(resource='bucket')
client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold={'Status': 'ON'})
e = assert_raises(ClientError, client.delete_object, Bucket=bucket_name, Key=key, VersionId=response['VersionId'])
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
- eq(error_code, 'AccessDenied')
+ assert status == 403
+ assert error_code == 'AccessDenied'
client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold={'Status':'OFF'})
response = client.put_object(Bucket=bucket_name, Body='abc', Key=key)
client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold={'Status': 'OFF'})
response = client.delete_object(Bucket=bucket_name, Key=key, VersionId=response['VersionId'])
- eq(response['ResponseMetadata']['HTTPStatusCode'], 204)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 204
@attr(resource='bucket')
retention = {'Mode':'GOVERNANCE', 'RetainUntilDate':datetime.datetime(2030,1,1,tzinfo=pytz.UTC)}
client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention)
response = client.head_object(Bucket=bucket_name, Key=key)
- eq(response['ObjectLockMode'], retention['Mode'])
- eq(response['ObjectLockRetainUntilDate'], retention['RetainUntilDate'])
- eq(response['ObjectLockLegalHoldStatus'], legal_hold['Status'])
+ assert response['ObjectLockMode'] == retention['Mode']
+ assert response['ObjectLockRetainUntilDate'] == retention['RetainUntilDate']
+ assert response['ObjectLockLegalHoldStatus'] == legal_hold['Status']
client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold={'Status':'OFF'})
client.delete_object(Bucket=bucket_name, Key=key, VersionId=response['VersionId'], BypassGovernanceRetention=True)
ObjectLockRetainUntilDate=datetime.datetime(2030,1,1,tzinfo=pytz.UTC), ObjectLockLegalHoldStatus='ON')
response = client.head_object(Bucket=bucket_name, Key=key)
- eq(response['ObjectLockMode'], 'GOVERNANCE')
- eq(response['ObjectLockRetainUntilDate'], datetime.datetime(2030,1,1,tzinfo=pytz.UTC))
- eq(response['ObjectLockLegalHoldStatus'], 'ON')
+ assert response['ObjectLockMode'] == 'GOVERNANCE'
+ assert response['ObjectLockRetainUntilDate'] == datetime.datetime(2030,1,1,tzinfo=pytz.UTC)
+ assert response['ObjectLockLegalHoldStatus'] == 'ON'
client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold={'Status':'OFF'})
client.delete_object(Bucket=bucket_name, Key=key, VersionId=response['VersionId'], BypassGovernanceRetention=True)
retention = {'Mode':'COMPLIANCE', 'RetainUntilDate':retain_until}
e = assert_raises(ClientError, client.put_object_retention, Bucket=bucket_name, Key=key, Retention=retention)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
- eq(error_code, 'AccessDenied')
+ assert status == 403
+ assert error_code == 'AccessDenied'
@attr(resource='object')
@attr(method='put')
retention = {'Mode':'GOVERNANCE', 'RetainUntilDate':retain_until}
e = assert_raises(ClientError, client.put_object_retention, Bucket=bucket_name, Key=key, Retention=retention)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
- eq(error_code, 'AccessDenied')
+ assert status == 403
+ assert error_code == 'AccessDenied'
@attr(resource='object')
@attr(method='copy')
client.copy_object(Bucket=bucket_name, CopySource=bucket_name+'/foo', CopySourceIfMatch=resp['ETag'], Key='bar')
response = client.get_object(Bucket=bucket_name, Key='bar')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
@attr(resource='object')
@attr(method='copy')
e = assert_raises(ClientError, client.copy_object, Bucket=bucket_name, CopySource=bucket_name+'/foo', CopySourceIfMatch='ABCORZ', Key='bar')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 412)
- eq(error_code, 'PreconditionFailed')
+ assert status == 412
+ assert error_code == 'PreconditionFailed'
@attr(resource='object')
@attr(method='copy')
e = assert_raises(ClientError, client.copy_object, Bucket=bucket_name, CopySource=bucket_name+'/foo', CopySourceIfNoneMatch=resp['ETag'], Key='bar')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 412)
- eq(error_code, 'PreconditionFailed')
+ assert status == 412
+ assert error_code == 'PreconditionFailed'
@attr(resource='object')
@attr(method='copy')
client.copy_object(Bucket=bucket_name, CopySource=bucket_name+'/foo', CopySourceIfNoneMatch='ABCORZ', Key='bar')
response = client.get_object(Bucket=bucket_name, Key='bar')
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
@attr(resource='object')
@attr(method='get')
client = get_client()
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key='\xae\x8a-')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
- eq(e.response['Error']['Message'], 'Couldn\'t parse the specified URI.')
+ assert status == 400
+ assert e.response['Error']['Message'] == 'Couldn\'t parse the specified URI.'
@attr(resource='bucket')
@attr(method='get')
bucket_name = get_new_bucket()
client = get_client()
resp = client.get_bucket_policy_status(Bucket=bucket_name)
- eq(resp['PolicyStatus']['IsPublic'],False)
+ assert resp['PolicyStatus']['IsPublic'] == False
@attr(resource='bucket')
@attr(method='get')
client = get_client()
client.put_bucket_acl(Bucket=bucket_name, ACL='public-read')
resp = client.get_bucket_policy_status(Bucket=bucket_name)
- eq(resp['PolicyStatus']['IsPublic'],True)
+ assert resp['PolicyStatus']['IsPublic'] == True
@attr(resource='bucket')
@attr(method='get')
client = get_client()
client.put_bucket_acl(Bucket=bucket_name, ACL='authenticated-read')
resp = client.get_bucket_policy_status(Bucket=bucket_name)
- eq(resp['PolicyStatus']['IsPublic'],True)
+ assert resp['PolicyStatus']['IsPublic'] == True
@attr(resource='bucket')
client = get_client()
resp = client.get_bucket_policy_status(Bucket=bucket_name)
- eq(resp['PolicyStatus']['IsPublic'],False)
+ assert resp['PolicyStatus']['IsPublic'] == False
resource1 = "arn:aws:s3:::" + bucket_name
resource2 = "arn:aws:s3:::" + bucket_name + "/*"
client.put_bucket_policy(Bucket=bucket_name, Policy=policy_document)
resp = client.get_bucket_policy_status(Bucket=bucket_name)
- eq(resp['PolicyStatus']['IsPublic'],True)
+ assert resp['PolicyStatus']['IsPublic'] == True
@attr(resource='bucket')
client = get_client()
resp = client.get_bucket_policy_status(Bucket=bucket_name)
- eq(resp['PolicyStatus']['IsPublic'],False)
+ assert resp['PolicyStatus']['IsPublic'] == False
resource1 = "arn:aws:s3:::" + bucket_name
resource2 = "arn:aws:s3:::" + bucket_name + "/*"
client.put_bucket_policy(Bucket=bucket_name, Policy=policy_document)
resp = client.get_bucket_policy_status(Bucket=bucket_name)
- eq(resp['PolicyStatus']['IsPublic'],False)
+ assert resp['PolicyStatus']['IsPublic'] == False
@attr(resource='bucket')
client = get_client()
resp = client.get_bucket_policy_status(Bucket=bucket_name)
- eq(resp['PolicyStatus']['IsPublic'],False)
+ assert resp['PolicyStatus']['IsPublic'] == False
resource1 = "arn:aws:s3:::" + bucket_name
resource2 = "arn:aws:s3:::" + bucket_name + "/*"
client.put_bucket_policy(Bucket=bucket_name, Policy=policy_document)
resp = client.get_bucket_policy_status(Bucket=bucket_name)
- eq(resp['PolicyStatus']['IsPublic'],True)
+ assert resp['PolicyStatus']['IsPublic'] == True
@attr(resource='bucket')
@attr(method='get')
client = get_client()
resp = client.get_public_access_block(Bucket=bucket_name)
- eq(resp['PublicAccessBlockConfiguration']['BlockPublicAcls'], False)
- eq(resp['PublicAccessBlockConfiguration']['BlockPublicPolicy'], False)
- eq(resp['PublicAccessBlockConfiguration']['IgnorePublicAcls'], False)
- eq(resp['PublicAccessBlockConfiguration']['RestrictPublicBuckets'], False)
+ assert resp['PublicAccessBlockConfiguration']['BlockPublicAcls'] == False
+ assert resp['PublicAccessBlockConfiguration']['BlockPublicPolicy'] == False
+ assert resp['PublicAccessBlockConfiguration']['IgnorePublicAcls'] == False
+ assert resp['PublicAccessBlockConfiguration']['RestrictPublicBuckets'] == False
@attr(resource='bucket')
@attr(method='put')
client.put_public_access_block(Bucket=bucket_name, PublicAccessBlockConfiguration=access_conf)
resp = client.get_public_access_block(Bucket=bucket_name)
- eq(resp['PublicAccessBlockConfiguration']['BlockPublicAcls'], access_conf['BlockPublicAcls'])
- eq(resp['PublicAccessBlockConfiguration']['BlockPublicPolicy'], access_conf['BlockPublicPolicy'])
- eq(resp['PublicAccessBlockConfiguration']['IgnorePublicAcls'], access_conf['IgnorePublicAcls'])
- eq(resp['PublicAccessBlockConfiguration']['RestrictPublicBuckets'], access_conf['RestrictPublicBuckets'])
+ assert resp['PublicAccessBlockConfiguration']['BlockPublicAcls'] == access_conf['BlockPublicAcls']
+ assert resp['PublicAccessBlockConfiguration']['BlockPublicPolicy'] == access_conf['BlockPublicPolicy']
+ assert resp['PublicAccessBlockConfiguration']['IgnorePublicAcls'] == access_conf['IgnorePublicAcls']
+ assert resp['PublicAccessBlockConfiguration']['RestrictPublicBuckets'] == access_conf['RestrictPublicBuckets']
@attr(resource='bucket')
client.put_public_access_block(Bucket=bucket_name, PublicAccessBlockConfiguration=access_conf)
resp = client.get_public_access_block(Bucket=bucket_name)
- eq(resp['PublicAccessBlockConfiguration']['BlockPublicAcls'], access_conf['BlockPublicAcls'])
- eq(resp['PublicAccessBlockConfiguration']['BlockPublicPolicy'], access_conf['BlockPublicPolicy'])
+ assert resp['PublicAccessBlockConfiguration']['BlockPublicAcls'] == access_conf['BlockPublicAcls']
+ assert resp['PublicAccessBlockConfiguration']['BlockPublicPolicy'] == access_conf['BlockPublicPolicy']
e = assert_raises(ClientError, client.put_bucket_acl, Bucket=bucket_name,ACL='public-read')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
+ assert status == 403
e = assert_raises(ClientError, client.put_bucket_acl, Bucket=bucket_name,ACL='public-read-write')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
+ assert status == 403
e = assert_raises(ClientError, client.put_bucket_acl, Bucket=bucket_name,ACL='authenticated-read')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
+ assert status == 403
@attr(resource='bucket')
client.put_public_access_block(Bucket=bucket_name, PublicAccessBlockConfiguration=access_conf)
# resp = client.get_public_access_block(Bucket=bucket_name)
- # eq(resp['PublicAccessBlockConfiguration']['BlockPublicAcls'], access_conf['BlockPublicAcls'])
- # eq(resp['PublicAccessBlockConfiguration']['BlockPublicPolicy'], access_conf['BlockPublicPolicy'])
+ # assert resp['PublicAccessBlockConfiguration']['BlockPublicAcls'] == access_conf['BlockPublicAcls']
+ # assert resp['PublicAccessBlockConfiguration']['BlockPublicPolicy'] == access_conf['BlockPublicPolicy']
#FIXME: use empty body until #42208
e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key='foo1', Body='', ACL='public-read')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
+ assert status == 403
e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key='foo2', Body='', ACL='public-read')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
+ assert status == 403
e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key='foo3', Body='', ACL='authenticated-read')
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 403)
+ assert status == 403
@attr(resource='bucket')
client.put_object(Bucket=bucket_name,Key='key1',Body='abcde',ACL='public-read')
resp=alt_client.get_object(Bucket=bucket_name, Key='key1')
- eq(_get_body(resp), 'abcde')
+ assert _get_body(resp) == 'abcde'
access_conf = {'BlockPublicAcls': False,
'IgnorePublicAcls': True,
client.put_bucket_policy(Bucket=bucket_name, Policy=policy_document)
(upload_id, data, parts) = _multipart_upload(bucket_name=bucket_name, key=key, size=objlen, client=client)
response = client.complete_multipart_upload(Bucket=bucket_name, Key=key, UploadId=upload_id, MultipartUpload={'Parts': parts})
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
def _put_bucket_encryption_s3(client, bucket_name):
"""
]
}
response = client.put_bucket_encryption(Bucket=bucket_name, ServerSideEncryptionConfiguration=server_side_encryption_conf)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
def _put_bucket_encryption_kms(client, bucket_name):
"""
]
}
response = client.put_bucket_encryption(Bucket=bucket_name, ServerSideEncryptionConfiguration=server_side_encryption_conf)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
@attr(resource='bucket')
except ClientError as e:
response_code = e.response['Error']['Code']
- eq(response_code, 'ServerSideEncryptionConfigurationNotFoundError')
+ assert response_code == 'ServerSideEncryptionConfigurationNotFoundError'
_put_bucket_encryption_s3(client, bucket_name)
response = client.get_bucket_encryption(Bucket=bucket_name)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
- eq(response['ServerSideEncryptionConfiguration']['Rules'][0]['ApplyServerSideEncryptionByDefault']['SSEAlgorithm'], 'AES256')
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+ assert response['ServerSideEncryptionConfiguration']['Rules'][0]['ApplyServerSideEncryptionByDefault']['SSEAlgorithm'] == 'AES256'
@attr(resource='bucket')
except ClientError as e:
response_code = e.response['Error']['Code']
- eq(response_code, 'ServerSideEncryptionConfigurationNotFoundError')
+ assert response_code == 'ServerSideEncryptionConfigurationNotFoundError'
_put_bucket_encryption_kms(client, bucket_name)
response = client.get_bucket_encryption(Bucket=bucket_name)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
- eq(response['ServerSideEncryptionConfiguration']['Rules'][0]['ApplyServerSideEncryptionByDefault']['SSEAlgorithm'], 'aws:kms')
- eq(response['ServerSideEncryptionConfiguration']['Rules'][0]['ApplyServerSideEncryptionByDefault']['KMSMasterKeyID'], kms_keyid)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+ assert response['ServerSideEncryptionConfiguration']['Rules'][0]['ApplyServerSideEncryptionByDefault']['SSEAlgorithm'] == 'aws:kms'
+ assert response['ServerSideEncryptionConfiguration']['Rules'][0]['ApplyServerSideEncryptionByDefault']['KMSMasterKeyID'] == kms_keyid
@attr(resource='bucket')
client = get_client()
response = client.delete_bucket_encryption(Bucket=bucket_name)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 204)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 204
_put_bucket_encryption_s3(client, bucket_name)
response = client.delete_bucket_encryption(Bucket=bucket_name)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 204)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 204
response_code = ""
try:
except ClientError as e:
response_code = e.response['Error']['Code']
- eq(response_code, 'ServerSideEncryptionConfigurationNotFoundError')
+ assert response_code == 'ServerSideEncryptionConfigurationNotFoundError'
@attr(resource='bucket')
client = get_client()
response = client.delete_bucket_encryption(Bucket=bucket_name)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 204)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 204
_put_bucket_encryption_kms(client, bucket_name)
response = client.delete_bucket_encryption(Bucket=bucket_name)
- eq(response['ResponseMetadata']['HTTPStatusCode'], 204)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 204
response_code = ""
try:
except ClientError as e:
response_code = e.response['Error']['Code']
- eq(response_code, 'ServerSideEncryptionConfigurationNotFoundError')
+ assert response_code == 'ServerSideEncryptionConfigurationNotFoundError'
def _test_sse_s3_default_upload(file_size):
"""
data = 'A'*file_size
response = client.put_object(Bucket=bucket_name, Key='testobj', Body=data)
- eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'AES256')
+ assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'AES256'
response = client.get_object(Bucket=bucket_name, Key='testobj')
- eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'AES256')
+ assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'AES256'
body = _get_body(response)
- eq(body, data)
+ assert body == data
@attr(resource='object')
@attr(method='put')
data = 'A'*file_size
response = client.put_object(Bucket=bucket_name, Key='testobj', Body=data)
- eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'aws:kms')
- eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption-aws-kms-key-id'], kms_keyid)
+ assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'aws:kms'
+ assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption-aws-kms-key-id'] == kms_keyid
response = client.get_object(Bucket=bucket_name, Key='testobj')
- eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'aws:kms')
- eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption-aws-kms-key-id'], kms_keyid)
+ assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'aws:kms'
+ assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption-aws-kms-key-id'] == kms_keyid
body = _get_body(response)
- eq(body, data)
+ assert body == data
@attr(resource='object')
@attr(method='put')
client.put_object(Bucket=bucket_name, Key=key, Body=data)
response = client.head_object(Bucket=bucket_name, Key=key)
- eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'AES256')
+ assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'AES256'
sse_s3_headers = {
'x-amz-server-side-encryption': 'AES256',
client.meta.events.register('before-call.s3.HeadObject', lf)
e = assert_raises(ClientError, client.head_object, Bucket=bucket_name, Key=key)
status, error_code = _get_status_and_error_code(e.response)
- eq(status, 400)
+ assert status == 400
@attr(resource='object')
@attr(method='put')
response = client.head_bucket(Bucket=bucket_name)
rgw_object_count = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-object-count', 1))
- eq(rgw_object_count, 1)
+ assert rgw_object_count == 1
rgw_bytes_used = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-bytes-used', objlen))
- eq(rgw_bytes_used, objlen)
+ assert rgw_bytes_used == objlen
lf = (lambda **kwargs: kwargs['params']['headers'].update(part_headers))
client.meta.events.register('before-call.s3.UploadPart', lf)
response = client.get_object(Bucket=bucket_name, Key=key)
- eq(response['Metadata'], metadata)
- eq(response['ResponseMetadata']['HTTPHeaders']['content-type'], content_type)
+ assert response['Metadata'] == metadata
+ assert response['ResponseMetadata']['HTTPHeaders']['content-type'] == content_type
body = _get_body(response)
- eq(body, data)
+ assert body == data
size = response['ContentLength']
- eq(len(body), size)
+ assert len(body) == size
_check_content_using_range(key, bucket_name, data, 1000000)
_check_content_using_range(key, bucket_name, data, 10000000)
('file', ('bar'))])
r = requests.post(url, files = payload)
- eq(r.status_code, 204)
+ assert r.status_code == 204
response = client.get_object(Bucket=bucket_name, Key='foo.txt')
- eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'AES256')
+ assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'AES256'
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
@attr(resource='object')
@attr(method='post')
('file', ('bar'))])
r = requests.post(url, files = payload)
- eq(r.status_code, 204)
+ assert r.status_code == 204
response = client.get_object(Bucket=bucket_name, Key='foo.txt')
- eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'aws:kms')
- eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption-aws-kms-key-id'], kms_keyid)
+ assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'aws:kms'
+ assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption-aws-kms-key-id'] == kms_keyid
body = _get_body(response)
- eq(body, 'bar')
+ assert body == 'bar'
def _test_sse_s3_encrypted_upload(file_size):
data = 'A'*file_size
response = client.put_object(Bucket=bucket_name, Key='testobj', Body=data, ServerSideEncryption='AES256')
- eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'AES256')
+ assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'AES256'
response = client.get_object(Bucket=bucket_name, Key='testobj')
- eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'AES256')
+ assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'AES256'
body = _get_body(response)
- eq(body, data)
+ assert body == data
@attr(resource='object')
@attr(method='put')
import botocore.session
from botocore.exceptions import ClientError
from botocore.exceptions import ParamValidationError
-from nose.tools import eq_ as eq
from nose.plugins.attrib import attr
import pytest
import isodate
user_policy = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Deny\",\"Action\":\"s3:*\",\"Resource\":[\"*\"],\"Condition\":{\"BoolIfExists\":{\"sts:authentication\":\"false\"}}},{\"Effect\":\"Allow\",\"Action\":\"sts:GetSessionToken\",\"Resource\":\"*\",\"Condition\":{\"BoolIfExists\":{\"sts:authentication\":\"false\"}}}]}"
(resp_err,resp,policy_name)=put_user_policy(iam_client,sts_user_id,None,user_policy)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
response=sts_client.get_session_token()
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client=boto3.client('s3',
aws_access_key_id = response['Credentials']['AccessKeyId'],
bucket_name = get_new_bucket_name()
try:
s3bucket = s3_client.create_bucket(Bucket=bucket_name)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
finish=s3_client.delete_bucket(Bucket=bucket_name)
finally: # clean up user policy even if create_bucket/delete_bucket fails
iam_client.delete_user_policy(UserName=sts_user_id,PolicyName=policy_name)
user_policy = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Deny\",\"Action\":\"s3:*\",\"Resource\":[\"*\"],\"Condition\":{\"BoolIfExists\":{\"sts:authentication\":\"false\"}}},{\"Effect\":\"Allow\",\"Action\":\"sts:GetSessionToken\",\"Resource\":\"*\",\"Condition\":{\"BoolIfExists\":{\"sts:authentication\":\"false\"}}}]}"
(resp_err,resp,policy_name)=put_user_policy(iam_client,sts_user_id,None,user_policy)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
response=sts_client.get_session_token()
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client=boto3.client('s3',
aws_access_key_id = s3_main_access_key,
s3bucket = s3_client.create_bucket(Bucket=bucket_name)
except ClientError as e:
s3bucket_error = e.response.get("Error", {}).get("Code")
- eq(s3bucket_error,'AccessDenied')
+ assert s3bucket_error == 'AccessDenied'
iam_client.delete_user_policy(UserName=sts_user_id,PolicyName=policy_name)
@attr(resource='assume role')
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"AWS\":[\"arn:aws:iam:::user/"+sts_user_id+"\"]},\"Action\":[\"sts:AssumeRole\"]}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
resp=sts_client.assume_role(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
)
bucket_name = get_new_bucket_name()
s3bucket = s3_client.create_bucket(Bucket=bucket_name)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
bkt = s3_client.delete_bucket(Bucket=bucket_name)
- eq(bkt['ResponseMetadata']['HTTPStatusCode'],204)
+ assert bkt['ResponseMetadata']['HTTPStatusCode'] == 204
@attr(resource='assume role')
@attr(method='get')
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"AWS\":[\"arn:aws:iam:::user/"+sts_user_id+"\"]},\"Action\":[\"sts:AssumeRole\"]}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Deny\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
resp=sts_client.assume_role(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
s3bucket = s3_client.create_bucket(Bucket=bucket_name)
except ClientError as e:
s3bucket_error = e.response.get("Error", {}).get("Code")
- eq(s3bucket_error,'AccessDenied')
+ assert s3bucket_error == 'AccessDenied'
@attr(resource='assume role')
@attr(method='get')
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"AWS\":[\"arn:aws:iam:::user/"+sts_user_id+"\"]},\"Action\":[\"sts:AssumeRole\"]}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
resp=sts_client.assume_role(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,DurationSeconds=900)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
time.sleep(900)
s3_client = boto3.client('s3',
s3bucket = s3_client.create_bucket(Bucket=bucket_name)
except ClientError as e:
s3bucket_error = e.response.get("Error", {}).get("Code")
- eq(s3bucket_error,'AccessDenied')
+ assert s3bucket_error == 'AccessDenied'
@attr(resource='assume role')
@attr(method='head')
policy_document = '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"AWS":["arn:aws:iam:::user/'+sts_user_id+'"]},"Action":["sts:AssumeRole"]}]}'
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name)
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name
# allow GetObject but deny ListBucket
role_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":"s3:GetObject","Principal":"*","Resource":"arn:aws:s3:::*"}}'
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
resp=sts_client.assume_role(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
s3_client.head_object(Bucket=bucket_name, Key='nonexistent')
except ClientError as e:
status = e.response['ResponseMetadata']['HTTPStatusCode']
- eq(status,403)
+ assert status == 403
@attr(resource='assume role')
@attr(method='head')
policy_document = '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"AWS":["arn:aws:iam:::user/'+sts_user_id+'"]},"Action":["sts:AssumeRole"]}]}'
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name)
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name
# allow GetObject and ListBucket
role_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":["s3:GetObject","s3:ListBucket"],"Principal":"*","Resource":"arn:aws:s3:::*"}}'
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
resp=sts_client.assume_role(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
s3_client.head_object(Bucket=bucket_name, Key='nonexistent')
except ClientError as e:
status = e.response['ResponseMetadata']['HTTPStatusCode']
- eq(status,404)
+ assert status == 404
@attr(resource='assume role with web identity')
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
)
bucket_name = get_new_bucket_name()
s3bucket = s3_client.create_bucket(Bucket=bucket_name)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
bkt = s3_client.delete_bucket(Bucket=bucket_name)
- eq(bkt['ResponseMetadata']['HTTPStatusCode'],204)
+ assert bkt['ResponseMetadata']['HTTPStatusCode'] == 204
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"]
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
resp=""
try:
log.debug('{}'.format(e.response.get("Error", {}).get("Code")))
log.debug('{}'.format(e))
resp_error = e.response.get("Error", {}).get("Code")
- eq(resp_error,'AccessDenied')
+ assert resp_error == 'AccessDenied'
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"]
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy_new = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":[\"arn:aws:s3:::test2\",\"arn:aws:s3:::test2/*\"]}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy_new)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:GetObject\",\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}"
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
s3bucket = s3_client.create_bucket(Bucket=bucket_name_1)
except ClientError as e:
s3bucket_error = e.response.get("Error", {}).get("Code")
- eq(s3bucket_error, 'AccessDenied')
+ assert s3bucket_error == 'AccessDenied'
bucket_name_2 = 'test2'
try:
s3bucket = s3_client.create_bucket(Bucket=bucket_name_2)
except ClientError as e:
s3bucket_error = e.response.get("Error", {}).get("Code")
- eq(s3bucket_error, 'AccessDenied')
+ assert s3bucket_error == 'AccessDenied'
bucket_body = 'please-write-something'
#body.encode(encoding='utf_8')
s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
except ClientError as e:
s3_put_obj_error = e.response.get("Error", {}).get("Code")
- eq(s3_put_obj_error,'NoSuchBucket')
+ assert s3_put_obj_error == 'NoSuchBucket'
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_arn
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy_new = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":[\"*\"]}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy_new)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client_iam_creds = get_s3_client_using_iam_creds()
bucket_name_1 = 'test1'
s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name_1)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:GetObject\",\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}"
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
bucket_body = 'this is a test file'
s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
- eq(s3_put_obj['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3_put_obj['ResponseMetadata']['HTTPStatusCode'] == 200
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_arn
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy_new = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":[\"*\"]}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy_new)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client_iam_creds = get_s3_client_using_iam_creds()
bucket_name_1 = 'test1'
s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name_1)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:GetObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}"
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
except ClientError as e:
s3_put_obj_error = e.response.get("Error", {}).get("Code")
- eq(s3_put_obj_error, 'AccessDenied')
+ assert s3_put_obj_error == 'AccessDenied'
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_arn
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy_new = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:GetObject\",\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy_new)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client_iam_creds = get_s3_client_using_iam_creds()
bucket_name_1 = 'test1'
s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name_1)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":[\"*\"]}}"
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
)
bucket_body = 'this is a test file'
s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
- eq(s3_put_obj['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3_put_obj['ResponseMetadata']['HTTPStatusCode'] == 200
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_arn
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy_new = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy_new)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client_iam_creds = get_s3_client_using_iam_creds()
bucket_name_1 = 'test1'
s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name_1)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:GetObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}"
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
except ClientError as e:
s3_put_obj_error = e.response.get("Error", {}).get("Code")
- eq(s3_put_obj_error, 'AccessDenied')
+ assert s3_put_obj_error == 'AccessDenied'
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_arn
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy_new = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Deny\",\"Action\":\"s3:*\",\"Resource\":[\"*\"]}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy_new)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client_iam_creds = get_s3_client_using_iam_creds()
bucket_name_1 = 'test1'
s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name_1)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}"
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
except ClientError as e:
s3_put_obj_error = e.response.get("Error", {}).get("Code")
- eq(s3_put_obj_error, 'AccessDenied')
+ assert s3_put_obj_error == 'AccessDenied'
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_arn
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy_new = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy_new)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client_iam_creds = get_s3_client_using_iam_creds()
bucket_name_1 = 'test1'
s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name_1)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Deny\",\"Action\":[\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}"
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
except ClientError as e:
s3_put_obj_error = e.response.get("Error", {}).get("Code")
- eq(s3_put_obj_error, 'AccessDenied')
+ assert s3_put_obj_error == 'AccessDenied'
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_arn
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":[\"*\"]}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
s3client_iamcreds = get_s3_client_using_iam_creds()
bucket_name_1 = 'test1'
s3bucket = s3client_iamcreds.create_bucket(Bucket=bucket_name_1)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
resource1 = "arn:aws:s3:::" + bucket_name_1
resource2 = "arn:aws:s3:::" + bucket_name_1 + "/*"
session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}"
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
)
bucket_body = 'this is a test file'
s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
- eq(s3_put_obj['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3_put_obj['ResponseMetadata']['HTTPStatusCode'] == 200
try:
obj = s3_client.get_object(Bucket=bucket_name_1, Key="test-1.txt")
except ClientError as e:
s3object_error = e.response.get("Error", {}).get("Code")
- eq(s3object_error, 'AccessDenied')
+ assert s3object_error == 'AccessDenied'
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_arn
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":[\"*\"]}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
s3client_iamcreds = get_s3_client_using_iam_creds()
bucket_name_1 = 'test1'
s3bucket = s3client_iamcreds.create_bucket(Bucket=bucket_name_1)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
resource1 = "arn:aws:s3:::" + bucket_name_1
resource2 = "arn:aws:s3:::" + bucket_name_1 + "/*"
session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}"
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
)
bucket_body = 'this is a test file'
s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
- eq(s3_put_obj['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3_put_obj['ResponseMetadata']['HTTPStatusCode'] == 200
s3_get_obj = s3_client.get_object(Bucket=bucket_name_1, Key="test-1.txt")
- eq(s3_get_obj['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3_get_obj['ResponseMetadata']['HTTPStatusCode'] == 200
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_arn
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":[\"*\"]}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
s3client_iamcreds = get_s3_client_using_iam_creds()
bucket_name_1 = 'test1'
s3bucket = s3client_iamcreds.create_bucket(Bucket=bucket_name_1)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
resource1 = "arn:aws:s3:::" + bucket_name_1
resource2 = "arn:aws:s3:::" + bucket_name_1 + "/*"
session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}"
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
)
bucket_body = 'this is a test file'
s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
- eq(s3_put_obj['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3_put_obj['ResponseMetadata']['HTTPStatusCode'] == 200
copy_source = {
'Bucket': bucket_name_1,
s3_client.copy(copy_source, bucket_name_1, "test-2.txt")
s3_get_obj = s3_client.get_object(Bucket=bucket_name_1, Key="test-2.txt")
- eq(s3_get_obj['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3_get_obj['ResponseMetadata']['HTTPStatusCode'] == 200
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_arn
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
s3client_iamcreds = get_s3_client_using_iam_creds()
bucket_name_1 = 'test1'
s3bucket = s3client_iamcreds.create_bucket(Bucket=bucket_name_1)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:PutObject\",\"s3:GetObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}"
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
except ClientError as e:
s3putobj_error = e.response.get("Error", {}).get("Code")
- eq(s3putobj_error, 'AccessDenied')
+ assert s3putobj_error == 'AccessDenied'
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_arn
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":[\"*\"]}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
s3client_iamcreds = get_s3_client_using_iam_creds()
bucket_name_1 = 'test1'
s3bucket = s3client_iamcreds.create_bucket(Bucket=bucket_name_1)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
resource1 = "arn:aws:s3:::" + bucket_name_1
resource2 = "arn:aws:s3:::" + bucket_name_1 + "/*"
session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}"
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
except ClientError as e:
s3putobj_error = e.response.get("Error", {}).get("Code")
- eq(s3putobj_error, 'AccessDenied')
+ assert s3putobj_error == 'AccessDenied'
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_arn
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":sub\":\""+sub+"\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
)
bucket_name = get_new_bucket_name()
s3bucket = s3_client.create_bucket(Bucket=bucket_name)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
bkt = s3_client.delete_bucket(Bucket=bucket_name)
- eq(bkt['ResponseMetadata']['HTTPStatusCode'],204)
+ assert bkt['ResponseMetadata']['HTTPStatusCode'] == 204
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"]
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":azp\":\""+azp+"\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
)
bucket_name = get_new_bucket_name()
s3bucket = s3_client.create_bucket(Bucket=bucket_name)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
bkt = s3_client.delete_bucket(Bucket=bucket_name)
- eq(bkt['ResponseMetadata']['HTTPStatusCode'],204)
+ assert bkt['ResponseMetadata']['HTTPStatusCode'] == 204
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"]
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
)
bucket_name = get_new_bucket_name()
s3bucket = s3_client.create_bucket(Bucket=bucket_name)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
bkt = s3_client.delete_bucket(Bucket=bucket_name)
- eq(bkt['ResponseMetadata']['HTTPStatusCode'],204)
+ assert bkt['ResponseMetadata']['HTTPStatusCode'] == 204
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"]
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"StringEquals\":{\"aws:PrincipalTag/Department\":\"Engineering\"}}}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
)
bucket_name = get_new_bucket_name()
s3bucket = s3_client.create_bucket(Bucket=bucket_name)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
bkt = s3_client.delete_bucket(Bucket=bucket_name)
- eq(bkt['ResponseMetadata']['HTTPStatusCode'],204)
+ assert bkt['ResponseMetadata']['HTTPStatusCode'] == 204
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"]
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"ForAllValues:StringEquals\":{\"aws:PrincipalTag/Department\":[\"Engineering\",\"Marketing\"]}}}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
)
bucket_name = get_new_bucket_name()
s3bucket = s3_client.create_bucket(Bucket=bucket_name)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
bkt = s3_client.delete_bucket(Bucket=bucket_name)
- eq(bkt['ResponseMetadata']['HTTPStatusCode'],204)
+ assert bkt['ResponseMetadata']['HTTPStatusCode'] == 204
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"]
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
#ForAllValues: The condition returns true if every key value in the request matches at least one value in the policy
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"ForAllValues:StringEquals\":{\"aws:PrincipalTag/Department\":[\"Engineering\"]}}}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
s3bucket = s3_client.create_bucket(Bucket=bucket_name)
except ClientError as e:
s3bucket_error = e.response.get("Error", {}).get("Code")
- eq(s3bucket_error,'AccessDenied')
+ assert s3bucket_error == 'AccessDenied'
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"]
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:TagKeys\":\"Department\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"ForAnyValue:StringEquals\":{\"aws:PrincipalTag/Department\":[\"Engineering\"]}}}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
)
bucket_name = get_new_bucket_name()
s3bucket = s3_client.create_bucket(Bucket=bucket_name)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
bkt = s3_client.delete_bucket(Bucket=bucket_name)
- eq(bkt['ResponseMetadata']['HTTPStatusCode'],204)
+ assert bkt['ResponseMetadata']['HTTPStatusCode'] == 204
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"]
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"StringEquals\":{\"aws:TagKeys\":[\"Department\"]}}}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
)
bucket_name = get_new_bucket_name()
s3bucket = s3_client.create_bucket(Bucket=bucket_name)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
bkt = s3_client.delete_bucket(Bucket=bucket_name)
- eq(bkt['ResponseMetadata']['HTTPStatusCode'],204)
+ assert bkt['ResponseMetadata']['HTTPStatusCode'] == 204
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"]
bucket_name = get_new_bucket_name()
s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
bucket_tagging = s3_res_iam_creds.BucketTagging(bucket_name)
Set_Tag = bucket_tagging.put(Tagging={'TagSet':[{'Key':'Department', 'Value': 'Engineering'},{'Key':'Department', 'Value': 'Marketing'}]})
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"StringEquals\":{\"s3:ResourceTag/Department\":[\"Engineering\"]}}}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
bucket_body = 'this is a test file'
s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name, Key="test-1.txt")
- eq(s3_put_obj['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3_put_obj['ResponseMetadata']['HTTPStatusCode'] == 200
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"]
bucket_name = get_new_bucket_name()
s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
oidc_response = iam_client.create_open_id_connect_provider(
Url='http://localhost:8080/auth/realms/{}'.format(realm),
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"StringEquals\":{\"s3:ResourceTag/Department\":[\"Engineering\"]}}}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name, Key="test-1.txt")
except ClientError as e:
s3_put_obj_error = e.response.get("Error", {}).get("Code")
- eq(s3_put_obj_error,'AccessDenied')
+ assert s3_put_obj_error == 'AccessDenied'
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"]
bucket_name = get_new_bucket_name()
s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
bucket_tagging = s3_res_iam_creds.BucketTagging(bucket_name)
Set_Tag = bucket_tagging.put(Tagging={'TagSet':[{'Key':'Department', 'Value': 'WrongResourcetag'}]})
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"StringEquals\":{\"s3:ResourceTag/Department\":[\"Engineering\"]}}}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name, Key="test-1.txt")
except ClientError as e:
s3_put_obj_error = e.response.get("Error", {}).get("Code")
- eq(s3_put_obj_error,'AccessDenied')
+ assert s3_put_obj_error == 'AccessDenied'
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"]
bucket_name = get_new_bucket_name()
s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
bucket_tagging = s3_res_iam_creds.BucketTagging(bucket_name)
Set_Tag = bucket_tagging.put(Tagging={'TagSet':[{'Key':'Department', 'Value': 'Engineering'}]})
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"StringEquals\":{\"s3:ResourceTag/Department\":[\"${aws:PrincipalTag/Department}\"]}}}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
tags = 'Department=Engineering&Department=Marketing'
key = "test-1.txt"
s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name, Key=key, Tagging=tags)
- eq(s3_put_obj['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3_put_obj['ResponseMetadata']['HTTPStatusCode'] == 200
s3_get_obj = s3_client.get_object(Bucket=bucket_name, Key=key)
- eq(s3_get_obj['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3_get_obj['ResponseMetadata']['HTTPStatusCode'] == 200
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"]
#create two buckets and add same tags to both
bucket_name = get_new_bucket_name()
s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
bucket_tagging = s3_res_iam_creds.BucketTagging(bucket_name)
Set_Tag = bucket_tagging.put(Tagging={'TagSet':[{'Key':'Department', 'Value': 'Engineering'}]})
copy_bucket_name = get_new_bucket_name()
s3bucket = s3_client_iam_creds.create_bucket(Bucket=copy_bucket_name)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
bucket_tagging = s3_res_iam_creds.BucketTagging(copy_bucket_name)
Set_Tag = bucket_tagging.put(Tagging={'TagSet':[{'Key':'Department', 'Value': 'Engineering'}]})
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"StringEquals\":{\"s3:ResourceTag/Department\":[\"${aws:PrincipalTag/Department}\"]}}}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
tags = 'Department=Engineering'
key = "test-1.txt"
s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name, Key=key, Tagging=tags)
- eq(s3_put_obj['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3_put_obj['ResponseMetadata']['HTTPStatusCode'] == 200
#copy to same bucket
copy_source = {
s3_client.copy(copy_source, bucket_name, "test-2.txt")
s3_get_obj = s3_client.get_object(Bucket=bucket_name, Key="test-2.txt")
- eq(s3_get_obj['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3_get_obj['ResponseMetadata']['HTTPStatusCode'] == 200
#copy to another bucket
copy_source = {
s3_client.copy(copy_source, copy_bucket_name, "test-1.txt")
s3_get_obj = s3_client.get_object(Bucket=copy_bucket_name, Key="test-1.txt")
- eq(s3_get_obj['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3_get_obj['ResponseMetadata']['HTTPStatusCode'] == 200
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"]
bucket_name = get_new_bucket_name()
s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name)
- eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200
bucket_tagging = s3_res_iam_creds.BucketTagging(bucket_name)
Set_Tag = bucket_tagging.put(Tagging={'TagSet':[{'Key':'Department', 'Value': 'Engineering'},{'Key':'Department', 'Value': 'Marketing'}]})
]
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None,tags_list)
- eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
+ assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"StringEquals\":{\"s3:ResourceTag/Department\":[\"Engineering\"]}}}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
- eq(response['ResponseMetadata']['HTTPStatusCode'],200)
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token)
- eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
+ assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
s3_client = boto3.client('s3',
aws_access_key_id = resp['Credentials']['AccessKeyId'],
bucket_body = 'this is a test file'
s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name, Key="test-1.txt")
- eq(s3_put_obj['ResponseMetadata']['HTTPStatusCode'],200)
+ assert s3_put_obj['ResponseMetadata']['HTTPStatusCode'] == 200
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"]