pass
class ZoneNotFound(ClientException):
    """Raised when a requested zone cannot be found."""
    pass
class BucketEmpty(ClientException):
    """Raised when listing a bucket fails with HTTP 404.

    Used by list_objects_in_bucket() so callers can distinguish a
    missing/empty bucket from other client errors.
    """
    pass
def parse_endpoint(endpoint):
url = urlparse(endpoint)
def list_objects_in_bucket(connection, bucket_name):
    """Yield the name of each key in bucket *bucket_name*.

    :param connection: a boto S3 connection (anything with get_bucket())
    :param bucket_name: name of the bucket to list
    :raises BucketEmpty: if the listing fails with HTTP 404

    Because this is a generator, any listing error surfaces when the
    result is iterated, not when this function is called.
    """
    # use the boto library to do this
    bucket = connection.get_bucket(bucket_name)
    try:
        for key in bucket.list():
            yield key.name
    except boto.exception.S3ResponseError as e:
        # since this is a generator, the exception will be raised when
        # it's read, rather than when this call returns, so raise a
        # unique exception to distinguish this from client errors from
        # other calls
        if e.status == 404:
            raise BucketEmpty()
        else:
            raise
@boto_call
exc_message = exc.exconly()
assert 'state is error' in exc_message
+
+ def test_sync_bucket_delayed_not_found(self):
+ class fake_iterable(object):
+ def __iter__(self):
+ raise client.BucketEmpty
+ with patch('radosgw_agent.worker.client', self.client):
+ w = worker.DataWorker(None, None, None, self.src, None, daemon_id=1)
+ w.sync_object = lambda *a: None
+ objects = fake_iterable()
+ with py.test.raises(client.BucketEmpty):
+ w.sync_bucket('foo', objects)
log.debug('bucket instance is "%s" with marker %s', instance, marker)
objects = client.list_objects_in_bucket(self.src_conn, bucket)
- if not objects:
- return True
- except Exception as e:
- log.error('error preparing for full sync of bucket "%s": %s',
- bucket, e)
- return False
-
- retries = self.sync_bucket(bucket, objects)
+ retries = self.sync_bucket(bucket, objects)
- result = self.set_bound(instance, marker, retries, 'bucket-index')
- return not retries and result == RESULT_SUCCESS
+ result = self.set_bound(instance, marker, retries, 'bucket-index')
+ return not retries and result == RESULT_SUCCESS
+ except client.BucketEmpty:
+ log.debug('no objects in bucket %s', bucket)
+ return True
+ except Exception:
+ log.exception('error preparing for full sync of bucket "%s"',
+ bucket)
+ return False
def run(self):
self.prepare_lock()