ups = std::make_unique<RGWUserPubSub>(store, s->owner.get_id());
op_ret = ups->create_topic(topic_name, dest, topic_arn);
if (op_ret < 0) {
- ldout(s->cct, 1) << "failed to create topic, ret=" << op_ret << dendl;
+ ldout(s->cct, 1) << "failed to create topic '" << topic_name << "', ret=" << op_ret << dendl;
return;
}
+ ldout(s->cct, 20) << "successfully created topic '" << topic_name << "'" << dendl;
}
// command: PUT /topics/<topic-name>[&push-endpoint=<endpoint>[&<arg1>=<value1>]]
ldout(s->cct, 1) << "failed to get topics, ret=" << op_ret << dendl;
return;
}
+ ldout(s->cct, 20) << "successfully got topics" << dendl;
}
// command: GET /topics
ldout(s->cct, 1) << "failed to get topic '" << topic_name << "', ret=" << op_ret << dendl;
return;
}
+ ldout(s->cct, 20) << "successfully got topic '" << topic_name << "'" << dendl;
}
// command: GET /topics/<topic-name>
ups = std::make_unique<RGWUserPubSub>(store, s->owner.get_id());
op_ret = ups->remove_topic(topic_name);
if (op_ret < 0) {
- ldout(s->cct, 1) << "failed to remove topic, ret=" << op_ret << dendl;
+ ldout(s->cct, 1) << "failed to remove topic '" << topic_name << "', ret=" << op_ret << dendl;
return;
}
+ ldout(s->cct, 20) << "successfully removed topic '" << topic_name << "'" << dendl;
}
// command: DELETE /topics/<topic-name>
auto sub = ups->get_sub(sub_name);
op_ret = sub->subscribe(topic_name, dest);
if (op_ret < 0) {
- ldout(s->cct, 1) << "failed to create subscription, ret=" << op_ret << dendl;
+ ldout(s->cct, 1) << "failed to create subscription '" << sub_name << "', ret=" << op_ret << dendl;
return;
}
+ ldout(s->cct, 20) << "successfully created subscription '" << sub_name << "'" << dendl;
}
// command: PUT /subscriptions/<sub-name>?topic=<topic-name>[&push-endpoint=<endpoint>[&<arg1>=<value1>]]...
auto sub = ups->get_sub(sub_name);
op_ret = sub->get_conf(&result);
if (op_ret < 0) {
- ldout(s->cct, 1) << "failed to get subscription, ret=" << op_ret << dendl;
+ ldout(s->cct, 1) << "failed to get subscription '" << sub_name << "', ret=" << op_ret << dendl;
return;
}
+ ldout(s->cct, 20) << "successfully got subscription '" << sub_name << "'" << dendl;
}
// command: GET /subscriptions/<sub-name>
auto sub = ups->get_sub(sub_name);
op_ret = sub->unsubscribe(topic_name);
if (op_ret < 0) {
- ldout(s->cct, 1) << "failed to remove subscription, ret=" << op_ret << dendl;
+ ldout(s->cct, 1) << "failed to remove subscription '" << sub_name << "', ret=" << op_ret << dendl;
return;
}
+ ldout(s->cct, 20) << "successfully removed subscription '" << sub_name << "'" << dendl;
}
// command: DELETE /subscriptions/<sub-name>
auto sub = ups->get_sub_with_events(sub_name);
op_ret = sub->remove_event(event_id);
if (op_ret < 0) {
- ldout(s->cct, 1) << "failed to ack event, ret=" << op_ret << dendl;
+ ldout(s->cct, 1) << "failed to ack event on subscription '" << sub_name << "', ret=" << op_ret << dendl;
return;
}
+ ldout(s->cct, 20) << "successfully acked event on subscription '" << sub_name << "'" << dendl;
}
// command: POST /subscriptions/<sub-name>?ack&event-id=<event-id>
sub = ups->get_sub_with_events(sub_name);
if (!sub) {
op_ret = -ENOENT;
- ldout(s->cct, 1) << "failed to get subscription, ret=" << op_ret << dendl;
+ ldout(s->cct, 1) << "failed to get subscription '" << sub_name << "' for events, ret=" << op_ret << dendl;
return;
}
op_ret = sub->list_events(marker, max_entries);
if (op_ret < 0) {
- ldout(s->cct, 1) << "failed to get subscription events, ret=" << op_ret << dendl;
+ ldout(s->cct, 1) << "failed to get events from subscription '" << sub_name << "', ret=" << op_ret << dendl;
return;
}
+ ldout(s->cct, 20) << "successfully got events from subscription '" << sub_name << "'" << dendl;
}
// command: GET /subscriptions/<sub-name>?events[&max-entries=<max-entries>][&marker=<marker>]
auto b = ups->get_bucket(bucket_info.bucket);
op_ret = b->create_notification(topic_name, events);
if (op_ret < 0) {
- ldout(s->cct, 1) << "failed to create notification, ret=" << op_ret << dendl;
+ ldout(s->cct, 1) << "failed to create notification for topic '" << topic_name << "', ret=" << op_ret << dendl;
return;
}
+ ldout(s->cct, 20) << "successfully created notification for topic '" << topic_name << "'" << dendl;
}
namespace {
dest.bucket_name = string(conf["data_bucket_prefix"]) + s->owner.get_id().to_str() + "-" + unique_topic_name;
dest.oid_prefix = string(conf["data_oid_prefix"]) + sub_name + "/";
auto sub = ups->get_sub(sub_name);
- op_ret = sub->subscribe(unique_topic_name, dest, c.id);
+ op_ret = sub->subscribe(unique_topic_name, dest, sub_name);
if (op_ret < 0) {
ldout(s->cct, 1) << "failed to auto-generate subscription '" << sub_name << "', ret=" << op_ret << dendl;
// rollback generated notification (ignore return value)
ups->remove_topic(unique_topic_name);
return;
}
+ ldout(s->cct, 20) << "successfully auto-generated subscription '" << sub_name << "'" << dendl;
}
}
auto b = ups->get_bucket(bucket_info.bucket);
op_ret = b->remove_notification(topic_name);
if (op_ret < 0) {
- ldout(s->cct, 1) << "failed to remove notification, ret=" << op_ret << dendl;
+ ldout(s->cct, 1) << "failed to remove notification from topic '" << topic_name << "', ret=" << op_ret << dendl;
return;
}
+ ldout(s->cct, 20) << "successfully removed notification from topic '" << topic_name << "'" << dendl;
}
// command (extension to S3): DELETE /bucket?notification[=<notification-id>]
# create s3 notification
notification_name = bucket_name + NOTIFICATION_SUFFIX
generated_topic_name = notification_name+'_'+topic_name
- s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name,
- notification_name, topic_arn, ['s3:ObjectCreated:*'])
- response, status = s3_notification_conf.set_config()
+ topic_conf_list = [{'Id': notification_name,
+ 'TopicArn': topic_arn,
+ 'Events': ['s3:ObjectCreated:*']
+ }]
+ s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name, topic_conf_list)
+ _, status = s3_notification_conf.set_config()
assert_equal(status/100, 2)
zone_meta_checkpoint(ps_zones[0].zone)
# get auto-generated topic
assert_equal(status/100, 2)
assert_equal(parsed_result['topic'], generated_topic_name)
# delete s3 notification
- _, status = s3_notification_conf.del_config(all_notifications=False)
+ _, status = s3_notification_conf.del_config(notification=notification_name)
assert_equal(status/100, 2)
# delete topic
_, status = topic_conf.del_config()
topic_arn = parsed_result['arn']
# create s3 notification
notification_name = bucket_name + NOTIFICATION_SUFFIX
- s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name,
- notification_name, topic_arn, ['s3:ObjectCreated:*'])
- response, status = s3_notification_conf.set_config()
+ topic_conf_list = [{'Id': notification_name,
+ 'TopicArn': topic_arn,
+ 'Events': ['s3:ObjectCreated:*']
+ }]
+ s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name, topic_conf_list)
+ _, status = s3_notification_conf.set_config()
assert_equal(status/100, 2)
zone_meta_checkpoint(ps_zones[0].zone)
# get auto-generated subscription
topic_arn = parsed_result['arn']
# create one s3 notification
notification_name1 = bucket_name + NOTIFICATION_SUFFIX + '_1'
- s3_notification_conf1 = PSNotificationS3(ps_zones[0].conn, bucket_name,
- notification_name1, topic_arn, ['s3:ObjectCreated:*'])
+ topic_conf_list = [{'Id': notification_name1,
+ 'TopicArn': topic_arn,
+ 'Events': ['s3:ObjectCreated:*']
+ }]
+ s3_notification_conf1 = PSNotificationS3(ps_zones[0].conn, bucket_name, topic_conf_list)
response, status = s3_notification_conf1.set_config()
assert_equal(status/100, 2)
- # create another s3 notification
+ # create another s3 notification with the same topic
notification_name2 = bucket_name + NOTIFICATION_SUFFIX + '_2'
-
- s3_notification_conf2 = PSNotificationS3(ps_zones[0].conn, bucket_name,
- notification_name2, topic_arn, ['s3:ObjectCreated:*', 's3:ObjectRemoved:*'])
+ topic_conf_list = [{'Id': notification_name2,
+ 'TopicArn': topic_arn,
+ 'Events': ['s3:ObjectCreated:*', 's3:ObjectRemoved:*']
+ }]
+ s3_notification_conf2 = PSNotificationS3(ps_zones[0].conn, bucket_name, topic_conf_list)
response, status = s3_notification_conf2.set_config()
assert_equal(status/100, 2)
zone_meta_checkpoint(ps_zones[0].zone)
assert_equal(response['TopicConfigurations'][1]['TopicArn'], topic_arn)
# get specific notification on a bucket
- response, status = s3_notification_conf1.get_config(all_notifications=False)
+ response, status = s3_notification_conf1.get_config(notification=notification_name1)
assert_equal(status/100, 2)
assert_equal(response['NotificationConfiguration']['TopicConfiguration']['Topic'], topic_arn)
assert_equal(response['NotificationConfiguration']['TopicConfiguration']['Id'], notification_name1)
- response, status = s3_notification_conf2.get_config(all_notifications=False)
+ response, status = s3_notification_conf2.get_config(notification=notification_name2)
assert_equal(status/100, 2)
assert_equal(response['NotificationConfiguration']['TopicConfiguration']['Topic'], topic_arn)
assert_equal(response['NotificationConfiguration']['TopicConfiguration']['Id'], notification_name2)
# delete specific notifications
- _, status = s3_notification_conf1.del_config(all_notifications=False)
+ _, status = s3_notification_conf1.del_config(notification=notification_name1)
assert_equal(status/100, 2)
- _, status = s3_notification_conf2.del_config(all_notifications=False)
+ _, status = s3_notification_conf2.del_config(notification=notification_name2)
assert_equal(status/100, 2)
# cleanup
_, status = topic_conf.del_config()
assert_equal(status/100, 2)
# verify topic is deleted
- result, _ = topic_conf.get_config()
+ result, status = topic_conf.get_config()
+ assert_equal(status, 404)
parsed_result = json.loads(result)
assert_equal(parsed_result['Code'], 'NoSuchKey')
zone_meta_checkpoint(ps_zones[0].zone)
# create s3 notification
notification_name = bucket_name + NOTIFICATION_SUFFIX
- s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name,
- notification_name, topic_arn, ['s3:ObjectCreated:*'])
+ topic_conf_list = [{'Id': notification_name,
+ 'TopicArn': topic_arn,
+ 'Events': ['s3:ObjectCreated:*']
+ }]
+ s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name, topic_conf_list)
_, status = s3_notification_conf.set_config()
assert_equal(status/100, 2)
# create objects in the bucket
def test_ps_s3_push_amqp():
- """ test pushing to amqp endpoint n s3 record format"""
+ """ test pushing to amqp endpoint in s3 record format"""
return SkipTest("PubSub push tests are only manual")
zones, ps_zones = init_env()
bucket_name = gen_bucket_name()
endpoint_args='amqp-exchange=ex1&amqp-ack-level=none')
result, status = topic_conf.set_config()
assert_equal(status/100, 2)
- topic_arn = 'arn:aws:sns:::' + topic_name
+ parsed_result = json.loads(result)
+ topic_arn = parsed_result['arn']
# create bucket on the first of the rados zones
bucket = zones[0].create_bucket(bucket_name)
# wait for sync
zone_meta_checkpoint(ps_zones[0].zone)
# create s3 notification
notification_name = bucket_name + NOTIFICATION_SUFFIX
- s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name,
- notification_name, topic_arn, ['s3:ObjectCreated:*'])
+ topic_conf_list = [{'Id': notification_name,
+ 'TopicArn': topic_arn,
+ 'Events': ['s3:ObjectCreated:*']
+ }]
+ s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name, topic_conf_list)
_, status = s3_notification_conf.set_config()
assert_equal(status/100, 2)
# create objects in the bucket
topic_arn = parsed_result['arn']
# create one s3 notification
notification_name = bucket_name + NOTIFICATION_SUFFIX
- s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name,
- notification_name, topic_arn, ['s3:ObjectCreated:*'])
+ topic_conf_list = [{'Id': notification_name,
+ 'TopicArn': topic_arn,
+ 'Events': ['s3:ObjectCreated:*']
+ }]
+ s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name, topic_conf_list)
response, status = s3_notification_conf.set_config()
assert_equal(status/100, 2)
verify_s3_records_by_elements(parsed_result['Records'], keys, exact_match=False)
# s3 notification is deleted with bucket
- _, status = s3_notification_conf.get_config(all_notifications=False)
+ _, status = s3_notification_conf.get_config(notification=notification_name)
assert_equal(status, 404)
# non-s3 notification is deleted with bucket
_, status = notification_conf.get_config()
# create s3 notification
notification_name = bucket_name + NOTIFICATION_SUFFIX
topic_arn = 'arn:aws:sns:::' + topic_name
- s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name,
- notification_name, topic_arn, ['s3:ObjectCreated:*'])
+ topic_conf_list = [{'Id': notification_name,
+ 'TopicArn': topic_arn,
+ 'Events': ['s3:ObjectCreated:*']
+ }]
+ s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name, topic_conf_list)
try:
s3_notification_conf.set_config()
except:
# cleanup
zones[0].delete_bucket(bucket_name)
+
+
+def test_ps_s3_topic_update():
+ """ test updating topic associated with a notification"""
+ return SkipTest("PubSub push tests are only manual")
+ zones, ps_zones = init_env()
+ bucket_name = gen_bucket_name()
+ topic_name = bucket_name+TOPIC_SUFFIX
+
+ # create topic
+ dest_endpoint1 = 'amqp://localhost'
+ dest_args1 = 'amqp-exchange=ex1&amqp-ack-level=none'
+ dest_endpoint2 = 'http://localhost:9001'
+ topic_conf = PSTopic(ps_zones[0].conn, topic_name,
+ endpoint=dest_endpoint1,
+ endpoint_args=dest_args1)
+ result, status = topic_conf.set_config()
+ parsed_result = json.loads(result)
+ topic_arn = parsed_result['arn']
+ assert_equal(status/100, 2)
+ # get topic
+ result, _ = topic_conf.get_config()
+ # verify topic content
+ parsed_result = json.loads(result)
+ assert_equal(parsed_result['topic']['name'], topic_name)
+ assert_equal(parsed_result['topic']['dest']['push_endpoint'], dest_endpoint1)
+
+ # create bucket on the first of the rados zones
+ bucket = zones[0].create_bucket(bucket_name)
+ # wait for sync
+ zone_meta_checkpoint(ps_zones[0].zone)
+ # create s3 notification
+ notification_name = bucket_name + NOTIFICATION_SUFFIX
+ topic_conf_list = [{'Id': notification_name,
+ 'TopicArn': topic_arn,
+ 'Events': ['s3:ObjectCreated:*']
+ }]
+ s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name, topic_conf_list)
+ _, status = s3_notification_conf.set_config()
+ assert_equal(status/100, 2)
+ # create objects in the bucket
+ number_of_objects = 10
+ for i in range(number_of_objects):
+ key = bucket.new_key(str(i))
+ key.set_contents_from_string('bar')
+ # wait for sync
+ zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
+
+ # TODO: check update to amqp
+
+ # update the same topic
+ topic_conf = PSTopic(ps_zones[0].conn, topic_name,
+ endpoint=dest_endpoint2)
+ _, status = topic_conf.set_config()
+ assert_equal(status/100, 2)
+ # get topic
+ result, _ = topic_conf.get_config()
+ # verify topic content
+ parsed_result = json.loads(result)
+ assert_equal(parsed_result['topic']['name'], topic_name)
+ assert_equal(parsed_result['topic']['dest']['push_endpoint'], dest_endpoint2)
+
+ # create more objects in the bucket
+ number_of_objects = 10
+ for i in range(number_of_objects):
+ key = bucket.new_key(str(i+100))
+ key.set_contents_from_string('bar')
+ # wait for sync
+ zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
+
+ # TODO: check it is still updating amqp
+
+ # update notification to update the endpoint from the topic
+ topic_conf_list = [{'Id': notification_name,
+ 'TopicArn': topic_arn,
+ 'Events': ['s3:ObjectCreated:*']
+ }]
+ s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name, topic_conf_list)
+ _, status = s3_notification_conf.set_config()
+ assert_equal(status/100, 2)
+ # create even more objects in the bucket
+ number_of_objects = 10
+ for i in range(number_of_objects):
+ key = bucket.new_key(str(i+200))
+ key.set_contents_from_string('bar')
+ # wait for sync
+ zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
+
+ # TODO: check that updates switched to http
+
+ # cleanup
+ # delete objects from the bucket
+ for key in bucket.list():
+ key.delete()
+ s3_notification_conf.del_config()
+ topic_conf.del_config()
+ zones[0].delete_bucket(bucket_name)
+
+
+def test_ps_s3_notification_update():
+ """ test updating the topic of a notification"""
+ return SkipTest("PubSub push tests are only manual")
+ zones, ps_zones = init_env()
+ bucket_name = gen_bucket_name()
+ topic_name1 = bucket_name+'amqp'+TOPIC_SUFFIX
+
+ # create first topic
+ dest_endpoint1 = 'amqp://localhost'
+ dest_args1 = 'amqp-exchange=ex1&amqp-ack-level=none'
+ topic_conf1 = PSTopic(ps_zones[0].conn, topic_name1,
+ endpoint=dest_endpoint1,
+ endpoint_args=dest_args1)
+ result, status = topic_conf1.set_config()
+ parsed_result = json.loads(result)
+ topic_arn1 = parsed_result['arn']
+ assert_equal(status/100, 2)
+
+ # create bucket on the first of the rados zones
+ bucket = zones[0].create_bucket(bucket_name)
+ # wait for sync
+ zone_meta_checkpoint(ps_zones[0].zone)
+ # create s3 notification
+ notification_name = bucket_name + NOTIFICATION_SUFFIX
+ topic_conf_list = [{'Id': notification_name,
+ 'TopicArn': topic_arn1,
+ 'Events': ['s3:ObjectCreated:*']
+ }]
+ s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name, topic_conf_list)
+ _, status = s3_notification_conf.set_config()
+ assert_equal(status/100, 2)
+ # create objects in the bucket
+ number_of_objects = 10
+ for i in range(number_of_objects):
+ key = bucket.new_key(str(i))
+ key.set_contents_from_string('bar')
+ # wait for sync
+ zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
+ result, _ = s3_notification_conf.get_config()
+
+ # TODO: check updates to amqp
+
+ # create another topic
+ topic_name2 = bucket_name+'http'+TOPIC_SUFFIX
+ dest_endpoint2 = 'http://localhost:9001'
+ topic_conf2 = PSTopic(ps_zones[0].conn, topic_name2,
+ endpoint=dest_endpoint2)
+ result, status = topic_conf2.set_config()
+ parsed_result = json.loads(result)
+ topic_arn2 = parsed_result['arn']
+ assert_equal(status/100, 2)
+
+ # update notification to the new topic
+ topic_conf_list = [{'Id': notification_name,
+ 'TopicArn': topic_arn2,
+ 'Events': ['s3:ObjectCreated:*']
+ }]
+ s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name, topic_conf_list)
+ _, status = s3_notification_conf.set_config()
+ assert_equal(status/100, 2)
+ # create more objects in the bucket
+ number_of_objects = 10
+ for i in range(number_of_objects):
+ key = bucket.new_key(str(i+200))
+ key.set_contents_from_string('bar')
+ # wait for sync
+ zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
+
+ # TODO: check update to http
+ result, _ = s3_notification_conf.get_config()
+
+ # cleanup
+ # delete objects from the bucket
+ for key in bucket.list():
+ key.delete()
+ s3_notification_conf.del_config()
+ topic_conf1.del_config()
+ topic_conf2.del_config()
+ zones[0].delete_bucket(bucket_name)
+
+
+def test_ps_s3_multiple_topics_notification():
+ """ test notification creation with multiple topics"""
+ zones, ps_zones = init_env()
+ bucket_name = gen_bucket_name()
+ topic_name1 = bucket_name+'amqp'+TOPIC_SUFFIX
+ topic_name2 = bucket_name+'http'+TOPIC_SUFFIX
+
+ # create topics
+ dest_endpoint1 = 'amqp://localhost'
+ dest_args1 = 'amqp-exchange=ex1&amqp-ack-level=none'
+ dest_endpoint2 = 'http://localhost:9001'
+ topic_conf1 = PSTopic(ps_zones[0].conn, topic_name1,
+ endpoint=dest_endpoint1,
+ endpoint_args=dest_args1)
+ result, status = topic_conf1.set_config()
+ parsed_result = json.loads(result)
+ topic_arn1 = parsed_result['arn']
+ assert_equal(status/100, 2)
+ topic_conf2 = PSTopic(ps_zones[0].conn, topic_name2,
+ endpoint=dest_endpoint2)
+ result, status = topic_conf2.set_config()
+ parsed_result = json.loads(result)
+ topic_arn2 = parsed_result['arn']
+ assert_equal(status/100, 2)
+
+ # create bucket on the first of the rados zones
+ bucket = zones[0].create_bucket(bucket_name)
+ # wait for sync
+ zone_meta_checkpoint(ps_zones[0].zone)
+ # create s3 notification
+ notification_name = bucket_name + NOTIFICATION_SUFFIX
+ topic_conf_list = [
+ {
+ 'Id': notification_name + '_1',
+ 'TopicArn': topic_arn1,
+ 'Events': ['s3:ObjectCreated:*']
+ },
+ {
+ 'Id': notification_name + '_2',
+ 'TopicArn': topic_arn2,
+ 'Events': ['s3:ObjectCreated:*']
+ }]
+ s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name, topic_conf_list)
+ _, status = s3_notification_conf.set_config()
+ assert_equal(status/100, 2)
+ result, _ = s3_notification_conf.get_config()
+ print('first try')
+ print(result)
+ # FIXME: this is currently failing
+ #assert_equal(len(result['TopicConfigurations']), 2)
+
+ _, status = s3_notification_conf.set_config()
+ assert_equal(status/100, 2)
+ result, _ = s3_notification_conf.get_config()
+ print('second try')
+ print(result)
+ assert_equal(len(result['TopicConfigurations']), 2)
+
+ # cleanup
+ s3_notification_conf.del_config()
+ topic_conf1.del_config()
+ topic_conf2.del_config()
+ zones[0].delete_bucket(bucket_name)