From: Oleh Prypin
Date: Wed, 13 Jul 2016 08:20:31 +0000 (+0300)
Subject: tests: Make RGW tests compatible with Python 3
X-Git-Tag: ses5-milestone5~254^2~1
X-Git-Url: http://git-server-git.apps.pok.os.sepia.ceph.com/?a=commitdiff_plain;h=75c71b4e833315d63fd67cf0b8ff4d1a7c1a7855;p=ceph.git

tests: Make RGW tests compatible with Python 3

Signed-off-by: Oleh Prypin
---

diff --git a/src/test/rgw/test_multen.py b/src/test/rgw/test_multen.py
index e74396171a6f..91464d333ee7 100644
--- a/src/test/rgw/test_multen.py
+++ b/src/test/rgw/test_multen.py
@@ -3,8 +3,6 @@
 import json
 import sys
 
-from StringIO import StringIO
-
 from boto.s3.connection import S3Connection, OrdinaryCallingFormat
 
 # XXX once we're done, break out the common code into a library module
@@ -32,7 +30,7 @@ def test2(cluster):
         raise TestException("failed command: user create --uid %s" % uid)
 
     try:
-        outj = json.loads(out)
+        outj = json.loads(out.decode('utf-8'))
     except ValueError:
         raise TestException("invalid json after: user create --uid %s" % uid)
     if not isinstance(outj, dict):
@@ -63,7 +61,7 @@ def test3(cluster):
         raise TestException("failed command: user create --uid %s" % uid)
 
     try:
-        outj = json.loads(out)
+        outj = json.loads(out.decode('utf-8'))
     except ValueError:
         raise TestException("invalid json after: user create --uid %s" % uid)
     if not isinstance(outj, dict):
@@ -101,7 +99,7 @@ def test4(cluster):
         raise TestException("failed command: user create --uid %s" % uid)
 
     try:
-        outj = json.loads(out)
+        outj = json.loads(out.decode('utf-8'))
     except ValueError:
         raise TestException("invalid json after: user create --uid %s" % uid)
     if not isinstance(outj, dict):
@@ -125,7 +123,7 @@ def test4(cluster):
         raise TestException("failed command: key create --uid %s" % uid)
 
     try:
-        outj = json.loads(out)
+        outj = json.loads(out.decode('utf-8'))
     except ValueError:
         raise TestException("invalid json after: key create --uid %s" % uid)
     if not isinstance(outj, dict):
@@ -163,7 +161,7 @@ def test5_add_s3_key(cluster, tid, uid):
         raise TestException("failed command: key create --uid %s" % uid)
 
     try:
-        outj = json.loads(out)
+        outj = json.loads(out.decode('utf-8'))
     except ValueError:
         raise TestException("invalid json after: key create --uid %s" % uid)
     if not isinstance(outj, dict):
@@ -195,7 +193,7 @@ def test5_add_swift_key(cluster, tid, uid, subid):
         raise TestException("failed command: key create --uid %s" % uid)
 
     try:
-        outj = json.loads(out)
+        outj = json.loads(out.decode('utf-8'))
     except ValueError:
         raise TestException("invalid json after: key create --uid %s" % uid)
     if not isinstance(outj, dict):
@@ -238,7 +236,7 @@ def test5_make_user(cluster, tid, uid, subid):
     if ret != 0:
         raise TestException("failed command: user create --uid %s" % uid)
     try:
-        outj = json.loads(out)
+        outj = json.loads(out.decode('utf-8'))
     except ValueError:
         raise TestException("invalid json after: user create --uid %s" % uid)
     if not isinstance(outj, dict):
@@ -282,7 +280,7 @@ def test5_poke_s3(cluster):
 
     key = bucket.new_key(objname)
     headers = { "Content-Type": "text/plain" }
-    key.set_contents_from_string("Test5A\n", headers)
+    key.set_contents_from_string(b"Test5A\n", headers)
     key.set_acl('public-read')
 
     #
@@ -302,7 +300,7 @@ def test5_poke_s3(cluster):
 
     key = bucket.new_key(objname)
     headers = { "Content-Type": "text/plain" }
-    key.set_contents_from_string("Test5B\n", headers)
+    key.set_contents_from_string(b"Test5B\n", headers)
     key.set_acl('public-read')
 
     #
@@ -324,14 +322,14 @@ def test5_poke_s3(cluster):
 
     key = bucket.get_key(objname)
     body = key.get_contents_as_string()
-    if body != "Test5A\n":
+    if body != b"Test5A\n":
         raise TestException("failed body check, bucket %s object %s" %
             (bucketname, objname))
 
     bucket = c.get_bucket("test5b:"+bucketname)
     key = bucket.get_key(objname)
     body = key.get_contents_as_string()
-    if body != "Test5B\n":
+    if body != b"Test5B\n":
         raise TestException(
             "failed body check, tenant %s bucket %s object %s" %
             ("test5b", bucketname, objname))
diff --git a/src/test/rgw/test_multi.py b/src/test/rgw/test_multi.py
index 698c367601b0..950622c1484d 100644
--- a/src/test/rgw/test_multi.py
+++ b/src/test/rgw/test_multi.py
@@ -6,9 +6,14 @@ import string
 import argparse
 import sys
 import time
-import itertools
-
-import ConfigParser
+try:
+    from itertools import izip_longest as zip_longest
+except ImportError:
+    from itertools import zip_longest
+try:
+    import configparser
+except ImportError:
+    import ConfigParser as configparser
 
 import boto
 import boto.s3.connection
@@ -40,7 +45,7 @@ def log(level, *params):
         if p:
             s += str(p)
 
-    print s
+    print(s)
     sys.stdout.flush()
 
 def build_cmd(*params):
@@ -70,7 +75,7 @@ def bash(cmd, check_retcode = True):
     log(5, 'running cmd: ', cmd)
     process = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE)
     s = process.communicate()[0]
-    log(20, 'command returned status=', process.returncode, ' stdout=', s)
+    log(20, 'command returned status=', process.returncode, ' stdout=', s.decode('utf-8'))
    if check_retcode:
        assert(process.returncode == 0)
    return (s, process.returncode)
@@ -183,7 +188,7 @@ class RGWRealm:
         return self.zones[zone_name]
 
     def get_zones(self):
-        for (k, zone) in self.zones.iteritems():
+        for (k, zone) in self.zones.items():
             yield zone
 
     def meta_sync_status(self, zone):
@@ -197,6 +202,7 @@ class RGWRealm:
 
             assert(retcode == 2) # ENOENT
 
+        meta_sync_status_json = meta_sync_status_json.decode('utf-8')
         log(20, 'current meta sync status=', meta_sync_status_json)
         sync_status = json.loads(meta_sync_status_json)
 
@@ -208,14 +214,14 @@ class RGWRealm:
         assert(num_shards == len(sync_markers))
 
         markers={}
-        for i in xrange(num_shards):
+        for i in range(num_shards):
             markers[i] = sync_markers[i]['val']['marker']
 
         return (num_shards, markers)
 
     def meta_master_log_status(self, master_zone):
         (mdlog_status_json, retcode) = master_zone.cluster.rgw_admin_ro('--rgw-realm=' + self.realm + ' mdlog status')
-        mdlog_status = json.loads(mdlog_status_json)
+        mdlog_status = json.loads(mdlog_status_json.decode('utf-8'))
 
         markers={}
         i = 0
@@ -233,7 +239,7 @@ class RGWRealm:
             return False
 
         msg = ''
-        for i, l, s in zip(log_status, log_status.itervalues(), sync_status.itervalues()):
+        for i, l, s in zip(log_status, log_status.values(), sync_status.values()):
             if l > s:
                 if len(s) != 0:
                     msg += ', '
@@ -282,6 +288,7 @@ class RGWRealm:
 
             assert(retcode == 2) # ENOENT
 
+        data_sync_status_json = data_sync_status_json.decode('utf-8')
         log(20, 'current data sync status=', data_sync_status_json)
         sync_status = json.loads(data_sync_status_json)
 
@@ -293,7 +300,7 @@ class RGWRealm:
         assert(num_shards == len(sync_markers))
 
         markers={}
-        for i in xrange(num_shards):
+        for i in range(num_shards):
             markers[i] = sync_markers[i]['val']['marker']
 
         return (num_shards, markers)
@@ -313,6 +320,7 @@ class RGWRealm:
 
             assert(retcode == 2) # ENOENT
 
+        bucket_sync_status_json = bucket_sync_status_json.decode('utf-8')
         log(20, 'current bucket sync status=', bucket_sync_status_json)
         sync_status = json.loads(bucket_sync_status_json)
 
@@ -330,7 +338,7 @@ class RGWRealm:
     def data_source_log_status(self, source_zone):
         source_cluster = source_zone.cluster
         (datalog_status_json, retcode) = source_cluster.rgw_admin_ro('--rgw-realm=' + self.realm + ' datalog status')
-        datalog_status = json.loads(datalog_status_json)
+        datalog_status = json.loads(datalog_status_json.decode('utf-8'))
 
         markers={}
         i = 0
@@ -349,7 +357,7 @@ class RGWRealm:
             cmd += ' --tenant=' + user.tenant + ' --uid=' + user.uid
         source_cluster = source_zone.cluster
         (bilog_status_json, retcode) = source_cluster.rgw_admin_ro(cmd)
-        bilog_status = json.loads(bilog_status_json)
+        bilog_status = json.loads(bilog_status_json.decode('utf-8'))
 
         m={}
         markers={}
@@ -373,7 +381,7 @@ class RGWRealm:
             return False
 
         msg = ''
-        for i, l, s in zip(log_status, log_status.itervalues(), sync_status.itervalues()):
+        for i, l, s in zip(log_status, log_status.values(), sync_status.values()):
             if l > s:
                 if len(s) != 0:
                     msg += ', '
@@ -391,7 +399,7 @@ class RGWRealm:
             return False
 
         msg = ''
-        for i, l, s in zip(log_status, log_status.itervalues(), sync_status.itervalues()):
+        for i, l, s in zip(log_status, log_status.values(), sync_status.values()):
             if l > s:
                 if len(s) != 0:
                     msg += ', '
@@ -458,7 +466,7 @@ class RGWRealm:
     def set_master_zone(self, zone):
         (zg_json, retcode) = zone.cluster.rgw_admin('--rgw-realm=' + self.realm + ' --rgw-zonegroup=' + zone.zg + ' --rgw-zone=' + zone.zone_name + ' zone modify --master=1')
         (period_json, retcode) = zone.cluster.rgw_admin('--rgw-realm=' + self.realm + ' period update --commit')
-        self.master_zone = zone
+        self.master_zone = zone
 
 
 class RGWUser:
@@ -488,7 +496,7 @@ class RGWMulti:
         self.base_port = 8000
 
         self.clusters = {}
-        for i in xrange(num_clusters):
+        for i in range(num_clusters):
             self.clusters[i] = RGWCluster(i + 1, self.base_port + i)
 
     def setup(self, bootstrap, tenant):
@@ -504,11 +512,11 @@ class RGWMulti:
             self.clusters[0].start()
             realm.init_zone(self.clusters[0], 'us', 'us-1', self.base_port)
 
-            for i in xrange(1, self.num_clusters):
+            for i in range(1, self.num_clusters):
                 self.clusters[i].start()
                 realm.init_zone(self.clusters[i], 'us', 'us-' + str(i + 1), self.base_port)
         else:
-            for i in xrange(0, self.num_clusters):
+            for i in range(0, self.num_clusters):
                 realm.add_zone(self.clusters[i], 'us', 'us-' + str(i + 1), (i == 0))
 
         realm.meta_checkpoint()
@@ -590,7 +598,7 @@ def test_bucket_remove():
     for zone in realm.get_zones():
         assert check_all_buckets_exist(zone, buckets)
 
-    for zone, bucket_name in zone_bucket.iteritems():
+    for zone, bucket_name in zone_bucket.items():
         conn = zone.get_connection(user)
         conn.delete_bucket(bucket_name)
 
@@ -645,7 +653,7 @@ def check_bucket_eq(zone1, zone2, bucket_name):
     for o in b2.get_all_versions():
         log(20, 'o=', o.name)
 
-    for k1, k2 in itertools.izip_longest(b1.get_all_versions(), b2.get_all_versions()):
+    for k1, k2 in zip_longest(b1.get_all_versions(), b2.get_all_versions()):
         if k1 is None:
             log(0, 'failure: key=', k2.name, ' is missing from zone=', zone1.zone_name)
             assert False
@@ -675,14 +683,14 @@ def test_object_sync():
     content = 'asdasd'
 
     # don't wait for meta sync just yet
-    for zone, bucket_name in zone_bucket.iteritems():
+    for zone, bucket_name in zone_bucket.items():
         for objname in objnames:
             k = new_key(zone, bucket_name, objname)
             k.set_contents_from_string(content)
 
     realm.meta_checkpoint()
 
-    for source_zone, bucket in zone_bucket.iteritems():
+    for source_zone, bucket in zone_bucket.items():
         for target_zone in all_zones:
             if source_zone.zone_name == target_zone.zone_name:
                 continue
@@ -702,14 +710,14 @@ def test_object_delete():
     content = 'asdasd'
 
     # don't wait for meta sync just yet
-    for zone, bucket in zone_bucket.iteritems():
+    for zone, bucket in zone_bucket.items():
         k = new_key(zone, bucket, objname)
         k.set_contents_from_string(content)
 
     realm.meta_checkpoint()
 
     # check object exists
-    for source_zone, bucket in zone_bucket.iteritems():
+    for source_zone, bucket in zone_bucket.items():
         for target_zone in all_zones:
             if source_zone.zone_name == target_zone.zone_name:
                 continue
@@ -719,7 +727,7 @@ def test_object_delete():
             check_bucket_eq(source_zone, target_zone, bucket)
 
     # check object removal
-    for source_zone, bucket in zone_bucket.iteritems():
+    for source_zone, bucket in zone_bucket.items():
         k = get_key(source_zone, bucket, objname)
         k.delete()
         for target_zone in all_zones:
@@ -741,7 +749,7 @@ def test_multi_period_incremental_sync():
     for z in zone_bucket:
         all_zones.append(z)
 
-    for zone, bucket_name in zone_bucket.iteritems():
+    for zone, bucket_name in zone_bucket.items():
         for objname in [ 'p1', '_p1' ]:
             k = new_key(zone, bucket_name, objname)
             k.set_contents_from_string('asdasd')
@@ -754,7 +762,7 @@ def test_multi_period_incremental_sync():
     # change master to zone 2 -> period 2
    realm.set_master_zone(realm.get_zone('us-2'))
 
-    for zone, bucket_name in zone_bucket.iteritems():
+    for zone, bucket_name in zone_bucket.items():
         if zone == z3:
             continue
         for objname in [ 'p2', '_p2' ]:
@@ -767,7 +775,7 @@ def test_multi_period_incremental_sync():
     # change master back to zone 1 -> period 3
     realm.set_master_zone(realm.get_zone('us-1'))
 
-    for zone, bucket_name in zone_bucket.iteritems():
+    for zone, bucket_name in zone_bucket.items():
         if zone == z3:
             continue
         for objname in [ 'p3', '_p3' ]:
@@ -779,7 +787,7 @@ def test_multi_period_incremental_sync():
     realm.meta_checkpoint()
 
     # verify that we end up with the same objects
-    for source_zone, bucket in zone_bucket.iteritems():
+    for source_zone, bucket in zone_bucket.items():
         for target_zone in all_zones:
             if source_zone.zone_name == target_zone.zone_name:
                 continue
@@ -809,7 +817,7 @@ def test_zonegroup_remove():
     realm.remove_zone('us-2')
 
 def init(parse_args):
-    cfg = ConfigParser.RawConfigParser({
+    cfg = configparser.RawConfigParser({
         'num_zones': 3,
         'no_bootstrap': 'false',
         'log_level': 20,
@@ -821,10 +829,10 @@ def init(parse_args):
         path = tpath('test_multi.conf')
 
     try:
-        with file(path) as f:
+        with open(path) as f:
             cfg.readfp(f)
     except:
-        print 'WARNING: error reading test config. Path can be set through the RGW_MULTI_TEST_CONF env variable'
+        print('WARNING: error reading test config. Path can be set through the RGW_MULTI_TEST_CONF env variable')
         pass
 
     parser = argparse.ArgumentParser(
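
Note: the snippet below is not part of the patch; it is a minimal standalone sketch of the Python 2/3 compatibility idioms the diff applies (decoding subprocess output before json.loads(), a configparser import fallback, dict.items() instead of iteritems(), and print() as a function). The load_json_output helper name is illustrative only, not from the test suite.

import json
import subprocess

try:
    import configparser                      # Python 3
except ImportError:
    import ConfigParser as configparser      # Python 2 fallback, mirroring the patch

def load_json_output(cmd):
    # subprocess output is bytes under Python 3; decode before json.loads()
    out = subprocess.check_output(cmd)
    return json.loads(out.decode('utf-8'))

markers = {0: 'marker-a', 1: 'marker-b'}
for shard, marker in markers.items():        # .items() works on both Python 2 and 3
    print(shard, marker)                     # print() as a function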