]> git-server-git.apps.pok.os.sepia.ceph.com Git - teuthology.git/commitdiff
python3: use compatible urllib packages
author Kyr Shatskyy <kyrylo.shatskyy@gmail.com>
Fri, 11 Oct 2019 23:42:00 +0000 (01:42 +0200)
committer Kyr Shatskyy <kyrylo.shatskyy@suse.com>
Thu, 14 Nov 2019 15:00:20 +0000 (16:00 +0100)
Use py2/py3 compatible functions from the urllib packages.

Python3
    from urllib.parse import parse_qs, urljoin, urlparse, urlencode
    from urllib.request import urlopen, Request
    from urllib.error import HTTPError
Python2
    from urlparse import parse_qs, urljoin, urlparse
    from urllib import urlencode
    from urllib2 import urlopen, Request, HTTPError

Signed-off-by: Kyr Shatskyy <kyrylo.shatskyy@suse.com>
teuthology/lock/query.py
teuthology/misc.py
teuthology/packaging.py
teuthology/provision/cloud/openstack.py
teuthology/provision/cloud/test/test_openstack.py
teuthology/task/kernel.py
teuthology/task/pcp.py
teuthology/test/task/test_pcp.py

index 9dcdd77015d8f46468281f47a700f4d430b0523f..99e5deafb54b703a55336bd37af335db27607507 100644 (file)
@@ -1,6 +1,10 @@
 import logging
 import os
-import urllib
+
+try:
+    from urllib.parse import urlencode
+except ImportError:
+    from urllib import urlencode
 
 import requests
 
@@ -58,7 +62,7 @@ def list_locks(keyed_by_name=False, **kwargs):
     if kwargs:
         if 'machine_type' in kwargs:
             kwargs['machine_type'] = kwargs['machine_type'].replace(',','|')
-        uri += '?' + urllib.urlencode(kwargs)
+        uri += '?' + urlencode(kwargs)
     try:
         response = requests.get(uri)
     except requests.ConnectionError:
index 037755e5eb560835232174f67fb53d06b76b3aa3..f7fba973cd7ed40f911f85ea59d30d5b6e5ca64f 100644 (file)
@@ -13,13 +13,19 @@ import subprocess
 import sys
 import tarfile
 import time
-import urllib2
-import urlparse
 import yaml
 import json
 import re
 import pprint
 
+try:
+    from urllib.parse import urljoin
+    from urllib.request import urlopen
+    from urllib.error import HTTPError
+except ImportError:
+    from urlparse import urljoin
+    from urllib2 import urlopen,  HTTPError
+
 from netaddr.strategy.ipv4 import valid_str as _is_ipv4
 from netaddr.strategy.ipv6 import valid_str as _is_ipv6
 from teuthology import safepath
@@ -230,19 +236,19 @@ def get_ceph_binary_url(package=None,
                 branch = 'master'
             ref = branch
 
-        sha1_url = urlparse.urljoin(BASE, 'ref/{ref}/sha1'.format(ref=ref))
+        sha1_url = urljoin(BASE, 'ref/{ref}/sha1'.format(ref=ref))
         log.debug('Translating ref to sha1 using url %s', sha1_url)
 
         try:
-            sha1_fp = urllib2.urlopen(sha1_url)
+            sha1_fp = urlopen(sha1_url)
             sha1 = sha1_fp.read().rstrip('\n')
             sha1_fp.close()
-        except urllib2.HTTPError as e:
+        except HTTPError as e:
             log.error('Failed to get url %s', sha1_url)
             raise e
 
     log.debug('Using %s %s sha1 %s', package, format, sha1)
-    bindir_url = urlparse.urljoin(BASE, 'sha1/{sha1}/'.format(sha1=sha1))
+    bindir_url = urljoin(BASE, 'sha1/{sha1}/'.format(sha1=sha1))
     return (sha1, bindir_url)
 
 
index 6477c970873d62ec8f270fefebce78fed3b51d79..78e94338d549051e7f26894340bba5dd523f496a 100644 (file)
@@ -2,8 +2,12 @@ import logging
 import ast
 import re
 import requests
-import urllib
-import urlparse
+
+try:
+    from urllib.parse import urljoin, urlencode
+except ImportError:
+    from urlparse import urljoin
+    from urllib import urlencode
 
 from collections import OrderedDict
 from cStringIO import StringIO
@@ -887,8 +891,8 @@ class ShamanProject(GitbuilderProject):
             req_obj['sha1'] = ref_val
         else:
             req_obj['ref'] = ref_val
-        req_str = urllib.urlencode(req_obj)
-        uri = urlparse.urljoin(
+        req_str = urlencode(req_obj)
+        uri = urljoin(
             self.query_url,
             'search',
         ) + '?%s' % req_str
@@ -962,7 +966,7 @@ class ShamanProject(GitbuilderProject):
     @property
     def repo_url(self):
         self.assert_result()
-        return urlparse.urljoin(
+        return urljoin(
             self._result.json()[0]['chacra_url'],
             'repo',
         )
index bdfbbf2793e0c690f740fa6924712ad9886eaf21..5882d57ad4f304020a29fb3010c746979ebcf5f4 100644 (file)
@@ -3,9 +3,13 @@ import re
 import requests
 import socket
 import time
-import urllib
 import yaml
 
+try:
+    from urllib.parse import  urlencode
+except ImportError:
+    from urllib import urlencode
+
 from copy import deepcopy
 from libcloud.common.exceptions import RateLimitReachedError, BaseHTTPError
 
@@ -289,7 +293,7 @@ class OpenStackProvisioner(base.Provisioner):
                 log.exception("Could not destroy volume %s", vol)
 
     def _update_dns(self):
-        query = urllib.urlencode(dict(
+        query = urlencode(dict(
             name=self.name,
             ip=self.ips[0],
         ))
index 1a99011bfc23b8f73e32aef1e05e3d77b16ee9a8..cc3f26c256ec65862298eba122d6ef88059c642b 100644 (file)
@@ -1,8 +1,13 @@
 import socket
-import urlparse
 import yaml
 import os
 
+try:
+    from urllib.parse import parse_qs
+except ImportError:
+    from urlparse import parse_qs
+
+
 from copy import deepcopy
 from libcloud.compute.providers import get_driver
 from mock import patch, Mock, DEFAULT
@@ -657,7 +662,7 @@ class TestOpenStackProvisioner(TestOpenStackBase):
         assert len(call_args) == 1
         url_base, query_string = call_args[0][0][0].split('?')
         assert url_base == 'nsupdate_url'
-        parsed_query = urlparse.parse_qs(query_string)
+        parsed_query = parse_qs(query_string)
         assert parsed_query == dict(name=['x'], ip=['y'])
 
     @mark.parametrize(
index ffb236fdb75ff1d06ea5df30ed710cdaa040b8d0..833fcc079872ac96eb2d3875382f7fb05169f33b 100644 (file)
@@ -7,7 +7,11 @@ import logging
 import os
 import re
 import shlex
-import urlparse
+
+try:
+    from urllib.parse import urljoin
+except ImportError:
+    from urlparse import urljoin
 
 from teuthology import misc as teuthology
 from teuthology.parallel import parallel
@@ -370,7 +374,7 @@ def download_kernel(ctx, config):
             if teuth_config.use_shaman:
                 if role_remote.os.package_type == 'rpm':
                     arch = builder.arch
-                    baseurl = urlparse.urljoin(
+                    baseurl = urljoin(
                         builder.base_url,
                         '/'.join([arch, ''])
                     )
@@ -380,7 +384,7 @@ def download_kernel(ctx, config):
                     )
                 elif role_remote.os.package_type == 'deb':
                     arch = 'amd64'  # FIXME
-                    baseurl = urlparse.urljoin(
+                    baseurl = urljoin(
                         builder.base_url,
                         '/'.join([
                             'pool', 'main', 'l',
index a09c56c3656bf2eeebfc0aa8d93a1d31b4e59ecb..d016327f08b5c11bf06b2b274feff2a4b3b7b548 100644 (file)
@@ -6,8 +6,12 @@ import logging
 import os
 import requests
 import time
-import urllib
-import urlparse
+
+try:
+    from urllib.parse import urljoin, urlencode
+except ImportError:
+    from urlparse import urljoin
+    from urllib import urlencode
 
 from teuthology.config import config as teuth_config
 from teuthology.orchestra import run
@@ -64,7 +68,7 @@ class PCPGrapher(PCPDataSource):
 
     def __init__(self, hosts, time_from, time_until='now'):
         super(PCPGrapher, self).__init__(hosts, time_from, time_until)
-        self.base_url = urlparse.urljoin(
+        self.base_url = urljoin(
             teuth_config.pcp_host,
             self._endpoint)
 
@@ -83,7 +87,7 @@ class GrafanaGrapher(PCPGrapher):
         )
         if self.time_until:
             config['time_to'] = self._format_time(self.time_until)
-        args = urllib.urlencode(config)
+        args = urlencode(config)
         template = "{base_url}?{args}"
         return template.format(base_url=self.base_url, args=args)
 
@@ -183,7 +187,7 @@ class GraphiteGrapher(PCPGrapher):
             # 'target=' arg
             'target': self.get_target_globs(metric),
         })
-        args = urllib.urlencode(config, doseq=True)
+        args = urlencode(config, doseq=True)
         template = "{base_url}?{args}"
         return template.format(base_url=self.base_url, args=args)
 
index 1331be044b51b6e0aed7709abedfc2bbc128d019..68b9093e2478167ebf522a904887a172966a9751 100644 (file)
@@ -1,6 +1,10 @@
 import os
 import requests
-import urlparse
+
+try:
+    from urllib.parse import parse_qs, urljoin
+except ImportError:
+    from urlparse import parse_qs, urljoin
 
 from mock import patch, DEFAULT, Mock, mock_open, call
 from pytest import raises
@@ -87,7 +91,7 @@ class TestPCPGrapher(TestPCPDataSource):
         assert obj.hosts == hosts
         assert obj.time_from == time_from
         assert obj.time_until == time_until
-        expected_url = urlparse.urljoin(config.pcp_host, self.klass._endpoint)
+        expected_url = urljoin(config.pcp_host, self.klass._endpoint)
         assert obj.base_url == expected_url
 
 
@@ -103,13 +107,13 @@ class TestGrafanaGrapher(TestPCPGrapher):
             time_from=time_from,
             time_until=time_until,
         )
-        base_url = urlparse.urljoin(
+        base_url = urljoin(
             config.pcp_host,
             'grafana/index.html#/dashboard/script/index.js',
         )
         assert obj.base_url == base_url
         got_url = obj.build_graph_url()
-        parsed_query = urlparse.parse_qs(got_url.split('?')[1])
+        parsed_query = parse_qs(got_url.split('?')[1])
         assert parsed_query['hosts'] == hosts
         assert len(parsed_query['time_from']) == 1
         assert parsed_query['time_from'][0] == time_from