Use py2/py3-compatible functions from the urllib packages.
Python 3:
from urllib.parse import parse_qs, urljoin, urlparse, urlencode
from urllib.request import urlopen, Request
from urllib.error import HTTPError
Python 2:
from urlparse import parse_qs, urljoin, urlparse
from urllib import urlencode
from urllib2 import urlopen, Request, HTTPError
Signed-off-by: Kyr Shatskyy <kyrylo.shatskyy@suse.com>
import logging
import os
-import urllib
+
+try:
+ from urllib.parse import urlencode
+except ImportError:
+ from urllib import urlencode
import requests
if kwargs:
if 'machine_type' in kwargs:
kwargs['machine_type'] = kwargs['machine_type'].replace(',','|')
- uri += '?' + urllib.urlencode(kwargs)
+ uri += '?' + urlencode(kwargs)
try:
response = requests.get(uri)
except requests.ConnectionError:
import sys
import tarfile
import time
-import urllib2
-import urlparse
import yaml
import json
import re
import pprint
+try:
+ from urllib.parse import urljoin
+ from urllib.request import urlopen
+ from urllib.error import HTTPError
+except ImportError:
+ from urlparse import urljoin
+ from urllib2 import urlopen, HTTPError
+
from netaddr.strategy.ipv4 import valid_str as _is_ipv4
from netaddr.strategy.ipv6 import valid_str as _is_ipv6
from teuthology import safepath
branch = 'master'
ref = branch
- sha1_url = urlparse.urljoin(BASE, 'ref/{ref}/sha1'.format(ref=ref))
+ sha1_url = urljoin(BASE, 'ref/{ref}/sha1'.format(ref=ref))
log.debug('Translating ref to sha1 using url %s', sha1_url)
try:
- sha1_fp = urllib2.urlopen(sha1_url)
+ sha1_fp = urlopen(sha1_url)
sha1 = sha1_fp.read().rstrip('\n')
sha1_fp.close()
- except urllib2.HTTPError as e:
+ except HTTPError as e:
log.error('Failed to get url %s', sha1_url)
raise e
log.debug('Using %s %s sha1 %s', package, format, sha1)
- bindir_url = urlparse.urljoin(BASE, 'sha1/{sha1}/'.format(sha1=sha1))
+ bindir_url = urljoin(BASE, 'sha1/{sha1}/'.format(sha1=sha1))
return (sha1, bindir_url)
import ast
import re
import requests
-import urllib
-import urlparse
+
+try:
+ from urllib.parse import urljoin, urlencode
+except ImportError:
+ from urlparse import urljoin
+ from urllib import urlencode
from collections import OrderedDict
from cStringIO import StringIO
req_obj['sha1'] = ref_val
else:
req_obj['ref'] = ref_val
- req_str = urllib.urlencode(req_obj)
- uri = urlparse.urljoin(
+ req_str = urlencode(req_obj)
+ uri = urljoin(
self.query_url,
'search',
) + '?%s' % req_str
@property
def repo_url(self):
self.assert_result()
- return urlparse.urljoin(
+ return urljoin(
self._result.json()[0]['chacra_url'],
'repo',
)
import requests
import socket
import time
-import urllib
import yaml
+try:
+ from urllib.parse import urlencode
+except ImportError:
+ from urllib import urlencode
+
from copy import deepcopy
from libcloud.common.exceptions import RateLimitReachedError, BaseHTTPError
log.exception("Could not destroy volume %s", vol)
def _update_dns(self):
- query = urllib.urlencode(dict(
+ query = urlencode(dict(
name=self.name,
ip=self.ips[0],
))
import socket
-import urlparse
import yaml
import os
+try:
+ from urllib.parse import parse_qs
+except ImportError:
+ from urlparse import parse_qs
+
+
from copy import deepcopy
from libcloud.compute.providers import get_driver
from mock import patch, Mock, DEFAULT
assert len(call_args) == 1
url_base, query_string = call_args[0][0][0].split('?')
assert url_base == 'nsupdate_url'
- parsed_query = urlparse.parse_qs(query_string)
+ parsed_query = parse_qs(query_string)
assert parsed_query == dict(name=['x'], ip=['y'])
@mark.parametrize(
import os
import re
import shlex
-import urlparse
+
+try:
+ from urllib.parse import urljoin
+except ImportError:
+ from urlparse import urljoin
from teuthology import misc as teuthology
from teuthology.parallel import parallel
if teuth_config.use_shaman:
if role_remote.os.package_type == 'rpm':
arch = builder.arch
- baseurl = urlparse.urljoin(
+ baseurl = urljoin(
builder.base_url,
'/'.join([arch, ''])
)
)
elif role_remote.os.package_type == 'deb':
arch = 'amd64' # FIXME
- baseurl = urlparse.urljoin(
+ baseurl = urljoin(
builder.base_url,
'/'.join([
'pool', 'main', 'l',
import os
import requests
import time
-import urllib
-import urlparse
+
+try:
+ from urllib.parse import urljoin, urlencode
+except ImportError:
+ from urlparse import urljoin
+ from urllib import urlencode
from teuthology.config import config as teuth_config
from teuthology.orchestra import run
def __init__(self, hosts, time_from, time_until='now'):
super(PCPGrapher, self).__init__(hosts, time_from, time_until)
- self.base_url = urlparse.urljoin(
+ self.base_url = urljoin(
teuth_config.pcp_host,
self._endpoint)
)
if self.time_until:
config['time_to'] = self._format_time(self.time_until)
- args = urllib.urlencode(config)
+ args = urlencode(config)
template = "{base_url}?{args}"
return template.format(base_url=self.base_url, args=args)
# 'target=' arg
'target': self.get_target_globs(metric),
})
- args = urllib.urlencode(config, doseq=True)
+ args = urlencode(config, doseq=True)
template = "{base_url}?{args}"
return template.format(base_url=self.base_url, args=args)
import os
import requests
-import urlparse
+
+try:
+ from urllib.parse import parse_qs, urljoin
+except ImportError:
+ from urlparse import parse_qs, urljoin
from mock import patch, DEFAULT, Mock, mock_open, call
from pytest import raises
assert obj.hosts == hosts
assert obj.time_from == time_from
assert obj.time_until == time_until
- expected_url = urlparse.urljoin(config.pcp_host, self.klass._endpoint)
+ expected_url = urljoin(config.pcp_host, self.klass._endpoint)
assert obj.base_url == expected_url
time_from=time_from,
time_until=time_until,
)
- base_url = urlparse.urljoin(
+ base_url = urljoin(
config.pcp_host,
'grafana/index.html#/dashboard/script/index.js',
)
assert obj.base_url == base_url
got_url = obj.build_graph_url()
- parsed_query = urlparse.parse_qs(got_url.split('?')[1])
+ parsed_query = parse_qs(got_url.split('?')[1])
assert parsed_query['hosts'] == hosts
assert len(parsed_query['time_from']) == 1
assert parsed_query['time_from'][0] == time_from