Since Python 3 does not have file(), stop using it.
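
Replace every file() call with open(), preferring a context manager so the
handle is closed explicitly. A minimal sketch of the pattern, mirroring the
read and write sites touched below (the file name and data are illustrative,
not taken from the tree):

    import yaml

    yaml_path = 'example.yaml'

    # was: with file(yaml_path, 'w') as f:          # Python 2 only
    with open(yaml_path, 'w') as f:
        yaml.safe_dump({'field': 'val'}, f, default_flow_style=False)

    # was: conf = yaml.safe_load(file(yaml_path))   # Python 2 only
    with open(yaml_path) as f:
        conf = yaml.safe_load(f)

Tests that patched the file builtin or used MagicMock(spec=file) (spec=file
also breaks on Python 3) now patch open() instead, typically via
mock.mock_open() so the mock supports the context-manager protocol. An
illustrative sketch, not code lifted from the tests below:

    from mock import mock_open, patch

    # '__builtin__.open' is the Python 2 spelling; on Python 3 the patch
    # target would be 'builtins.open'.
    with patch('__builtin__.open', mock_open(read_data='field: val\n'),
               create=True) as m_open:
        with open('summary.yaml') as f:
            data = f.read()
    m_open.assert_called_once_with('summary.yaml')
    assert data == 'field: val\n'
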
Signed-off-by: Kyr Shatskyy <kyrylo.shatskyy@suse.com>
self._conf = yaml.safe_load(conf)
return
if os.path.exists(self.yaml_path):
- self._conf = yaml.safe_load(file(self.yaml_path))
+ with open(self.yaml_path) as f:
+ self._conf = yaml.safe_load(f)
else:
log.debug("%s not found", self.yaml_path)
self._conf = dict()
test_summaries = {}
for test in tests:
summary = {}
- with file(os.path.join(test_dir, test, 'summary.yaml')) as f:
+ with open(os.path.join(test_dir, test, 'summary.yaml')) as f:
g = yaml.safe_load_all(f)
for new in g:
summary.update(new)
if ctx.targets:
try:
- with file(ctx.targets) as f:
+ with open(ctx.targets) as f:
g = yaml.safe_load_all(f)
for new in g:
if 'targets' in new:
for m in args['<machine>']]
elif args['--targets']:
targets = args['--targets']
- with file(targets) as f:
+ with open(targets) as f:
docs = yaml.safe_load_all(f)
for doc in docs:
machines = [n for n in doc.get('targets', dict()).iterkeys()]
job_dir = os.path.join(archive_dir, j)
summary = {}
try:
- with file(os.path.join(job_dir, 'summary.yaml')) as f:
+ with open(os.path.join(job_dir, 'summary.yaml')) as f:
g = yaml.safe_load_all(f)
for new in g:
summary.update(new)
"""
config_dict = {}
try:
- with file(string) as f:
+ with open(string) as f:
g = yaml.safe_load_all(f)
for new in g:
config_dict.update(new)
if not os.path.exists(conf_path):
log.debug("The config path {0} does not exist, skipping.".format(conf_path))
continue
- with file(conf_path) as partial_file:
+ with open(conf_path) as partial_file:
partial_dict = yaml.safe_load(partial_file)
try:
conf_dict = deep_merge(conf_dict, partial_dict)
ctx.config = config_file(ctx.archive + '/config.yaml')
ifn = os.path.join(ctx.archive, 'info.yaml')
if os.path.exists(ifn):
- with file(ifn, 'r') as fd:
+ with open(ifn, 'r') as fd:
info = yaml.load(fd.read())
if not ctx.pid:
ctx.pid = info.get('pid')
import dateutil
import json
-from mock import patch, MagicMock
+from mock import patch, mock_open
from pytest import mark
from teuthology.provision.cloud import util
def test_get_user_ssh_pubkey(path, exists):
with patch('os.path.exists') as m_exists:
m_exists.return_value = exists
- with patch('teuthology.provision.cloud.util.file') as m_file:
- m_file.return_value = MagicMock(spec=file)
+ with patch('__builtin__.open', mock_open(), create=True) as m_open:
util.get_user_ssh_pubkey(path)
if exists:
- assert m_file.called_once_with(path, 'rb')
+ m_open.assert_called_once_with(path, 'rb')
@mark.parametrize(
self.mocks = dict()
for name, patcher in self.patchers.items():
self.mocks[name] = patcher.start()
- self.mocks['m_open'].return_value = MagicMock(spec=file)
def teardown(self):
for patcher in self.patchers.values():
full_path = os.path.expanduser(path)
if not os.path.exists(full_path):
return
- with file(full_path, 'rb') as f:
+ with open(full_path, 'rb') as f:
return f.read().strip()
# Depending on whether it passed or failed, we have a different age
# threshold
summary_lines = [line.strip() for line in
- file(summary_path).readlines()]
+ open(summary_path).readlines()]
if 'success: true' in summary_lines:
status = 'passed'
days = pass_days
yaml_path = os.path.join(job_archive_dir, yaml_name)
if not os.path.exists(yaml_path):
continue
- with file(yaml_path) as yaml_file:
+ with open(yaml_path) as yaml_file:
partial_info = yaml.safe_load(yaml_file)
if partial_info is not None:
job_info.update(partial_info)
if hasattr(self, '__last_run'):
return self.__last_run
elif os.path.exists(self.last_run_file):
- with file(self.last_run_file) as f:
+ with open(self.last_run_file) as f:
self.__last_run = f.read().strip()
return self.__last_run
@last_run.setter
def last_run(self, run_name):
self.__last_run = run_name
- with file(self.last_run_file, 'w') as f:
+ with open(self.last_run_file, 'w') as f:
f.write(run_name)
@last_run.deleter
# parse the log file generated by teuthology.results.results()
subset = None
seed = None
- with file(log_path) as results_log:
+ with open(log_path) as results_log:
for line in results_log:
if ':' not in line:
# stop if this does not look line a log line
def write_initial_metadata(archive, config, name, description, owner):
if archive is not None:
- with file(os.path.join(archive, 'pid'), 'w') as f:
+ with open(os.path.join(archive, 'pid'), 'w') as f:
f.write('%d' % os.getpid())
- with file(os.path.join(archive, 'owner'), 'w') as f:
+ with open(os.path.join(archive, 'owner'), 'w') as f:
f.write(owner + '\n')
- with file(os.path.join(archive, 'orig.config.yaml'), 'w') as f:
+ with open(os.path.join(archive, 'orig.config.yaml'), 'w') as f:
yaml.safe_dump(config, f, default_flow_style=False)
info = {
if 'job_id' in config:
info['job_id'] = config['job_id']
- with file(os.path.join(archive, 'info.yaml'), 'w') as f:
+ with open(os.path.join(archive, 'info.yaml'), 'w') as f:
yaml.safe_dump(info, f, default_flow_style=False)
nuke(fake_ctx, fake_ctx.lock)
if archive is not None:
- with file(os.path.join(archive, 'summary.yaml'), 'w') as f:
+ with open(os.path.join(archive, 'summary.yaml'), 'w') as f:
yaml.safe_dump(summary, f, default_flow_style=False)
with contextlib.closing(StringIO.StringIO()) as f:
if not is_collected:
continue
- raw_yaml = '\n'.join([file(a, 'r').read() for a in fragment_paths])
+ raw_yaml = '\n'.join([open(a, 'r').read() for a in fragment_paths])
parsed_yaml = yaml.safe_load(raw_yaml)
os_type = parsed_yaml.get('os_type') or self.base_config.os_type
import pytest
import requests
import yaml
+import contextlib
from datetime import datetime
from mock import patch, call, ANY, DEFAULT
@patch('teuthology.suite.util.has_packages_for_distro')
@patch('teuthology.suite.util.get_package_versions')
@patch('teuthology.suite.util.get_install_task_flavor')
- @patch('__builtin__.file')
+ @patch('__builtin__.open')
@patch('teuthology.suite.run.build_matrix')
@patch('teuthology.suite.util.git_ls_remote')
@patch('teuthology.suite.util.package_version_for_hash')
m_package_version_for_hash,
m_git_ls_remote,
m_build_matrix,
- m_file,
+ m_open,
m_get_install_task_flavor,
m_get_package_versions,
m_has_packages_for_distro,
m_build_matrix.return_value = build_matrix_output
frag1_read_output = 'field1: val1'
frag2_read_output = 'field2: val2'
- m_file.side_effect = [
+ m_open.side_effect = [
StringIO(frag1_read_output),
StringIO(frag2_read_output),
+ contextlib.closing(StringIO())
]
m_get_install_task_flavor.return_value = 'basic'
m_get_package_versions.return_value = dict()
@patch('teuthology.suite.util.has_packages_for_distro')
@patch('teuthology.suite.util.get_package_versions')
@patch('teuthology.suite.util.get_install_task_flavor')
- @patch('__builtin__.file')
+ @patch('__builtin__.open', create=True)
@patch('teuthology.suite.run.build_matrix')
@patch('teuthology.suite.util.git_ls_remote')
@patch('teuthology.suite.util.package_version_for_hash')
m_package_version_for_hash,
m_git_ls_remote,
m_build_matrix,
- m_file,
+ m_open,
m_get_install_task_flavor,
m_get_package_versions,
m_has_packages_for_distro,
(build_matrix_desc, build_matrix_frags),
]
m_build_matrix.return_value = build_matrix_output
- m_file.side_effect = [StringIO('field: val\n') for i in xrange(11)]
+ m_open.side_effect = [StringIO('field: val\n') for i in xrange(11)]
m_get_install_task_flavor.return_value = 'basic'
m_get_package_versions.return_value = dict()
m_has_packages_for_distro.side_effect = [
@patch('teuthology.suite.util.has_packages_for_distro')
@patch('teuthology.suite.util.get_package_versions')
@patch('teuthology.suite.util.get_install_task_flavor')
- @patch('__builtin__.file')
+ @patch('__builtin__.open', create=True)
@patch('teuthology.suite.run.build_matrix')
@patch('teuthology.suite.util.git_ls_remote')
@patch('teuthology.suite.util.package_version_for_hash')
m_package_version_for_hash,
m_git_ls_remote,
m_build_matrix,
- m_file,
+ m_open,
m_get_install_task_flavor,
m_get_package_versions,
m_has_packages_for_distro,
(build_matrix_desc, build_matrix_frags),
]
m_build_matrix.return_value = build_matrix_output
- m_file.side_effect = [
+ m_open.side_effect = [
StringIO('field: val\n') for i in xrange(NUM_FAILS+1)
- ]
+ ] + [
+ contextlib.closing(StringIO())
+ ]
m_get_install_task_flavor.return_value = 'basic'
m_get_package_versions.return_value = dict()
# NUM_FAILS, then success
pb_in_repo = os.path.join(self.repo_path, playbook_path)
if os.path.exists(pb_in_repo):
playbook_path = pb_in_repo
- self.playbook_file = file(playbook_path)
+ self.playbook_file = open(playbook_path)
playbook_yaml = yaml.safe_load(self.playbook_file)
self.playbook = playbook_yaml
except Exception:
if inv_suffix:
-        inv_fn = '.'.join(inv_fn, inv_suffix)
+        inv_fn = '.'.join([inv_fn, inv_suffix])
# Write out the inventory file
- inv_file = file(inv_fn, 'w')
+ inv_file = open(inv_fn, 'w')
inv_file.write(inventory)
# Next, write the group_vars files
all_group_vars = self.config.get('group_vars')
for group_name in sorted(all_group_vars):
group_vars = all_group_vars[group_name]
path = os.path.join(group_vars_dir, group_name + '.yml')
- gv_file = file(path, 'w')
+ gv_file = open(path, 'w')
yaml.safe_dump(group_vars, gv_file)
return inventory_dir
args=args[skip_nostdin:],
)
else:
- with file('/dev/null', 'rb') as devnull:
+ with open('/dev/null', 'rb') as devnull:
proc = subprocess.Popen(
args=args,
stdin=devnull,
log.info('Shipping valgrind.supp...')
assert 'suite_path' in ctx.config
try:
- with file(
+ with open(
os.path.join(ctx.config['suite_path'], 'valgrind.supp'),
'rb'
) as f:
src = os.path.join(os.path.dirname(__file__), filename)
dst = os.path.join(destdir, filename)
filenames.append(dst)
- with file(src, 'rb') as f:
+ with open(src, 'rb') as f:
for rem in ctx.cluster.remotes.iterkeys():
teuthology.sudo_write_file(
remote=rem,
"""
log.info('Saving configuration')
if ctx.archive is not None:
- with file(os.path.join(ctx.archive, 'config.yaml'), 'w') as f:
+ with open(os.path.join(ctx.archive, 'config.yaml'), 'w') as f:
yaml.safe_dump(ctx.config, f, default_flow_style=False)
So that other software can be loosely coupled to teuthology
"""
if ctx.archive is not None:
- with file(os.path.join(ctx.archive, 'info.yaml'), 'r+') as info_file:
+ with open(os.path.join(ctx.archive, 'info.yaml'), 'r+') as info_file:
info_yaml = yaml.safe_load(info_file)
info_file.seek(0)
info_yaml['cluster'] = dict([(rem.name, {'roles': roles}) for rem, roles in ctx.cluster.remotes.iteritems()])
proc = role_remote.run(
args=[
'python', '-c',
- 'import shutil, sys; shutil.copyfileobj(sys.stdin, file(sys.argv[1], "wb"))',
+ 'import shutil, sys; shutil.copyfileobj(sys.stdin, open(sys.argv[1], "wb"))',
remote_pkg_path(role_remote),
],
wait=False,
archive_dir = os.path.join(run_archive_dir, str(job['job_id']))
os.mkdir(archive_dir)
- with file(os.path.join(archive_dir, 'info.yaml'), 'w') as yfile:
+ with open(os.path.join(archive_dir, 'info.yaml'), 'w') as yfile:
yaml.safe_dump(job['info'], yfile)
if 'summary' in job:
summary_path = os.path.join(archive_dir, 'summary.yaml')
- with file(summary_path, 'w') as yfile:
+ with open(summary_path, 'w') as yfile:
yaml.safe_dump(job['summary'], yfile)
def create_fake_run(self, run_name, job_count, yaml_path, num_hung=0):
m_NTF,
)
self.patchers['file'] = patch(
- 'teuthology.task.ansible.file', create=True)
+ 'teuthology.task.ansible.open', create=True)
self.patchers['os_mkdir'] = patch(
'teuthology.task.ansible.os.mkdir',
)
import requests
import urlparse
-from mock import patch, DEFAULT, Mock, MagicMock, call
+from mock import patch, DEFAULT, Mock, mock_open, call
from pytest import raises
from teuthology.config import config, FakeNamespace
m_resp = Mock()
m_resp.ok = True
m_get.return_value = m_resp
- with patch('teuthology.task.pcp.open', create=True) as m_open:
- m_open.return_value = MagicMock(spec=file)
+ with patch('teuthology.task.pcp.open', mock_open(), create=True):
obj.download_graphs()
expected_filenames = []
for metric in obj.metrics:
m_resp = Mock()
m_resp.ok = True
m_get.return_value = m_resp
- with patch('teuthology.task.pcp.open', create=True) as m_open:
- m_open.return_value = MagicMock(spec=file)
+ with patch('teuthology.task.pcp.open', mock_open(), create=True):
obj.download_graphs()
html = obj.generate_html(mode='static')
assert config.pcp_host not in html
assert results == ["1", "3"]
@patch("yaml.safe_load_all")
- @patch("__builtin__.file")
@patch("teuthology.ls.get_jobs")
- def test_ls(self, m_get_jobs, m_file, m_safe_load_all):
+ def test_ls(self, m_get_jobs, m_safe_load_all):
m_get_jobs.return_value = ["1", "2"]
m_safe_load_all.return_value = [{"failure_reason": "reasons"}]
ls.ls("some/archive/div", True)
- @patch("__builtin__.file")
+ @patch("__builtin__.open")
@patch("teuthology.ls.get_jobs")
- def test_ls_ioerror(self, m_get_jobs, m_file):
+ def test_ls_ioerror(self, m_get_jobs, m_open):
m_get_jobs.return_value = ["1", "2"]
- m_file.side_effect = IOError()
+ m_open.side_effect = IOError()
with pytest.raises(IOError):
ls.ls("some/archive/dir", True)
@patch("os.path.exists")
@patch("yaml.safe_load")
- @patch("__builtin__.file")
- def test_merge_configs(self, m_file, m_safe_load, m_exists):
+ @patch("__builtin__.open")
+ def test_merge_configs(self, m_open, m_safe_load, m_exists):
""" Only tests with one yaml file being passed, mainly just to test
the loop logic. The actual merge will be tested in subsequent
tests.
m_safe_load.return_value = expected
result = misc.merge_configs(["path/to/config1"])
assert result == expected
- m_file.assert_called_once_with("path/to/config1")
+ m_open.assert_called_once_with("path/to/config1")
def test_merge_configs_empty(self):
assert misc.merge_configs([]) == {}
jobs = self.archive.create_fake_run(run_name, job_count, yaml_path)
job = jobs[0]
- with file(yaml_path) as yaml_file:
+ with open(yaml_path) as yaml_file:
obj_from_yaml = yaml.safe_load(yaml_file)
full_obj = obj_from_yaml.copy()
full_obj.update(job['info'])
with pytest.raises(AssertionError):
run.setup_config(["some/config.yaml"])
- @patch("__builtin__.file")
- def test_write_initial_metadata(self, m_file):
+ @patch("__builtin__.open")
+ def test_write_initial_metadata(self, m_open):
config = {"job_id": "123", "foo": "bar"}
run.write_initial_metadata(
"some/archive/dir",
call('some/archive/dir/orig.config.yaml', 'w'),
call('some/archive/dir/info.yaml', 'w')
]
- assert m_file.call_args_list == expected
+ assert m_open.call_args_list == expected
def test_get_machine_type(self):
result = run.get_machine_type(None, {"machine-type": "the_machine_type"})
@patch("yaml.safe_dump")
@patch("teuthology.report.try_push_job_info")
@patch("teuthology.run.email_results")
- @patch("__builtin__.file")
+ @patch("__builtin__.open")
@patch("sys.exit")
- def test_report_outcome(self, m_sys_exit, m_file, m_email_results, m_try_push_job_info, m_safe_dump, m_nuke, m_get_status):
+ def test_report_outcome(self, m_sys_exit, m_open, m_email_results, m_try_push_job_info, m_safe_dump, m_nuke, m_get_status):
config = {"nuke-on-error": True, "email-on-error": True}
m_get_status.return_value = "fail"
fake_ctx = Mock()
run.report_outcome(config, "the/archive/path", summary, fake_ctx)
assert m_nuke.called
m_try_push_job_info.assert_called_with(config, summary)
- m_file.assert_called_with("the/archive/path/summary.yaml", "w")
+ m_open.assert_called_with("the/archive/path/summary.yaml", "w")
assert m_email_results.called
- assert m_file.called
+ assert m_open.called
assert m_sys_exit.called
@patch("teuthology.run.set_up_logging")
from teuthology import timer
-from mock import MagicMock, patch
+from mock import MagicMock, patch, mock_open
from time import time
_path = '/path'
_safe_dump = MagicMock(name='safe_dump')
with patch('teuthology.timer.yaml.safe_dump', _safe_dump):
- with patch('teuthology.timer.file') as _file:
- _file.return_value = MagicMock(spec=file)
+ with patch('teuthology.timer.open', mock_open(), create=True) as _open:
self.timer = timer.Timer(path=_path)
assert self.timer.path == _path
self.timer.write()
- _file.assert_called_once_with(_path, 'w')
+ _open.assert_called_once_with(_path, 'w')
_safe_dump.assert_called_once_with(
dict(),
- _file.return_value.__enter__.return_value,
+ _open.return_value.__enter__.return_value,
default_flow_style=False,
)
_path = '/path'
_safe_dump = MagicMock(name='safe_dump')
with patch('teuthology.timer.yaml.safe_dump', _safe_dump):
- with patch('teuthology.timer.file') as _file:
- _file.return_value = MagicMock(spec=file)
+ with patch('teuthology.timer.open', mock_open(), create=True) as _open:
self.timer = timer.Timer(path=_path, sync=True)
assert self.timer.path == _path
assert self.timer.sync is True
self.timer.mark()
- _file.assert_called_once_with(_path, 'w')
+ _open.assert_called_once_with(_path, 'w')
_safe_dump.assert_called_once_with(
self.timer.data,
- _file.return_value.__enter__.return_value,
+ _open.return_value.__enter__.return_value,
default_flow_style=False,
)
def write(self):
try:
- with file(self.path, 'w') as f:
+ with open(self.path, 'w') as f:
yaml.safe_dump(self.data, f, default_flow_style=False)
except Exception:
log.exception("Failed to write timing.yaml !")
def __enter__(self):
if not self.noop:
assert self.file is None
- self.file = file(self.filename, 'w')
+ self.file = open(self.filename, 'w')
fcntl.lockf(self.file, fcntl.LOCK_EX)
return self