log.info('Suite %s in %s generated %d jobs (not yet filtered)' % (
suite_name, path, len(configs)))
+ # used as a local cache for package versions from gitbuilder
package_versions = dict()
jobs_to_schedule = []
+ jobs_missing_packages = []
for description, fragment_paths in configs:
if limit > 0 and len(jobs_to_schedule) >= limit:
log.info(
exclude_os_type, description)
continue
- # Make sure packages actually exist for this distro. Cache results so
- # we don't hammer the gitbuilder mirror unnecessarily hard.
- sha1 = job_config.sha1
- package_versions_for_hash = package_versions.get(sha1, dict())
- if str(os_type) not in package_versions_for_hash:
- package_version = package_version_for_hash(sha1,
- distro=str(os_type))
- package_versions_for_hash[str(os_type)] = package_version
- package_versions[sha1] = package_versions_for_hash
- else:
- package_version = package_versions_for_hash[str(os_type)]
-
- if not package_version:
- m = "Packages for os_type '{os}' and ceph hash '{ver}' not found"
- schedule_fail(m.format(os=os_type, ver=sha1), job_config.name)
-
arg = copy.deepcopy(base_args)
arg.extend([
'--description', description,
arg.extend(base_yamls)
arg.extend(fragment_paths)
- jobs_to_schedule.append({'yaml': parsed_yaml, 'desc': description,
- 'args': arg})
+ job = dict(
+ yaml=parsed_yaml,
+ desc=description,
+ args=arg
+ )
+
+ if dry_run:
+ sha1 = job_config.sha1
+ # Get package versions for this sha1 and os_type. If we've already
+ # retrieved them in a previous iteration, they'll be present in
+ # package_versions and gitbuilder will not be asked for them again.
+ package_versions = get_package_versions(
+ sha1,
+ os_type,
+ package_versions
+ )
+
+ if not has_packages_for_distro(sha1, os_type, package_versions):
+ m = "Packages for os_type '{os}' and ceph hash '{ver}' not found"
+ log.info(m.format(os=os_type, ver=sha1))
+ jobs_missing_packages.append(job)
+
+ jobs_to_schedule.append(job)
for job in jobs_to_schedule:
log.info(
printable_args.append("'%s'" % item)
else:
printable_args.append(item)
- log.info('dry-run: %s' % ' '.join(printable_args))
+ prefix = "dry-run:"
+ if job in jobs_missing_packages:
+ prefix = "dry-run (missing packages):"
+ log.info('%s %s' % (prefix, ' '.join(printable_args)))
else:
subprocess.check_call(
args=job['args'],
)
count = len(jobs_to_schedule)
+ missing_count = len(jobs_missing_packages)
log.info('Suite %s in %s scheduled %d jobs.' % (suite_name, path, count))
log.info('Suite %s in %s -- %d jobs were filtered out.' %
(suite_name, path, len(configs) - count))
+ if dry_run:
+ log.info('Suite %s in %s scheduled %d jobs with missing packages.' %
+ (suite_name, path, missing_count))
return count
+def get_package_versions(sha1, os_type, package_versions=None):
+ """
+ Will retrieve the package versions for the given sha1 and os_type
+ from gitbuilder.
+
+ Optionally, a package_versions dict can be provided
+ from previous calls to this function to avoid calling gitbuilder for
+ information we've already retrieved.
+
+ The package_versions dict will be in the following format::
+
+ {
+ "sha1": {
+ "ubuntu": "version",
+ "rhel": "version",
+ },
+ "another-sha1": {
+ "ubuntu": "version",
+ }
+ }
+
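+ For example, to build the cache up across calls (an illustrative
+ sketch; ``sha1`` stands in for a real ceph hash)::
+
+ versions = get_package_versions(sha1, 'ubuntu')
+ # passing the dict back in reuses the cached 'ubuntu' result and
+ # only queries gitbuilder for the new distro
+ versions = get_package_versions(sha1, 'rhel', versions)
+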
+ :param sha1: The sha1 hash of the ceph version.
+ :param os_type: The distro we want to get packages for, given
+ the ceph sha1. Ex. 'ubuntu', 'rhel', etc.
+ :param package_versions: Optionally pass cached results of
+ previous calls to gitbuilder to avoid re-querying it.
+ :returns: A dict of package versions. Will return versions
+ for all hashes and distros, not just for the given
+ hash and distro.
+ """
+ if package_versions is None:
+ package_versions = dict()
+
+ os_type = str(os_type)
+
+ package_versions_for_hash = package_versions.get(sha1, dict())
+ if os_type not in package_versions_for_hash:
+ package_version = package_version_for_hash(
+ sha1,
+ distro=os_type
+ )
+ package_versions_for_hash[os_type] = package_version
+ package_versions[sha1] = package_versions_for_hash
+
+ return package_versions
+
+
+def has_packages_for_distro(sha1, os_type, package_versions=None):
+ """
+ Checks to see if gitbuilder has packages for the given sha1 and os_type.
+
+ Optionally, a package_versions dict from previous calls to
+ get_package_versions can be provided to avoid calling gitbuilder for
+ information we've already retrieved.
+
+ The package_versions dict will be in the following format::
+
+ {
+ "sha1": {
+ "ubuntu": "version",
+ "rhel": "version",
+ },
+ "another-sha1": {
+ "ubuntu": "version",
+ }
+ }
+
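+ For example (an illustrative sketch; ``sha1`` and ``os_type`` are
+ placeholders)::
+
+ versions = get_package_versions(sha1, os_type)
+ if not has_packages_for_distro(sha1, os_type, versions):
+ log.info("No packages for '%s' and hash '%s'" % (os_type, sha1))
+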
+ :param sha1: The sha1 hash of the ceph version.
+ :param os_type: The distro we want to get packages for, given
+ the ceph sha1. Ex. 'ubuntu', 'rhel', etc.
+ :param package_versions: Optionally pass cached results of
+ previous calls to gitbuilder to avoid re-querying it.
+ :returns: True, if packages are found. False otherwise.
+ """
+ os_type = str(os_type)
+ if package_versions is None:
+ package_versions = get_package_versions(sha1, os_type)
+
+ package_versions_for_hash = package_versions.get(sha1, dict())
+ # we want to return a boolean here, not the actual package versions
+ return bool(package_versions_for_hash.get(os_type, None))
+
+
def combine_path(left, right):
"""
os.path.join(a, b) doesn't like it when b is None
+from copy import deepcopy
from datetime import datetime
+from mock import patch
+
from teuthology import suite
output_dict = suite.substitute_placeholders(suite.dict_templ,
input_dict)
assert 'os_type' not in output_dict
+
+
+class TestMissingPackages(object):
+ """
+ Tests the functionality that checks whether a
+ scheduled job would be missing packages in gitbuilder.
+ """
+ def setup(self):
+ package_versions = dict(
+ sha1=dict(
+ ubuntu="1.0"
+ )
+ )
+ self.pv = package_versions
+
+ def test_os_in_package_versions(self):
+ # "ubuntu" is already cached for "sha1", so gitbuilder is not
+ # queried and the cache is returned unchanged
+ expected = deepcopy(self.pv)
+ assert expected == suite.get_package_versions(
+ "sha1",
+ "ubuntu",
+ package_versions=self.pv
+ )
+
+ @patch("teuthology.suite.package_version_for_hash")
+ def test_os_not_in_package_versions(self, m_package_versions_for_hash):
+ m_package_versions_for_hash.return_value = "1.1"
+ # snapshot the expected result before the call, since the cache
+ # dict passed in is updated in place
+ expected = deepcopy(self.pv)
+ expected['sha1'].update(dict(rhel="1.1"))
+ result = suite.get_package_versions(
+ "sha1",
+ "rhel",
+ package_versions=self.pv
+ )
+ assert result == expected
+
+ @patch("teuthology.suite.package_version_for_hash")
+ def test_package_versions_not_found(self, m_package_versions_for_hash):
+ # if gitbuilder returns a status that's not a 200, None is returned
+ # and cached for that hash/distro
+ m_package_versions_for_hash.return_value = None
+ expected = deepcopy(self.pv)
+ expected['sha1'].update(dict(rhel=None))
+ result = suite.get_package_versions(
+ "sha1",
+ "rhel",
+ package_versions=self.pv
+ )
+ assert result == expected
+
+ @patch("teuthology.suite.package_version_for_hash")
+ def test_no_package_versions_kwarg(self, m_package_versions_for_hash):
+ m_package_versions_for_hash.return_value = "1.0"
+ result = suite.get_package_versions(
+ "sha1",
+ "ubuntu",
+ )
+ expected = deepcopy(self.pv)
+ assert result == expected
+
+ def test_distro_has_packages(self):
+ result = suite.has_packages_for_distro(
+ "sha1",
+ "ubuntu",
+ package_versions=self.pv,
+ )
+ assert result
+
+ def test_distro_does_not_have_packages(self):
+ result = suite.has_packages_for_distro(
+ "sha1",
+ "rhel",
+ package_versions=self.pv,
+ )
+ assert not result
+
+ @patch("teuthology.suite.get_package_versions")
+ def test_has_packages_no_package_versions(self, m_get_package_versions):
+ m_get_package_versions.return_value = self.pv
+ result = suite.has_packages_for_distro(
+ "sha1",
+ "rhel",
+ )
+ assert not result