from datetime import datetime
from ..config import config, JobConfig
-from ..exceptions import (BranchNotFoundError, CommitNotFoundError,)
+from ..exceptions import (
+ BranchNotFoundError, CommitNotFoundError, VersionNotFoundError
+)
from ..misc import deep_merge, get_results_url
from . import util
WAIT_PAUSE = 5 * 60
__slots__ = (
'args', 'name', 'base_config', 'suite_repo_path', 'base_yaml_paths',
- 'base_args',
+ 'base_args', 'package_versions',
)
def __init__(self, args):
self.args = args
self.name = self.make_run_name()
self.base_config = self.create_initial_config()
+ # cache of package versions, to minimize requests to gitbuilder
+ self.package_versions = dict()
if self.args.suite_dir:
self.suite_repo_path = self.args.suite_dir
return ceph_hash
def choose_ceph_version(self, ceph_hash):
- if config.suite_verify_ceph_hash:
+ if config.suite_verify_ceph_hash and not self.args.newest:
+ # don't bother if newest; we'll search for an older one
# Get the ceph package version
ceph_version = util.package_version_for_hash(
ceph_hash, self.args.kernel_flavor, self.args.distro,
if results_url:
log.info("Test results viewable at %s", results_url)
- def schedule_suite(self):
- """
- Schedule the suite run. Returns the number of jobs scheduled.
- """
- name = self.name
- arch = util.get_arch(self.base_config.machine_type)
- suite_name = self.base_config.suite
- suite_path = os.path.join(
- self.suite_repo_path, 'suites',
- self.base_config.suite.replace(':', '/'))
- log.debug('Suite %s in %s' % (suite_name, suite_path))
- configs = [
- (combine_path(suite_name, item[0]), item[1]) for item in
- build_matrix(suite_path, subset=self.args.subset)
- ]
- log.info('Suite %s in %s generated %d jobs (not yet filtered)' % (
- suite_name, suite_path, len(configs)))
- def collect_jobs(self, arch, configs):
- # used as a local cache for package versions from gitbuilder
- package_versions = dict()
+ def collect_jobs(self, arch, configs, newest=False):
jobs_to_schedule = []
jobs_missing_packages = []
for description, fragment_paths in configs:
args=arg
)
+ sha1 = self.base_config.sha1
if config.suite_verify_ceph_hash:
full_job_config = dict()
deep_merge(full_job_config, self.base_config.to_dict())
deep_merge(full_job_config, parsed_yaml)
flavor = util.get_install_task_flavor(full_job_config)
- sha1 = self.base_config.sha1
# Get package versions for this sha1, os_type and flavor. If
# we've already retrieved them in a previous loop, they'll be
# present in package_versions and gitbuilder will not be asked
# again for them.
- package_versions = util.get_package_versions(
- sha1,
- os_type,
- flavor,
- package_versions
- )
+ try:
+ self.package_versions = util.get_package_versions(
+ sha1,
+ os_type,
+ flavor,
+ self.package_versions
+ )
+ except VersionNotFoundError:
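+ # not fatal: has_packages_for_distro() below will flag this
+ # job as missing packages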
+ pass
if not util.has_packages_for_distro(sha1, os_type, flavor,
- package_versions):
+ self.package_versions):
m = "Packages for os_type '{os}', flavor {flavor} and " + \
"ceph hash '{ver}' not found"
log.error(m.format(os=os_type, flavor=flavor, ver=sha1))
jobs_missing_packages.append(job)
+ # optimization: one missing package causes backtrack in newest mode;
+ # no point in continuing the search
+ if newest:
+ return jobs_missing_packages, None
jobs_to_schedule.append(job)
return jobs_missing_packages, jobs_to_schedule
log.info('Suite %s in %s generated %d jobs (not yet filtered)' % (
suite_name, suite_path, len(configs)))
- jobs_missing_packages, jobs_to_schedule = self.collect_jobs(arch, configs)
+ # if --newest was given, retry with successively older commits
+ # until no packages are missing or the backtrack limit is hit;
+ # otherwise, collect jobs exactly once
+ backtrack = 0
+ limit = self.args.newest
+ while backtrack <= limit:
+ jobs_missing_packages, jobs_to_schedule = \
+ self.collect_jobs(arch, configs, self.args.newest)
+ if jobs_missing_packages and self.args.newest:
+ self.base_config.sha1 = \
+ util.find_git_parent('ceph', self.base_config.sha1)
+ if self.base_config.sha1 is None:
+ util.schedule_fail(
+ name, message='Backtrack for --newest failed'
+ )
+ backtrack += 1
+ continue
+ if backtrack:
+ log.info("--newest supplied, backtracked %d commits to %s" %
+ (backtrack, self.base_config.sha1))
+ break
+ else:
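+ # NB: this else belongs to the while loop; it runs only when
+ # the loop exhausts its backtracks without hitting break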
+ util.schedule_fail(
+ name,
+ message='Exceeded %d backtracks; raise --newest value' % limit
+ )
self.schedule_jobs(jobs_missing_packages, jobs_to_schedule, name)
))
if not dry_run or verbose > 1:
subprocess.check_call(args=args)
+
+
+def find_git_parent(project, sha1):
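+ """
+ Ask the githelper service for the parent commit of sha1 in the
+ given project. Returns the parent's sha1, or None if it cannot
+ be determined.
+ """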
+
+ base_url = config.githelper_base_url
+ if not base_url:
+ log.warning('githelper_base_url not set, --newest disabled')
+ return None
+
+ def refresh(project):
+ url = '%s/%s.git/refresh' % (base_url, project)
+ resp = requests.get(url)
+ if not resp.ok:
+ log.error('git refresh failed for %s: %s', project, resp.content)
+
+ def get_sha1s(project, commitish, count):
+ url = '/'.join((base_url, '%s.git' % project,
+ 'history/?commitish=%s&count=%d' % (commitish, count)))
+ resp = requests.get(url)
+ resp.raise_for_status()
+ sha1s = resp.json()['sha1s']
+ if len(sha1s) != count:
+ log.error("can't find %d parents of %s in %s: %s",
+ count, sha1, project, resp.json()['error'])
+ return sha1s
+
+ # XXX don't do this every time?
+ refresh(project)
+ # we want the one just before sha1; list two, return the second
+ sha1s = get_sha1s(project, sha1, 2)
+ if len(sha1s) == 2:
+ return sha1s[1]
+ else:
+ return None
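+
+# Usage sketch (hypothetical hash), as in the backtrack loop above:
+#   parent = find_git_parent('ceph', 'd0dfaeb')
+#   # 'parent' is the sha1 of the next-older commit, or None on failure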