jobs[job_id] = job_dir
return jobs
+ def running_jobs_for_run(self, run_name):
+ """
+ Like jobs_for_run(), but only returns jobs with no summary.yaml
+
+ :param run_name: The name of the run.
+ :returns: A dict like: {'1': '/path/to/1', '2': '/path/to/2'}
+ """
+ jobs = self.jobs_for_run(run_name)
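+ # A finished job writes summary.yaml into its job directory, so drop
+ # any job that has one; whatever remains is still running.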
+ for name in jobs.keys():
+ if os.path.exists(os.path.join(jobs[name], 'summary.yaml')):
+ jobs.pop(name)
+ return jobs
+
@property
def all_runs(self):
"""
with file(summary_path, 'w') as yfile:
yaml.safe_dump(job['summary'], yfile)
- def create_fake_run(self, run_name, job_count, yaml_path):
+ def create_fake_run(self, run_name, job_count, yaml_path, num_hung=0):
"""
Creates a fake run using run_name. Uses the YAML specified for each
job's config.yaml
assert os.path.exists(yaml_path)
assert job_count > 0
jobs = []
+ made_hung = 0
for i in range(job_count):
- jobs.append(self.get_random_metadata(run_name))
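+ # Mark the first num_hung jobs as hung so they end up without a
+ # summary.yaml in the fake archive; the rest are completed jobs.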
+ if made_hung < num_hung:
+ jobs.append(self.get_random_metadata(run_name, hung=True))
+ made_hung += 1
+ else:
+ jobs.append(self.get_random_metadata(run_name, hung=False))
#job_config = yaml.safe_load(yaml_path)
self.populate_archive(run_name, jobs)
for job in jobs:
got_jobs = self.reporter.serializer.jobs_for_run(run_name)
assert sorted(job_ids) == sorted(got_jobs.keys())
+ def test_running_jobs_for_run(self):
+ run_name = "test_jobs_for_run"
+ yaml_path = "examples/3node_ceph.yaml"
+ job_count = 10
+ num_hung = 3
+ self.archive.create_fake_run(run_name, job_count, yaml_path,
+ num_hung=num_hung)
+
+ got_jobs = self.reporter.serializer.running_jobs_for_run(run_name)
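+ # Only the hung jobs lack a summary.yaml, so they are the only ones
+ # still reported as running.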
+ assert len(got_jobs) == num_hung
+
def test_json_for_job(self):
run_name = "test_json_for_job"
yaml_path = "examples/3node_ceph.yaml"