git.apps.os.sepia.ceph.com Git - teuthology.git/commitdiff
Add argument parsing to make this a proper script.
author: Zack Cerza <zack@cerza.org>
Wed, 2 Oct 2013 15:57:11 +0000 (10:57 -0500)
committer: Zack Cerza <zack@cerza.org>
Wed, 2 Oct 2013 23:34:04 +0000 (18:34 -0500)
Signed-off-by: Zack Cerza <zack.cerza@inktank.com>
setup.py
teuthology/report.py [new file with mode: 0755]
teuthology/results.py [deleted file]

index 7fe4d1e4145bcdf7cdb1cbf84e16c7d07929ec5f..b330b945d6ed11f815ec7f6c7827eb7045933ba7 100644 (file)
--- a/setup.py
+++ b/setup.py
@@ -26,6 +26,7 @@ setup(
             'teuthology-updatekeys = teuthology.lock:update_hostkeys',
             'teuthology-coverage = teuthology.coverage:analyze',
             'teuthology-results = teuthology.suite:results',
+            'teuthology-report = teuthology.report:main',
             ],
         },
 
diff --git a/teuthology/report.py b/teuthology/report.py
new file mode 100755 (executable)
index 0000000..5317785
--- /dev/null
@@ -0,0 +1,246 @@
+#!/usr/bin/env python
+
+import os
+import yaml
+import json
+import re
+import httplib2
+import logging
+import argparse
+from textwrap import dedent
+
+from teuthology.config import config
+
+
+log = logging.getLogger(__name__)
+logging.basicConfig(level=logging.INFO)
+
+
class RequestFailedError(RuntimeError):
    """Raised when the results server answers a request with an error.

    Captures the HTTP status, reason and response body so the failure
    can be reported as one readable message.
    """

    def __init__(self, resp, content):
        """
        :param resp:    a response object with ``status`` and ``reason``
                        attributes (e.g. from httplib2)
        :param content: the raw response body; if it is JSON with a
                        'message' key that message is used, otherwise
                        the raw text is kept verbatim
        """
        self.status = resp.status
        self.reason = resp.reason
        self.content = content
        try:
            self.content_obj = json.loads(content)
            self.message = self.content_obj['message']
        # KeyError: valid JSON but no 'message' key; previously that
        # escaped and crashed the error-reporting path itself.
        except (ValueError, KeyError):
            self.message = self.content

    def __str__(self):
        templ = "Request failed with status {status}: {reason}: {message}"

        return templ.format(
            status=self.status,
            reason=self.reason,
            message=self.message,
        )
+
+
class ResultsSerializer(object):
    """Reads teuthology job archives off disk and serializes them as JSON."""

    # YAML fragments that together describe a job; later files override
    # earlier ones when keys collide.
    yamls = ('orig.config.yaml', 'config.yaml', 'info.yaml', 'summary.yaml')

    def __init__(self, archive_base):
        """
        :param archive_base: path to the directory holding one
                             subdirectory per run
        """
        self.archive_base = archive_base

    def json_for_job(self, run_name, job_id, pretty=False):
        """Return a JSON string describing one job.

        Merges every YAML file from ``self.yamls`` found in the job's
        archive directory; missing files are skipped.

        :param run_name: name of the run (subdirectory of archive_base)
        :param job_id:   the job's id (subdirectory of the run)
        :param pretty:   if True, sort keys and indent the output
        """
        job_archive_dir = os.path.join(self.archive_base,
                                       run_name,
                                       job_id)
        job_info = {}
        for yaml_name in self.yamls:
            yaml_path = os.path.join(job_archive_dir, yaml_name)
            if not os.path.exists(yaml_path):
                continue
            # open() instead of the Python-2-only file() builtin.
            with open(yaml_path) as yaml_file:
                partial_info = yaml.safe_load(yaml_file)
                if partial_info is not None:
                    job_info.update(partial_info)

        # Guarantee job_id is present even if no YAML file supplied it.
        if 'job_id' not in job_info:
            job_info['job_id'] = job_id

        if pretty:
            job_json = json.dumps(job_info, sort_keys=True, indent=4)
        else:
            job_json = json.dumps(job_info)

        return job_json

    def print_pretty_json(self, json_obj):
        """Log a JSON string with trailing whitespace stripped per line."""
        log.info('\n'.join([l.rstrip() for l in json_obj.splitlines()]))

    def jobs_for_run(self, run_name):
        """Return a dict mapping job_id -> job archive dir for one run.

        Only purely-numeric directory names count as jobs.  Returns an
        empty dict when the run's directory does not exist.
        """
        archive_dir = os.path.join(self.archive_base, run_name)
        if not os.path.isdir(archive_dir):
            return {}
        jobs = {}
        for item in os.listdir(archive_dir):
            # Raw string: job directories are named by their numeric id.
            if not re.match(r'\d+$', item):
                continue
            job_id = item
            job_dir = os.path.join(archive_dir, job_id)
            if os.path.isdir(job_dir):
                jobs[job_id] = job_dir
        return jobs

    @property
    def all_runs(self):
        """A list of the names of all runs in the archive (unsorted)."""
        archive_base = self.archive_base
        if not os.path.isdir(archive_base):
            return []
        runs = []
        for run_name in os.listdir(archive_base):
            if not os.path.isdir(os.path.join(archive_base, run_name)):
                continue
            runs.append(run_name)
        return runs
+
+
class ResultsPoster(object):
    """Submits runs and jobs from a local archive to the results server.

    NOTE(review): the server is assumed to answer POST /runs/ and
    POST or PUT /runs/<name>/ with JSON bodies — confirm against the
    service's API.
    """

    # File (in the current working directory) remembering the last run
    # successfully submitted, so an interrupted bulk submission resumes.
    last_run_file = 'last_successful_run'

    def __init__(self, archive_base, base_uri=None):
        """
        :param archive_base: path to the directory that holds run archives
        :param base_uri:     base URI of the results server; defaults to
                             config.results_server
        """
        self.archive_base = archive_base
        self.base_uri = base_uri or config.results_server
        self.base_uri = self.base_uri.rstrip('/')
        self.serializer = ResultsSerializer(archive_base)
        # When True, post_all_runs() records progress in last_run_file.
        self.save_last_run = True

    def post_all_runs(self):
        """Submit every run in the archive, resuming after self.last_run."""
        all_runs = self.serializer.all_runs
        last_run = self.last_run
        if self.save_last_run and last_run and last_run in all_runs:
            next_index = all_runs.index(last_run) + 1
            runs = all_runs[next_index:]
        else:
            runs = all_runs
        num_runs = len(runs)
        num_jobs = 0
        log.info("Posting %s runs", num_runs)
        for run in runs:
            job_count = self.post_run(run)
            num_jobs += job_count
            if self.save_last_run:
                self.last_run = run
        # A full pass succeeded; clear the resume marker.
        del self.last_run
        log.info("Total: %s jobs in %s runs", num_jobs, num_runs)

    def post_runs(self, run_names):
        """Submit each run in run_names and log a combined total."""
        num_jobs = 0
        for run_name in run_names:
            num_jobs += self.post_run(run_name)
        log.info("Total: %s jobs in %s runs", num_jobs, len(run_names))

    def post_run(self, run_name):
        """Create the run on the server, then submit each of its jobs.

        A run that already exists on the server is skipped (jobs and
        all).  Returns the number of jobs found locally for the run.

        :raises RequestFailedError: on any other non-200 response
        """
        jobs = self.serializer.jobs_for_run(run_name)
        log.info("{name} {jobs} jobs".format(
            name=run_name,
            jobs=len(jobs),
        ))
        if jobs:
            h = httplib2.Http()
            run_json = json.dumps({'name': run_name})
            resp, content = h.request(
                "{base}/runs/".format(base=self.base_uri, name=run_name),
                'POST',
                run_json,
                headers={'content-type': 'application/json'},
            )
            if resp.status == 200:
                for job_id in jobs.keys():
                    self.post_job(run_name, job_id)
            else:
                # Tolerate a non-JSON error body (previously this let
                # ValueError escape from json.loads, unlike post_job).
                try:
                    message = json.loads(content).get('message', '')
                except ValueError:
                    message = ''
                # FIXME: This will skip partially-submitted runs.
                if message.endswith('already exists'):
                    log.info("    already present; skipped")
                else:
                    raise RequestFailedError(resp, content)
        return len(jobs)

    def post_job(self, run_name, job_id):
        """POST one job's JSON; fall back to PUT if it already exists.

        :raises RequestFailedError: if the final request is not a 200
        """
        job_json = self.serializer.json_for_job(run_name, job_id)
        h = httplib2.Http()
        resp, content = h.request(
            "{base}/runs/{name}/".format(base=self.base_uri, name=run_name,),
            'POST',
            job_json,
            headers={'content-type': 'application/json'},
        )
        try:
            message = json.loads(content).get('message', '')
        except ValueError:
            message = ''

        if message.endswith('already exists'):
            resp, content = h.request(
                "{base}/runs/{name}/".format(
                    base=self.base_uri,
                    name=run_name,),
                'PUT',
                job_json,
                headers={'content-type': 'application/json'},
            )
        if resp.status != 200:
            raise RequestFailedError(resp, content)
        return job_id

    @property
    def last_run(self):
        """The name of the last run successfully submitted, or None.

        Cached in memory after the first read; otherwise loaded from
        last_run_file on disk.
        """
        # Single-underscore attribute on purpose: the previous
        # double-underscore name was mangled to _ResultsPoster__last_run,
        # so hasattr(self, '__last_run') was always False and the
        # in-memory cache never hit.
        if getattr(self, '_last_run', None) is not None:
            return self._last_run
        elif os.path.exists(self.last_run_file):
            # open() instead of the Python-2-only file() builtin.
            with open(self.last_run_file) as f:
                self._last_run = f.read().strip()
            return self._last_run

    @last_run.setter
    def last_run(self, run_name):
        self._last_run = run_name
        with open(self.last_run_file, 'w') as f:
            f.write(run_name)

    @last_run.deleter
    def last_run(self):
        self._last_run = None
        if os.path.exists(self.last_run_file):
            os.remove(self.last_run_file)
+
+
def parse_args():
    """Build the command-line interface and return the parsed namespace."""
    no_save_help = dedent("""By default, when submitting all runs, we
                        remember the last successful submission in a file
                        called 'last_successful_run'. Pass this flag to disable
                        that behavior.""")
    ap = argparse.ArgumentParser(
        description="Submit test results to a web service")
    ap.add_argument('-a', '--archive', required=True,
                    help="The base archive directory")
    ap.add_argument('-r', '--run', nargs='*',
                    help="A run (or list of runs) to submit")
    ap.add_argument('--all-runs', action='store_true',
                    help="Submit all runs in the archive")
    ap.add_argument('-n', '--no-save', dest='save',
                    action='store_false', help=no_save_help)
    return ap.parse_args()
+
+
def main():
    """Entry point: parse arguments and submit the requested runs."""
    args = parse_args()
    archive_base = os.path.abspath(os.path.expanduser(args.archive))
    poster = ResultsPoster(archive_base)
    if not args.save:
        poster.save_last_run = False
    if args.run and len(args.run) > 1:
        poster.post_runs(args.run)
    elif args.run:
        poster.post_run(args.run[0])
    elif args.all_runs:
        poster.post_all_runs()
    else:
        # Previously this fell through silently; tell the user why
        # nothing happened.
        log.warning("Nothing to do; pass --run or --all-runs")


if __name__ == "__main__":
    main()
diff --git a/teuthology/results.py b/teuthology/results.py
deleted file mode 100755 (executable)
index 342a07a..0000000
+++ /dev/null
@@ -1,199 +0,0 @@
-#!/usr/bin/env python
-
-import sys
-import os
-import yaml
-import json
-import re
-import httplib2
-import logging
-
-from teuthology.config import config
-
-
-log = logging.getLogger(__name__)
-
-
-class RequestFailedError(RuntimeError):
-    def __init__(self, resp, content):
-        self.status = resp.status
-        self.reason = resp.reason
-        self.content = content
-        try:
-            self.content_obj = json.loads(content)
-            self.message = self.content_obj['message']
-        except ValueError:
-            #self.message = '<no message>'
-            self.message = self.content
-
-    def __str__(self):
-        templ = "Request failed with status {status}: {reason}: {message}"
-
-        return templ.format(
-            status=self.status,
-            reason=self.reason,
-            message=self.message,
-        )
-
-
-class ResultsSerializer(object):
-    yamls = ('orig.config.yaml', 'config.yaml', 'info.yaml', 'summary.yaml')
-
-    def __init__(self, archive_base):
-        self.archive_base = archive_base
-
-    def json_for_job(self, run_name, job_id, pretty=False):
-        job_archive_dir = os.path.join(self.archive_base,
-                                       run_name,
-                                       job_id)
-        job_info = {}
-        for yaml_name in self.yamls:
-            yaml_path = os.path.join(job_archive_dir, yaml_name)
-            if not os.path.exists(yaml_path):
-                continue
-            with file(yaml_path) as yaml_file:
-                partial_info = yaml.safe_load(yaml_file)
-                if partial_info is not None:
-                    job_info.update(partial_info)
-
-        if 'job_id' not in job_info:
-            job_info['job_id'] = job_id
-
-        if pretty:
-            job_json = json.dumps(job_info, sort_keys=True, indent=4)
-        else:
-            job_json = json.dumps(job_info)
-
-        return job_json
-
-    def print_pretty_json(self, json_obj):
-        log.info('\n'.join([l.rstrip() for l in json_obj.splitlines()]))
-
-    def jobs_for_run(self, run_name):
-        archive_dir = os.path.join(self.archive_base, run_name)
-        if not os.path.isdir(archive_dir):
-            return {}
-        jobs = {}
-        for item in os.listdir(archive_dir):
-            if not re.match('\d+$', item):
-                continue
-            job_id = item
-            job_dir = os.path.join(archive_dir, job_id)
-            if os.path.isdir(job_dir):
-                jobs[job_id] = job_dir
-        return jobs
-
-    @property
-    def all_runs(self):
-        archive_base = self.archive_base
-        if not os.path.isdir(archive_base):
-            return []
-        runs = []
-        for run_name in os.listdir(archive_base):
-            if not os.path.isdir(os.path.join(archive_base, run_name)):
-                continue
-            runs.append(run_name)
-        return runs
-
-
-class ResultsPoster(object):
-    def __init__(self, archive_base, base_uri=None):
-        self.archive_base = archive_base
-        self.base_uri = base_uri or config.results_server
-        self.base_uri = self.base_uri.rstrip('/')
-        self.serializer = ResultsSerializer(archive_base)
-
-    def post_all_runs(self):
-        all_runs = self.serializer.all_runs
-        last_run = self.last_run
-        if last_run and last_run in all_runs:
-            next_index = all_runs.index(last_run) + 1
-            runs = all_runs[next_index:]
-        else:
-            runs = all_runs
-        num_runs = len(runs)
-        num_jobs = 0
-        log.info("Posting %s runs", num_runs)
-        for run in runs:
-            job_count = self.post_run(run)
-            num_jobs += job_count
-            self.last_run = run
-        log.info("Total: %s jobs in %s runs", num_jobs, num_runs)
-
-    def post_run(self, run_name):
-        jobs = self.serializer.jobs_for_run(run_name)
-        log.info("{name} {jobs} jobs".format(
-            name=run_name,
-            jobs=len(jobs),
-        ))
-        if jobs:
-            h = httplib2.Http()
-            run_json = json.dumps({'name': run_name})
-            resp, content = h.request(
-                "{base}/runs/".format(base=self.base_uri, name=run_name),
-                'POST',
-                run_json,
-                headers={'content-type': 'application/json'},
-            )
-            if resp.status == 200:
-                for job_id in jobs.keys():
-                    self.post_job(run_name, job_id)
-            elif resp.status != 200:
-                message = json.loads(content).get('message', '')
-                if message.endswith('already exists'):
-                    log.info("    already present; skipped")
-                else:
-                    raise RequestFailedError(resp, content)
-        return len(jobs)
-
-    def post_job(self, run_name, job_id):
-        job_json = self.serializer.json_for_job(run_name, job_id)
-        h = httplib2.Http()
-        resp, content = h.request(
-            "{base}/runs/{name}/".format(base=self.base_uri, name=run_name,),
-            'POST',
-            job_json,
-            headers={'content-type': 'application/json'},
-        )
-        try:
-            message = json.loads(content).get('message', '')
-        except ValueError:
-            message = ''
-
-        if message.endswith('already exists'):
-            resp, content = h.request(
-                "{base}/runs/{name}/".format(
-                    base=self.base_uri,
-                    name=run_name,),
-                'PUT',
-                job_json,
-                headers={'content-type': 'application/json'},
-            )
-        if resp.status != 200:
-            raise RequestFailedError(resp, content)
-        return job_id
-
-    @property
-    def last_run(self):
-        if hasattr(self, '__last_run'):
-            return self.__last_run
-        elif os.path.exists('last_successful_run'):
-            with file('last_successful_run') as f:
-                self.__last_run = f.read().strip()
-            return self.__last_run
-
-    @last_run.setter
-    def last_run(self, run_name):
-        self.__last_run = run_name
-        with file('last_successful_run', 'w') as f:
-            f.write(run_name)
-
-
-def main(argv):
-    archive_base = os.path.abspath(os.path.expanduser(argv[1]))
-    poster = ResultsPoster(archive_base)
-    poster.post_all_runs()
-
-
-if __name__ == "__main__":
-    main(sys.argv)