--- /dev/null
+import sys
+
+import docopt
+
+import teuthology.gencov
+
+doc = """
+usage: teuthology-gencov -h
+ teuthology-gencov <run-name>
+
+ """
+
+def main(argv=sys.argv[1:]):
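+    """Parse the command line with docopt and hand off to teuthology.gencov."""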
+ args = docopt.docopt(doc, argv=argv)
+ teuthology.gencov.main(args)
+
'teuthology-queue = scripts.queue:main',
'teuthology-prune-logs = scripts.prune_logs:main',
'teuthology-describe-tests = scripts.describe_tests:main',
+ 'teuthology-gencov = scripts.gencov:main',
],
},
--- /dev/null
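+"""Generate a top-level index.html under /a/code_coverage_logs/ linking each
+per-run coverage report (<run>/index.html) to its pulpito results page."""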
+import os
+
+N_ENTRIES = 10
+genpath = "/a/code_coverage_logs/"
+
+
+def gen_html(genpath, N_ENTRIES):
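+    """Write <genpath>/index.html with a table of the most recent N_ENTRIES
+    run directories, linking each one to its coverage report and pulpito page."""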
+    if not os.path.exists(genpath):
+        print 'path does not exist: ' + genpath
+        return
+    # newest run directories first; keep only the most recent N_ENTRIES
+    files = sorted([genpath + ent for ent in os.listdir(genpath)],
+                   key=os.path.getctime, reverse=True)[:N_ENTRIES]
+    body = ""
+    for ent in files:
+        if os.path.isdir(ent):
+            name = os.path.basename(ent)
+            body = body + \
+                "<TR>\n" + \
+                "<TD><a href=\"{}/index.html\">{}</a></TD>".format(name, name) + \
+                "<TD><a href=\"http://pulpito.ceph.redhat.com/{}\">Run</a></TD>".format(name) + \
+                "</TR>\n"
+
+    page = """<html>
+<title>Code Coverage</title>
+<body>
+<h1><u>Downstream: CEPH Code Coverage</u></h1><br/>
+<TABLE BORDER=5>
+<TR>
+<TH COLSPAN="2">
+<H3><BR>CEPH COVERAGE RUNS</H3>
+</TH>
+</TR>
+<TR>
+<TH>Coverage results</TH>
+<TH>Teuthology results</TH>
+</TR>
+{}
+</TABLE>
+</body>
+</html>
+""".format(body)
+    with open(genpath + "index.html", "w+") as fd:
+        fd.write(page)
+
+
+
+if __name__ == "__main__":
+    gen_html(genpath, N_ENTRIES)
--- /dev/null
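+"""Merge the per-node lcov tracefiles (*.info) collected for a teuthology run,
+filter out system headers and libraries, and render an HTML report with genhtml."""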
+import os
+import sys
+import logging
+import subprocess
+import shlex
+
+from teuthology import coverage_report
+
+log = logging.getLogger()
+hdlr = logging.FileHandler('/a/code_coverage_logs/coverage.log')
+formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
+hdlr.setFormatter(formatter)
+log.addHandler(hdlr)
+log.setLevel(logging.INFO)
+
+def generate_coverage_compile(cover_dir):
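+    """Combine all *.info tracefiles in cover_dir with lcov, strip /usr/include
+    and /usr/lib entries, and run genhtml to produce cover_dir/index.html."""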
+    log.info("Generating merged lcov coverage report in %s", cover_dir)
+    merged_total = cover_dir + "/total.info"
+    filtered_total = cover_dir + "/filtered_total.info"
+    cmd = "lcov "
+    ilist = [ent for ent in os.listdir(cover_dir) if ent.endswith('.info')]
+    for ent in ilist:
+        # point the source paths recorded in each .info file at
+        # /tmp/build/<run-name> instead of /sourcebuild
+        subprocess.check_call(
+            args=[
+                'sed', '-i',
+                's|/sourcebuild|/tmp/build/{}|'.format(
+                    os.path.basename(cover_dir)),
+                cover_dir + "/" + ent]
+        )
+
+    for ent in ilist:
+        # merge each tracefile, using the filename prefix (before '_')
+        # as the lcov test name
+        cmd = cmd + " -a " + cover_dir + "/" + ent + " -t " + ent.split("_")[0]
+    cmd = cmd + " -o " + merged_total
+    log.info(cmd)
+    proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE)
+    proc_stdout = proc.communicate()[0]
+    log.info(proc_stdout)
+    assert os.path.exists(merged_total)
+
+    # apply the same path substitution to the merged tracefile
+    subprocess.check_call(
+        args=[
+            'sed', '-i',
+            's|/sourcebuild|/tmp/build/{}|'.format(
+                os.path.basename(cover_dir)),
+            merged_total]
+    )
+
+    cmd = ("lcov --remove " + merged_total +
+           " '/usr/include/*' '/usr/lib/*' -o " + filtered_total)
+    log.info(cmd)
+    proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE)
+    proc_stdout = proc.communicate()[0]
+    log.info(proc_stdout)
+    assert os.path.exists(filtered_total)
+
+    cmd = "genhtml -o " + cover_dir + " " + filtered_total
+    log.info(cmd)
+    proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE)
+    proc_stdout = proc.communicate()[0]
+    log.info(proc_stdout)
+    assert os.path.exists(cover_dir + "/index.html")
+
+
+
+def main(args):
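+    """Build the coverage report for <run-name> under /a/code_coverage_logs/
+    and regenerate the top-level index page."""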
+    basedir = "/a/code_coverage_logs/"
+    coverdir = os.path.join(basedir, args['<run-name>'])
+    generate_coverage_compile(coverdir)
+    coverage_report.gen_html(basedir, 10)
if not os.path.exists(localdir):
os.mkdir(localdir)
r = remote.get_tar_stream(remotedir, sudo=True)
- tar = tarfile.open(mode='r|gz', fileobj=r.stdout)
+ tar = tarfile.open(mode='r|gz', fileobj=r.stdout, dereference=True)
while True:
ti = tar.next()
if ti is None:
# ignore silently; easier to just create leading dirs below
             # XXX this means empty dirs are not transferred
pass
- elif ti.isfile():
+ elif ti.isfile() or ti.issym():
sub = safepath.munge(ti.name)
safepath.makedirs(root=localdir, path=os.path.dirname(sub))
tar.makefile(ti, targetpath=os.path.join(localdir, sub))
args.append('sudo')
args.extend([
'tar',
- 'cz',
+ 'czh',
'-f', remote_temp_path,
'-C', path,
'--',
args.append('sudo')
args.extend([
'tar',
- 'cz',
+ 'czh',
'-f', '-',
'-C', path,
'--',
import time
import logging
import subprocess
+import shlex
from collections import OrderedDict
from textwrap import dedent
from textwrap import fill
log = logging.getLogger(__name__)
+
UNFINISHED_STATUSES = ('queued', 'running', 'waiting')
def main(args):
log = logging.getLogger(__name__)
+
if args['--verbose']:
teuthology.log.setLevel(logging.DEBUG)
def results(archive_dir, name, email, timeout, dry_run):
+    log.info('Processing results for run %s', name)
starttime = time.time()
if timeout:
body=body,
)
finally:
+        log.info('Generating coverage report')
generate_coverage(archive_dir, name)
+def generate_coverage_compile(cover_dir):
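+    """Merge the *.info tracefiles under cover_dir with lcov (branch coverage
+    enabled), filter out system paths, and generate an HTML report with genhtml."""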
+    log.info("Generating merged lcov coverage report in %s", cover_dir)
+    merged_total = cover_dir + "/total.info"
+    filtered_total = cover_dir + "/filtered_total.info"
+    cmd = "lcov --rc lcov_branch_coverage=1 "
+    ilist = [ent for ent in os.listdir(cover_dir) if ent.endswith('.info')]
+    for ent in ilist:
+        # point the source paths recorded in each .info file at
+        # /tmp/build/<run-name> instead of /sourcebuild
+        subprocess.check_call(
+            args=[
+                'sed', '-i',
+                's|/sourcebuild|/tmp/build/{}|'.format(
+                    os.path.basename(cover_dir)),
+                cover_dir + "/" + ent]
+        )
+
+    for ent in ilist:
+        # merge each tracefile, using the filename prefix (before '_')
+        # as the lcov test name
+        cmd = cmd + " -a " + cover_dir + "/" + ent + " -t " + ent.split("_")[0]
+    cmd = cmd + " -o " + merged_total
+    log.info(cmd)
+    proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE)
+    proc_stdout = proc.communicate()[0]
+    log.info(proc_stdout)
+    assert os.path.exists(merged_total)
+
+    # apply the same path substitution to the merged tracefile
+    subprocess.check_call(
+        args=[
+            'sed', '-i',
+            's|/sourcebuild|/tmp/build/{}|'.format(
+                os.path.basename(cover_dir)),
+            merged_total]
+    )
+
+    cmd = ("lcov --remove " + merged_total +
+           " '/usr/include/*' '/usr/lib/*' -o " + filtered_total)
+    log.info(cmd)
+    proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE)
+    proc_stdout = proc.communicate()[0]
+    log.info(proc_stdout)
+    assert os.path.exists(filtered_total)
+
+    cmd = "genhtml -o " + cover_dir + " " + filtered_total
+    log.info(cmd)
+    proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE)
+    proc_stdout = proc.communicate()[0].strip()
+    log.info(proc_stdout)
+    assert os.path.exists(cover_dir + "/index.html")
+
def generate_coverage(archive_dir, name):
coverage_config_keys = ('coverage_output_dir', 'coverage_html_dir',
- 'coverage_tools_dir')
+ 'coverage_tools_dir')
for key in coverage_config_keys:
- if key not in config.to_dict():
- log.warn(
- "'%s' not in teuthology config; skipping coverage report",
- key)
- return
+ if key not in config.to_dict():
+ log.warn(
+ "'%s' not in teuthology config; skipping coverage report",
+ key)
+ return
log.info('starting coverage generation')
subprocess.Popen(
- args=[
- os.path.join(os.path.dirname(sys.argv[0]), 'teuthology-coverage'),
- '-v',
- '-o',
- os.path.join(config.coverage_output_dir, name),
- '--html-output',
- os.path.join(config.coverage_html_dir, name),
- '--cov-tools-dir',
- config.coverage_tools_dir,
- archive_dir,
- ],
+ args=[
+ os.path.join(os.path.dirname(sys.argv[0]), 'teuthology-coverage'),
+ '-v',
+ '-o',
+ os.path.join(config.coverage_output_dir, name),
+ '--html-output',
+ os.path.join(config.coverage_html_dir, name),
+ '--cov-tools-dir',
+ config.coverage_tools_dir,
+ archive_dir,
+ ],
)
if suite_repo:
teuth_config.ceph_qa_suite_git_url = suite_repo
- # overwrite the config values of os_{type,version} if corresponding
+ # overwrite the config values of os_{type,version} if corresponding
# command-line arguments are provided
if os_type:
config["os_type"] = os_type
config = dict((id_, a) for id_ in roles)
for role, ls in config.iteritems():
- (remote,) = ctx.cluster.only(role).remotes.iterkeys()
- log.info('Running commands on role %s host %s', role, remote.name)
- for c in ls:
- c.replace('$TESTDIR', testdir)
- if retry:
- with safe_while(sleep=sleep_for_retry, tries=retry,
- action="exec_with_retry") as proceed:
- while proceed():
- proc = remote.run(
- args=[
- 'sudo',
- 'TESTDIR={tdir}'.format(tdir=testdir),
- 'bash',
- '-c',
- c],
- timeout=timeout,
- check_status=False,
- wait=True,
- )
- if proc.exitstatus == 0:
- break
- else:
- remote.run(
- args=[
- 'sudo',
- 'TESTDIR={tdir}'.format(tdir=testdir),
- 'bash',
- '-c',
- c],
- timeout=timeout
- )
+ if 'mon' in role or 'osd' in role \
+ or 'client' in role:
+ (remote,) = ctx.cluster.only(role).remotes.iterkeys()
+ log.info('Running commands on role %s host %s', role, remote.name)
+ for c in ls:
+                c = c.replace('$TESTDIR', testdir)
+ if retry:
+ with safe_while(sleep=sleep_for_retry, tries=retry,
+ action="exec_with_retry") as proceed:
+ while proceed():
+ proc = remote.run(
+ args=[
+ 'sudo',
+ 'TESTDIR={tdir}'.format(tdir=testdir),
+ 'bash',
+ '-c',
+ c],
+ timeout=timeout,
+ check_status=False,
+ wait=True,
+ )
+ if proc.exitstatus == 0:
+ break
+ else:
+ remote.run(
+ args=[
+ 'sudo',
+ 'TESTDIR={tdir}'.format(tdir=testdir),
+ 'bash',
+ '-c',
+ c],
+ timeout=timeout
+ )
# Check for coredumps and pull binaries
fetch_binaries_for_coredumps(path, rem)
- log.info('Removing archive directory...')
+        log.info('Removing archive directory with sudo...')
run.wait(
ctx.cluster.run(
- args=['rm', '-rf', '--', archive_dir],
+ args=['sudo', 'rm', '-rf', '--', archive_dir],
wait=False,
),
)