#!/usr/bin/python3

import matplotlib.pyplot as plt
from numpy import *
import subprocess
import argparse
import glob
import csv
import sys
import os

# constants
html_name="index.html"
report_folder="ezbench_report/"

class Benchmark:
    def __init__(self, full_name):
        self.full_name = full_name
        self.prevValue = -1

class BenchResult:
    def __init__(self, commit, benchmark, data_raw_file, img_src_name):
        self.commit = commit
        self.benchmark = benchmark
        self.data_raw_file = data_raw_file
        self.img_src_name = img_src_name
        self.data = []

class Commit:
    def __init__(self, sha1, full_name, compile_log):
        self.sha1 = sha1
        self.full_name = full_name
        self.compile_log = compile_log
        self.results = []

benchmarks = []
commits = []

# parse the options
parser = argparse.ArgumentParser()
parser.add_argument("log_folder")
args = parser.parse_args()

# Look for the commit_list file
os.chdir(args.log_folder)
try:
    f = open("commit_list", "r")
    try:
        commitsLines = f.readlines()
    finally:
        f.close()
except IOError:
    sys.stderr.write("The log folder '{0}' does not contain a commit_list file\n".format(args.log_folder))
    sys.exit(1)

# Check that there are commits
if (len(commitsLines) == 0):
    sys.stderr.write("The commit_list file is empty\n")
    sys.exit(2)

# Gather all the information from the commits and generate the images
commits_txt = ""
table_entries_txt = ""
for commitLine in commitsLines:
    full_name = commitLine.strip(' \t\n\r')
    sha1 = commitLine.split()[0]
    compile_log = sha1 + "_compile_log"
    commit = Commit(sha1, full_name, compile_log)

    # find all the benchmarks
    benchFiles = glob.glob("{sha1}_bench_*".format(sha1=commit.sha1))
    benchs_txt = ""
    for benchFile in benchFiles:
        # Get the bench name
        bench_name = benchFile.replace("{sha1}_bench_".format(sha1=commit.sha1), "")
        if bench_name.endswith(".png"):
            continue

        # Find the right Benchmark or create one if none are found
        try:
            benchmark = next(b for b in benchmarks if b.full_name == bench_name)
        except StopIteration:
            benchmark = Benchmark(bench_name)
            benchmarks.append(benchmark)

        # Create the result object
        result = BenchResult(commit, benchmark, benchFile, report_folder + benchFile + ".png")

        # Read the data
        with open(benchFile, 'rt') as f:
            if (csv.Sniffer().has_header(f.read(1024))):
                f.seek(0)
                next(f)
            else:
                f.seek(0)
            reader = csv.reader(f)
            try:
                for row in reader:
                    result.data.append(float(row[0]))
            except csv.Error as e:
                sys.stderr.write('file %s, line %d: %s' % (benchFile, reader.line_num, e))
                sys.exit(3)

        # Add the result to the commit's results
        commit.results.append(result)

    # Add the commit to the list of commits
    commit.results = sorted(commit.results, key=lambda res: res.benchmark.full_name)
    commits.append(commit)

# Sort the list of benchmarks
benchmarks = sorted(benchmarks, key=lambda bench: bench.full_name)

def getResultsBenchmarkDiffs(benchmark):
    prevValue = -1
    results = []

    # Compute a report per application
    i = 0
    for commit in commits:
        for result in commit.results:
            if result.benchmark != benchmark:
                continue

            value = array(result.data).mean()
            if prevValue >= 0:
                diff = (value * 100.0 / prevValue) - 100.0
            else:
                diff = 0
            prevValue = value

            results.append([i, diff])
        i = i + 1

    return results

# Create a folder for the results
try:
    os.mkdir(report_folder)
except OSError:
    print("Error while creating the report folder")

# Generate the trend graph
plt.figure(figsize=(15,3))
plt.xlabel('Commit #')
plt.ylabel('Perf. diff. with the prev. commit (%)')
plt.grid(True)
for i in range(len(benchmarks)):
    data = getResultsBenchmarkDiffs(benchmarks[i])

    x_val = [x[0] for x in data]
    y_val = [x[1] for x in data]

    plt.plot(x_val, y_val, label=benchmarks[i].full_name)

#plt.xticks(range(len(x)), x_val, rotation='vertical')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3, ncol=3, mode="expand", borderaxespad=0.)
plt.savefig(report_folder + 'overview.svg', bbox_inches='tight')

# Generate the images (HACK, do that in python!)
for commit in commits:
    for result in commit.results:
        subprocess.call(['../../stats/test_report.R', result.data_raw_file, result.img_src_name])

# Generate the report
html_template="""
    <table>
        <tr>
            <th>Commit SHA1</th>
            <th>Geometric mean</th>
            {tbl_hdr_benchmarks}
        </tr>