Commit c8e9d551 authored by John Zhang

output processing for perfevent callback

parent 2ed6de2a
@@ -122,7 +122,7 @@ void perfevent_read(Callback* cb) {
     fprintf(stdout, "============================ Tabulate Statistics ============================\n");
     for (i=0; i < cb->perfevent_num_fds; i++)
-        fprintf(stdout, "%s ", cb->perfevent_fds[i].name);
+        fprintf(stdout, "%s\t", cb->perfevent_fds[i].name);
     fprintf(stdout, "\n");
     for (i=0; i < cb->perfevent_num_fds; i++) {
@@ -131,10 +131,10 @@ void perfevent_read(Callback* cb) {
         ret = read(cb->perfevent_fds[i].fd, values, sizeof(values));
         if (ret < sizeof(values)) {
-            if (ret == -1)
-                err(1, "cannot read results: %s", strerror(errno));
-            else
-                warnx("could not read event%d", i);
+            if (ret == -1)
+                err(1, "cannot read results: %s", strerror(errno));
+            else
+                warnx("could not read event%d", i);
         }
         /*
          * scaling is systematic because we may be sharing the PMU and
@@ -144,12 +144,12 @@ void perfevent_read(Callback* cb) {
         ratio = perf_scale_ratio(values);
         if (ratio == 1.0)
-            fprintf(stdout, "%lld ", (long long int) val);
-        else
-            if (ratio == 0.0)
-                fprintf(stdout, "NO_VALUE ");
-            else
-                fprintf(stdout, "%lld-SCALED-%.2f%% ", (long long int) val, ratio*100.0);
+            fprintf(stdout, "%lld\t", (long long int) val);
+        else
+            if (ratio == 0.0)
+                fprintf(stdout, "NO_VALUE\t");
+            else
+                fprintf(stdout, "%lld-SCALED-%.2f%%\t", (long long int) val, ratio*100.0);
     }
     fprintf(stdout, "\n=============================================================================\n");
 }
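With these format strings, the Tabulate Statistics block that perfevent_read writes to stdout looks like the following (the event names and counts are illustrative; fields are separated by single tab characters, and both the name line and the value line end with a trailing tab):

============================ Tabulate Statistics ============================
instructions	cycles	
2048	4096	
=============================================================================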
@@ -32,7 +32,7 @@ class Callback:
             setattr(self, key, conf_d[key])
         libext = '.dylib' if sys.platform == 'darwin' else '.so'
-        self.dylib = 'libcb_%(name)s' % conf_d + libext  # full path set later at compile_dylib
+        self.dylib = ts.output_dir / ('libcb_%(name)s' % conf_d + libext)
         self.configure(ts)
@@ -40,8 +40,6 @@ class Callback:
         pass

     def compile_dylib(self, output_dir, env):
-        self.dylib = output_dir / self.dylib
-
         cmd = []
         cmd.append(env.get('CC', 'clang'))
@@ -88,7 +86,7 @@ class Callback:
 class ClockCallback(Callback):
     def extract_stat(self, stdout, stderr):
-        return float(stdout.split()[-1])
+        return {'time': float(stdout.split()[-1])}

 class PrintCallback(Callback):
@@ -112,11 +110,16 @@ class PerfEventCallback(Callback):
         self.flags.append('-pthread')

     def extract_stat(self, stdout, stderr):
-        raise NotImplementedError
+        lines = stdout.split('\n')
+        idx_start = lines.index('============================ Tabulate Statistics ============================')
+        idx_end = lines.index('=============================================================================')
+        events = lines[idx_start + 1].split('\t')
+        data = list(map(int, lines[idx_start + 2].split('\t')[:-1]))
+        return dict(zip(events, data))

 CALLBACK_BY_NAME = {
     'clock': ClockCallback,
     'print': PrintCallback,
-    'perfevnet': PerfEventCallback,
+    'perfevent': PerfEventCallback,
 }
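For reference, a minimal standalone sketch of the parsing the new extract_stat performs; the event names and counts are made up for illustration, and HEADER/FOOTER are just local names for the two banner strings emitted by perfevent_read:

# Illustrative only: mirrors PerfEventCallback.extract_stat on a fabricated stdout.
HEADER = '============================ Tabulate Statistics ============================'
FOOTER = '============================================================================='

sample_stdout = '\n'.join([
    HEADER,
    'instructions\tcycles\t',   # event-name line: one name per fd, each followed by a tab
    '2048\t4096\t',             # value line: one count per fd, each followed by a tab
    FOOTER,
    '',
])

lines = sample_stdout.split('\n')
idx_start = lines.index(HEADER)
events = lines[idx_start + 1].split('\t')                       # ['instructions', 'cycles', '']
counts = list(map(int, lines[idx_start + 2].split('\t')[:-1]))  # [2048, 4096]
print(dict(zip(events, counts)))  # {'instructions': 2048, 'cycles': 4096}; zip drops the trailing ''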
@@ -137,10 +137,10 @@ class TaskSet:
-            try:
-                ts.tasks.append(Task(ts, task_name, **task_conf))
-            except Exception as e:
-                task_print(task_name,
-                           crayons.red('parsing configuration failed.'))
-                logger.critical(crayons.red(str(e)))
+            # ts.tasks.append(Task(ts, task_name, **task_conf))
+            # task_print(task_name,
+            #            crayons.red('parsing configuration failed.'))
+            # logger.critical(crayons.red(str(e)))
+            ts.tasks.append(Task(ts, task_name, **task_conf))
         return ts
@@ -183,8 +183,8 @@ class TaskSet:
                 res, t_proc = task.run(target)
                 task.add_datapoint(res.stdout, res.stderr, t_proc)
                 data[task].append({
-                    'stdout': str(res.stdout, encoding='utf-8'),
-                    'stderr': str(res.stderr, encoding='utf-8'),
+                    'stdout': res.stdout,
+                    'stderr': res.stderr,
                     't_proc': t_proc
                 })
             except ExecutionFailure as e:
@@ -208,10 +208,6 @@ class TaskSet:
         with self.resfile.open('w') as fp:
             json.dump(record, fp, indent=2, separators=(', ', ': '))
-        # TODO: restructure this
-        for task in data:
-            task.aggregate_datapoint()
-
         self.results = {task.name: task.get_result() for task in data}
         return self.results
@@ -261,8 +257,7 @@ class Task:
         self.config = conf
-        self.data_callback = []
-        self.data_t_proc = []
+        self.stats = []

     def compile(self):
         if self.lang_cls.compiled:
@@ -279,20 +274,13 @@ class Task:
     # Results and data should not be part of a taskset/task,
     # but rather the Result should be *about* a TaskSet
     def add_datapoint(self, stdout, stderr, t_proc):
-        self.data_callback.append(self.callback.extract_stat(stdout, stderr))
-        self.data_t_proc.append(float(t_proc))
-
-    def aggregate_datapoint(self):
-        self.result_callback = Result(self.data_callback,
-                                      "{}:{} callback".format(self.taskset.name,
-                                                              self.name))
-        self.result_t_proc = Result(self.data_t_proc,
-                                    "{}:{} t_proc".format(self.taskset.name,
-                                                          self.name))
+        stat_d = self.callback.extract_stat(stdout, stderr)
+        stat_d['t_proc'] = float(t_proc)
+        self.stats.append(stat_d)

     def get_result(self):
-        return SimpleNamespace(callback=self.result_callback,
-                               t_proc=self.result_t_proc)
+        name = "{}:{} callback".format(self.taskset.name, self.name)
+        return Result(self.stats, name)

     def __str__(self):
         return self.name
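In this structure, every datapoint that add_datapoint appends to self.stats is one flat dict: the callback's own statistics plus the measured process time under the key 't_proc'. Illustrative examples (the perfevent event names and all numbers are made up):

    {'instructions': 2048, 'cycles': 4096, 't_proc': 0.137}   # PerfEventCallback datapoint
    {'time': 0.42, 't_proc': 0.45}                            # ClockCallback datapoint

get_result then wraps the accumulated list in a single Result named "<taskset>:<task> callback".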
@@ -40,11 +40,11 @@ class ExecutionFailure(Exception):
         with errlog_file.open('w') as fp:
             fp.write('---------------- stdout ----------------\n')
             if self.exec_res.stdout:
-                fp.write(str(self.exec_res.stdout, encoding='utf-8'))
+                fp.write(self.exec_res.stdout)
             fp.write('\n')
             fp.write('---------------- stderr ----------------\n')
             if self.exec_res.stderr:
-                fp.write(str(self.exec_res.stderr, encoding='utf-8'))
+                fp.write(self.exec_res.stderr)

     def __str__(self):
         return "Executing '%s' failed." % \
@@ -62,7 +62,9 @@ def run_in_subproc(cmd, env=None, **kwds):
     res = subproc.Popen(cmd, stdout=subproc.PIPE, stderr=subproc.PIPE,
                         env=env, **kwds)
     so, se = res.communicate()
-    res = SimpleNamespace(stdout=so, stderr=se, returncode=res.returncode)
+    res = SimpleNamespace(stdout=str(so, encoding='utf-8'),
+                          stderr=str(se, encoding='utf-8'),
+                          returncode=res.returncode)
     ru1 = resource.getrusage(resource.RUSAGE_CHILDREN)
     t1 = ru1.ru_utime + ru1.ru_stime