
framework/output: save classifiers in Result

Save classifiers at the Result level as well as at the Metric level. Reason:
when processing output, one might want to filter complete results as well as
individual metrics. While it is in theory possible to derive a job's
classifiers by extracting the classifiers common to all of its metrics, this
fails when no metrics were generated for the job (note that one might still
want to process the output in this case, e.g. for the artifacts).
Sergei Trofimov 2018-02-15 10:14:57 +00:00 committed by Marc Bonnici
parent c5659d51f1
commit 1e21bd2ca8
3 changed files with 24 additions and 1 deletion
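
To illustrate the use case described in the commit message (this snippet is not part of the change; filter_outputs and run_jobs are hypothetical names), an output-processing script can now filter whole job outputs on their Result-level classifiers, even for jobs that produced no metrics:

def filter_outputs(job_outputs, **classifiers):
    # Keep only the job outputs whose classifiers match all given values.
    # output.classifiers is populated from the job's Result, so this works
    # even when output.metrics is empty.
    return [output for output in job_outputs
            if all(output.classifiers.get(k) == v
                   for k, v in classifiers.items())]

dhrystone_jobs = filter_outputs(run_jobs, workload='dhrystone')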

wa/framework/execution.py

@@ -131,7 +131,8 @@ class ExecutionContext(object):
         if not self.job_queue:
             raise RuntimeError('No jobs to run')
         self.current_job = self.job_queue.pop(0)
-        self.current_job.output = init_job_output(self.run_output, self.current_job)
+        job_output = init_job_output(self.run_output, self.current_job)
+        self.current_job.set_output(job_output)
         self.update_job_state(self.current_job)
         self.tm.start()
         return self.current_job

wa/framework/job.py

@@ -1,4 +1,5 @@
 import logging
+from copy import copy
 from datetime import datetime

 from wa.framework import pluginloader, signal, instruments
@@ -56,6 +57,10 @@ class Job(object):
         else:
             self.workload = self._workload_cache[self.id]

+    def set_output(self, output):
+        output.classifiers = copy(self.classifiers)
+        self.output = output
+
     def initialize(self, context):
         self.logger.info('Initializing job {} [{}]'.format(self.id, self.iteration))
         with signal.wrap('WORKLOAD_INITIALIZED', self, context):
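
As a standalone illustration (not WA code) of the behaviour set_output() introduces: the job's classifiers are copied onto the output, so later additions to the output's classifiers do not affect the job's own dict (a shallow copy; nested values would still be shared). The classifier names below are made up.

from collections import OrderedDict
from copy import copy

job_classifiers = OrderedDict([('workload', 'dhrystone'), ('iteration', 1)])

output_classifiers = copy(job_classifiers)      # what set_output() does
output_classifiers['outcome'] = 'no-metrics'    # hypothetical output-side addition

print(job_classifiers)     # unchanged by the addition above
print(output_classifiers)  # carries the extra ('outcome', 'no-metrics') entry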

wa/framework/output.py

@@ -1,6 +1,7 @@
 import logging
 import os
 import shutil
+from collections import OrderedDict
 from copy import copy
 from datetime import datetime
@@ -58,6 +59,19 @@ class Output(object):
             return []
         return self.result.artifacts

+    @property
+    def classifiers(self):
+        if self.result is None:
+            return OrderedDict()
+        return self.result.classifiers
+
+    @classifiers.setter
+    def classifiers(self, value):
+        if self.result is None:
+            msg = 'Attempting to set classifiers before output has been set'
+            raise RuntimeError(msg)
+        self.result.classifiers = value
+
     def __init__(self, path):
         self.basepath = path
         self.result = None
@@ -240,6 +254,7 @@ class Result(object):
         instance.metrics = [Metric.from_pod(m) for m in pod['metrics']]
         instance.artifacts = [Artifact.from_pod(a) for a in pod['artifacts']]
         instance.events = [Event.from_pod(e) for e in pod['events']]
+        instance.classifiers = pod.get('classifiers', OrderedDict())
         return instance

     def __init__(self):
@@ -248,6 +263,7 @@ class Result(object):
         self.metrics = []
         self.artifacts = []
         self.events = []
+        self.classifiers = OrderedDict()

     def add_metric(self, name, value, units=None, lower_is_better=False,
                    classifiers=None):
@@ -282,6 +298,7 @@ class Result(object):
             metrics=[m.to_pod() for m in self.metrics],
             artifacts=[a.to_pod() for a in self.artifacts],
             events=[e.to_pod() for e in self.events],
+            classifiers=copy(self.classifiers),
         )
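
A rough round-trip sketch of the effect of the to_pod()/from_pod() changes above (illustrative only; it assumes Result is importable from wa.framework.output and that its other pod fields round-trip as before; the 'board' classifier is made up):

from collections import OrderedDict
from wa.framework.output import Result

result = Result()
result.classifiers['board'] = 'juno'

pod = result.to_pod()      # the pod now carries a 'classifiers' entry
assert Result.from_pod(pod).classifiers == OrderedDict([('board', 'juno')])

# Pods written before this change have no 'classifiers' key; from_pod()
# falls back to an empty OrderedDict via pod.get('classifiers', OrderedDict()).
pod.pop('classifiers')
assert Result.from_pod(pod).classifiers == OrderedDict()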