Mirror of https://github.com/ARM-software/workload-automation.git
framework/output: save classifiers in Result
Save classifiers at the Result level as well as the Metric level. Reason: when processing output, one might want to filter complete results as well as individual metrics. While it is in theory possible to derive a job's classifiers by extracting the classifiers common to all of its metrics, this fails when a job generates no metrics at all (and one might still want to process the output in that case, e.g. for its artifacts).
parent c5659d51f1
commit 1e21bd2ca8
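
To make the motivation concrete, here is a minimal post-processing sketch (not part of the commit): the helper name and the job_outputs iterable are hypothetical, and only the result-level classifiers attribute introduced by this change is assumed.

# Hypothetical sketch: select complete job outputs by a result-level
# classifier. This works even for jobs that produced no metrics, because
# the classifiers now live on the Result itself rather than only on
# individual Metric objects.
def outputs_for_classifier(job_outputs, key, value):
    selected = []
    for output in job_outputs:   # job_outputs: obtained elsewhere from a run
        if output.classifiers.get(key) == value:
            selected.append(output)
    return selected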
@@ -131,7 +131,8 @@ class ExecutionContext(object):
         if not self.job_queue:
             raise RuntimeError('No jobs to run')
         self.current_job = self.job_queue.pop(0)
-        self.current_job.output = init_job_output(self.run_output, self.current_job)
+        job_output = init_job_output(self.run_output, self.current_job)
+        self.current_job.set_output(job_output)
         self.update_job_state(self.current_job)
         self.tm.start()
         return self.current_job
@@ -1,4 +1,5 @@
 import logging
+from copy import copy
 from datetime import datetime

 from wa.framework import pluginloader, signal, instruments
@@ -56,6 +57,10 @@ class Job(object):
         else:
             self.workload = self._workload_cache[self.id]

+    def set_output(self, output):
+        output.classifiers = copy(self.classifiers)
+        self.output = output
+
     def initialize(self, context):
         self.logger.info('Initializing job {} [{}]'.format(self.id, self.iteration))
         with signal.wrap('WORKLOAD_INITIALIZED', self, context):
@@ -1,6 +1,7 @@
 import logging
 import os
 import shutil
+from collections import OrderedDict
 from copy import copy
 from datetime import datetime

@@ -58,6 +59,19 @@ class Output(object):
             return []
         return self.result.artifacts

+    @property
+    def classifiers(self):
+        if self.result is None:
+            return OrderedDict()
+        return self.result.classifiers
+
+    @classifiers.setter
+    def classifiers(self, value):
+        if self.result is None:
+            msg = 'Attempting to set classifiers before output has been set'
+            raise RuntimeError(msg)
+        self.result.classifiers = value
+
     def __init__(self, path):
         self.basepath = path
         self.result = None
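
A rough illustration of the behaviour the property above provides, using standalone stand-in classes rather than the real WA Output and Result: reading classifiers before a result exists yields an empty OrderedDict, while writing them raises.

from collections import OrderedDict

# Stand-ins mirroring the property added above, for illustration only.
class _Result(object):
    def __init__(self):
        self.classifiers = OrderedDict()

class _Output(object):
    def __init__(self):
        self.result = None

    @property
    def classifiers(self):
        if self.result is None:
            return OrderedDict()
        return self.result.classifiers

    @classifiers.setter
    def classifiers(self, value):
        if self.result is None:
            raise RuntimeError('Attempting to set classifiers before output has been set')
        self.result.classifiers = value

out = _Output()
assert out.classifiers == OrderedDict()        # no result yet: empty, not an error
out.result = _Result()
out.classifiers = OrderedDict(tag='baseline')  # delegated to the Result
assert out.result.classifiers['tag'] == 'baseline'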
@@ -240,6 +254,7 @@ class Result(object):
         instance.metrics = [Metric.from_pod(m) for m in pod['metrics']]
         instance.artifacts = [Artifact.from_pod(a) for a in pod['artifacts']]
         instance.events = [Event.from_pod(e) for e in pod['events']]
+        instance.classifiers = pod.get('classifiers', OrderedDict())
         return instance

     def __init__(self):
@@ -248,6 +263,7 @@ class Result(object):
         self.metrics = []
         self.artifacts = []
         self.events = []
+        self.classifiers = OrderedDict()

     def add_metric(self, name, value, units=None, lower_is_better=False,
                    classifiers=None):
@@ -282,6 +298,7 @@ class Result(object):
             metrics=[m.to_pod() for m in self.metrics],
             artifacts=[a.to_pod() for a in self.artifacts],
             events=[e.to_pod() for e in self.events],
+            classifiers=copy(self.classifiers),
         )
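
Finally, a small hedged check of what the serialisation changes amount to, using plain dicts rather than the real Result class; the classifier keys and values below are made up for illustration.

from collections import OrderedDict
from copy import copy

# Illustration only: the classifiers portion of the pod round-trip.
classifiers = OrderedDict(board='juno', iteration_group='warm')

# What the new to_pod() line contributes to the serialised result...
pod = dict(metrics=[], artifacts=[], events=[], classifiers=copy(classifiers))

# ...and what the new from_pod() line recovers, falling back to an empty
# OrderedDict for pods written before this change.
assert pod.get('classifiers', OrderedDict()) == classifiers
legacy_pod = dict(metrics=[], artifacts=[], events=[])
assert legacy_pod.get('classifiers', OrderedDict()) == OrderedDict()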