mirror of
https://github.com/ARM-software/workload-automation.git
synced 2025-09-02 03:12:34 +01:00
Implement output processing
- Implemented result processor infrastructure - Corrected some status tracking issues (differed between states and output). - Added "csv" and "status" result processors (these will be enabled by default).
This commit is contained in:
0
wa/processors/__init__.py
Normal file
0
wa/processors/__init__.py
Normal file
87
wa/processors/csvproc.py
Normal file
87
wa/processors/csvproc.py
Normal file
@@ -0,0 +1,87 @@
|
||||
import csv
|
||||
|
||||
from wa import ResultProcessor, Parameter
|
||||
from wa.framework.exception import ConfigError
|
||||
from wa.utils.types import list_of_strings
|
||||
|
||||
|
||||
class CsvReportProcessor(ResultProcessor):
    """Write collected metrics to ``results.csv`` in the run output directory,
    one metric per row, optionally with extra columns for metric classifiers."""

    name = 'csv'
    description = """
    Creates a ``results.csv`` in the output directory containing results for
    all iterations in CSV format, each line containing a single metric.

    """

    parameters = [
        Parameter('use_all_classifiers', kind=bool, default=False,
                  global_alias='use_all_classifiers',
                  description="""
                  If set to ``True``, this will add a column for every classifier
                  that features in at least one collected metric.

                  .. note:: This cannot be ``True`` if ``extra_columns`` is set.

                  """),
        Parameter('extra_columns', kind=list_of_strings,
                  description="""
                  List of classifiers to use as columns.

                  .. note:: This cannot be set if ``use_all_classifiers`` is
                            ``True``.

                  """),
    ]

    def validate(self):
        # The two classifier-column selection modes are mutually exclusive.
        super(CsvReportProcessor, self).validate()
        if self.use_all_classifiers and self.extra_columns:
            msg = 'extra_columns cannot be specified when '\
                  'use_all_classifiers is True'
            raise ConfigError(msg)

    def initialize(self):
        # Accumulates outputs as the run progresses so the CSV can be
        # regenerated with complete data after every job.
        self.results_so_far = []  # pylint: disable=attribute-defined-outside-init
        self.artifact_added = False

    def process_job_output(self, output, target_info, run_output):
        # Re-write the entire CSV after each job so a valid (if partial)
        # results.csv exists even if the run is interrupted.
        self.results_so_far.append(output)
        self._write_results(self.results_so_far, run_output)
        if not self.artifact_added:
            run_output.add_artifact('run_result_csv', 'results.csv', 'export')
            self.artifact_added = True

    def process_run_result(self, output, target_info):
        self.results_so_far.append(output.result)
        # BUG FIX: was ``self.rsults_so_far`` (typo), which raised
        # AttributeError the first time this method ran.
        self._write_results(self.results_so_far, output)
        if not self.artifact_added:
            output.add_artifact('run_result_csv', 'results.csv', 'export')
            self.artifact_added = True

    def _write_results(self, results, output):
        """Write *results* as CSV to ``results.csv`` under *output*'s path.

        Columns are: id, workload, iteration, metric, <classifier columns>,
        value, units. Classifier columns come from ``use_all_classifiers``
        (union of all classifier keys seen), ``extra_columns``, or neither.
        """
        if self.use_all_classifiers:
            classifiers = set()
            for result in results:
                # NOTE(review): this accesses ``result.metrics`` while the
                # writer loop below accesses ``o.result.metrics``; given that
                # results_so_far holds job outputs (process_job_output) and a
                # run result (process_run_result), one of the two access
                # patterns looks wrong -- confirm against the output model.
                for metric in result.metrics:
                    classifiers.update(metric.classifiers.keys())
            extra_columns = list(classifiers)
        elif self.extra_columns:
            extra_columns = self.extra_columns
        else:
            extra_columns = []

        outfile = output.get_path('results.csv')
        # NOTE(review): 'wb' is the Python 2 csv idiom; under Python 3 this
        # should be open(outfile, 'w', newline='') -- confirm target version.
        with open(outfile, 'wb') as wfh:
            writer = csv.writer(wfh)
            writer.writerow(['id', 'workload', 'iteration', 'metric', ] +
                            extra_columns + ['value', 'units'])

            for o in results:
                header = [o.id, o.label, o.iteration]
                for metric in o.result.metrics:
                    # Missing classifiers render as empty cells.
                    row = (header + [metric.name] +
                           [str(metric.classifiers.get(c, ''))
                            for c in extra_columns] +
                           [str(metric.value), metric.units or ''])
                    writer.writerow(row)
59
wa/processors/status.py
Normal file
59
wa/processors/status.py
Normal file
@@ -0,0 +1,59 @@
|
||||
# Copyright 2013-2015 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
|
||||
# pylint: disable=R0201
|
||||
import os
|
||||
import time
|
||||
from collections import Counter
|
||||
|
||||
from wa import ResultProcessor, Status
|
||||
from wa.utils.misc import write_table
|
||||
|
||||
|
||||
class StatusTxtReporter(ResultProcessor):
    """Generate ``status.txt``: a plain-text summary of run and job statuses."""

    name = 'status'
    description = """
    Outputs a txt file containing general status information about which runs
    failed and which were successful

    """

    def process_run_output(self, output, target_info):
        # Tally how many jobs finished in each status.
        status_counts = Counter(jo.status for jo in output.jobs)

        outfile = output.get_path('status.txt')
        self.logger.info('Status available in {}'.format(outfile))
        with open(outfile, 'w') as wfh:
            # Run-level header.
            wfh.write('Run name: {}\n'.format(output.info.run_name))
            wfh.write('Run status: {}\n'.format(output.status))
            wfh.write('Date: {}\n'.format(time.strftime("%c")))

            # Run-level events, if any were recorded.
            if output.events:
                wfh.write('Events:\n')
                for event in output.events:
                    wfh.write('\t{}\n'.format(event.summary))

            txt = '{}/{} iterations completed without error\n'
            wfh.write(txt.format(status_counts[Status.OK], len(output.jobs)))
            wfh.write('\n')

            # One aligned table row per job.
            table_rows = [map(str, [o.id, o.label, o.iteration, o.status,
                                    o.event_summary])
                          for o in output.jobs]
            write_table(table_rows, wfh, align='<<>><')

        output.add_artifact('run_status_summary', 'status.txt', 'export')
Reference in New Issue
Block a user