mirror of https://github.com/ARM-software/workload-automation.git synced 2025-09-03 11:52:36 +01:00

wa: pep8 fixes

Marc Bonnici
2018-07-04 17:44:55 +01:00
parent 4b86fa5aee
commit 925bc8b719
36 changed files with 88 additions and 86 deletions
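Every hunk below applies the same mechanical fix: PEP 8 expects a comment to start with '# ' (hash followed by a space), which pycodestyle reports as E262 for inline comments and E265 for block comments, so bare '#pylint:' directives become '# pylint:'. Pylint recognises the directive either way, so the existing suppressions keep working and the change is purely cosmetic. A minimal before/after sketch (the function here is a made-up example, not taken from the patch):

# Before: pycodestyle flags the comment (E262: inline comment should start with '# ').
def process(data, config):  #pylint: disable=unused-argument
    return sorted(data)

# After: same pylint directive, now PEP 8 compliant.
def process(data, config):  # pylint: disable=unused-argument
    return sorted(data)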


@@ -88,7 +88,7 @@ class CpuStatesProcessor(OutputProcessor):
     def initialize(self):
         self.iteration_reports = OrderedDict()
 
-    def process_job_output(self, output, target_info, run_output): #pylint: disable=unused-argument
+    def process_job_output(self, output, target_info, run_output): # pylint: disable=unused-argument
         trace_file = output.get_artifact_path('trace-cmd-txt')
         if not trace_file:
             self.logger.warning('Text trace does not appear to have been generated; skipping this iteration.')
@@ -110,7 +110,7 @@ class CpuStatesProcessor(OutputProcessor):
         iteration_id = (output.id, output.label, output.iteration)
         self.iteration_reports[iteration_id] = reports
 
-    #pylint: disable=too-many-locals,unused-argument
+    # pylint: disable=too-many-locals,unused-argument
     def process_run_output(self, output, target_info):
         if not self.iteration_reports:
             self.logger.warning('No power state reports generated.')


@@ -60,7 +60,7 @@ class CsvReportProcessor(OutputProcessor):
         self.outputs_so_far = []  # pylint: disable=attribute-defined-outside-init
         self.artifact_added = False
 
-    #pylint: disable=unused-argument
+    # pylint: disable=unused-argument
     def process_job_output(self, output, target_info, run_output):
         self.outputs_so_far.append(output)
         self._write_outputs(self.outputs_so_far, run_output)
@@ -68,7 +68,7 @@ class CsvReportProcessor(OutputProcessor):
             run_output.add_artifact('run_result_csv', 'results.csv', 'export')
             self.artifact_added = True
 
-    def process_run_output(self, output, target_info): #pylint: disable=unused-argument
+    def process_run_output(self, output, target_info): # pylint: disable=unused-argument
         self.outputs_so_far.append(output)
         self._write_outputs(self.outputs_so_far, output)
         if not self.artifact_added:


@@ -115,7 +115,7 @@ class SqliteResultProcessor(OutputProcessor):
         self._spec_oid = None
         self._run_initialized = False
 
-    def export_job_output(self, job_output, target_info, run_output): #pylint: disable=unused-argument
+    def export_job_output(self, job_output, target_info, run_output): # pylint: disable=unused-argument
         if not self._run_initialized:
             self._init_run(run_output)
@@ -128,7 +128,7 @@ class SqliteResultProcessor(OutputProcessor):
         with self._open_connection() as conn:
             conn.executemany('INSERT INTO metrics VALUES (?,?,?,?,?,?)', metrics)
 
-    def export_run_output(self, run_output, target_info): #pylint: disable=unused-argument
+    def export_run_output(self, run_output, target_info): # pylint: disable=unused-argument
         if not self._run_initialized:
             self._init_run(run_output)


@@ -30,7 +30,7 @@ class StatusTxtReporter(OutputProcessor):
"""
def process_run_output(self, output, target_info): #pylint: disable=unused-argument
def process_run_output(self, output, target_info): # pylint: disable=unused-argument
counter = Counter()
for jo in output.jobs:
counter[jo.status] += 1


@@ -61,7 +61,7 @@ class TargzProcessor(OutputProcessor):
         self.logger.debug('Registering RUN_FINALIZED handler.')
         signal.connect(self.delete_output_directory, signal.RUN_FINALIZED, priority=-100)
 
-    def export_run_output(self, run_output, target_info): #pylint: disable=unused-argument
+    def export_run_output(self, run_output, target_info): # pylint: disable=unused-argument
         if self.outfile:
             outfile_path = self.outfile.format(**run_output.info.to_pod())
         else:


@@ -30,7 +30,7 @@ class UxperfProcessor(OutputProcessor):
     a agenda file by setting ``markers_enabled`` for the workload to ``True``.
     '''
 
-    #pylint: disable=too-many-locals,unused-argument
+    # pylint: disable=too-many-locals,unused-argument
     def process_job_output(self, output, target_info, job_output):
         logcat = output.get_artifact('logcat')
         if not logcat: