
pep8: Ignore line break before binary operator

PEP8 has switched its guidance [1] on where a line break should occur
in relation to a binary operator: breaks are now recommended before
the operator rather than after it. So don't raise the corresponding
warning (W503) for new code, and update the code base to follow the
new style.

[1] https://www.python.org/dev/peps/pep-0008/#should-a-line-break-before-or-after-a-binary-operator
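
For reference, a minimal sketch of the two styles (variable names are
illustrative, taken from the example in the PEP itself):

    # Old style: break after the operator (W504 ignored, W503 enforced)
    income = (gross_wages +
              taxable_interest)

    # New style: break before the operator (W503 ignored, W504 enforced)
    income = (gross_wages
              + taxable_interest)

Accordingly, the flake8 configuration below swaps W504 for W503 in its
IGNORE list.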
Marc Bonnici 2020-10-19 18:09:04 +01:00 committed by setrofim
parent fbb84eca72
commit aa4df95a69
19 changed files with 63 additions and 63 deletions

View File

@@ -6,7 +6,7 @@ DEFAULT_DIRS=(
 EXCLUDE=wa/tests,wa/framework/target/descriptor.py
 EXCLUDE_COMMA=
-IGNORE=E501,E265,E266,W391,E401,E402,E731,W504,W605,F401
+IGNORE=E501,E265,E266,W391,E401,E402,E731,W503,W605,F401
 if ! hash flake8 2>/dev/null; then
     echo "flake8 not found in PATH"
View File

@@ -106,8 +106,8 @@ class CreateDatabaseSubcommand(SubCommand):
     def execute(self, state, args):  # pylint: disable=too-many-branches
         if not psycopg2:
             raise CommandError(
-                'The module psycopg2 is required for the wa ' +
-                'create database command.')
+                'The module psycopg2 is required for the wa '
+                + 'create database command.')
         if args.dbname == 'postgres':
             raise ValueError('Databasename to create cannot be postgres.')
@@ -131,8 +131,8 @@ class CreateDatabaseSubcommand(SubCommand):
             config = yaml.load(config_file)
         if 'postgres' in config and not args.force_update_config:
             raise CommandError(
-                "The entry 'postgres' already exists in the config file. " +
-                "Please specify the -F flag to force an update.")
+                "The entry 'postgres' already exists in the config file. "
+                + "Please specify the -F flag to force an update.")
         possible_connection_errors = [
             (
@@ -261,8 +261,8 @@ class CreateDatabaseSubcommand(SubCommand):
         else:
             if not self.force:
                 raise CommandError(
-                    "Database {} already exists. ".format(self.dbname) +
-                    "Please specify the -f flag to create it from afresh."
+                    "Database {} already exists. ".format(self.dbname)
+                    + "Please specify the -f flag to create it from afresh."
                 )

     def _create_database_postgres(self):
@@ -400,14 +400,14 @@ class CreateWorkloadSubcommand(SubCommand):
         self.parser.add_argument('name', metavar='NAME',
                                  help='Name of the workload to be created')
         self.parser.add_argument('-p', '--path', metavar='PATH', default=None,
-                                 help='The location at which the workload will be created. If not specified, ' +
-                                      'this defaults to "~/.workload_automation/plugins".')
+                                 help='The location at which the workload will be created. If not specified, '
+                                      + 'this defaults to "~/.workload_automation/plugins".')
         self.parser.add_argument('-f', '--force', action='store_true',
-                                 help='Create the new workload even if a workload with the specified ' +
-                                      'name already exists.')
+                                 help='Create the new workload even if a workload with the specified '
+                                      + 'name already exists.')
         self.parser.add_argument('-k', '--kind', metavar='KIND', default='basic', choices=list(create_funcs.keys()),
-                                 help='The type of workload to be created. The available options ' +
-                                      'are: {}'.format(', '.join(list(create_funcs.keys()))))
+                                 help='The type of workload to be created. The available options '
+                                      + 'are: {}'.format(', '.join(list(create_funcs.keys()))))

     def execute(self, state, args):  # pylint: disable=R0201
         where = args.path or 'local'
@@ -430,8 +430,8 @@ class CreatePackageSubcommand(SubCommand):
         self.parser.add_argument('name', metavar='NAME',
                                  help='Name of the package to be created')
         self.parser.add_argument('-p', '--path', metavar='PATH', default=None,
-                                 help='The location at which the new package will be created. If not specified, ' +
-                                      'current working directory will be used.')
+                                 help='The location at which the new package will be created. If not specified, '
+                                      + 'current working directory will be used.')
         self.parser.add_argument('-f', '--force', action='store_true',
                                  help='Create the new package even if a file or directory with the same name '
                                       'already exists at the specified location.')

View File

@@ -96,8 +96,8 @@ class RecordCommand(Command):
         if args.workload and args.output:
             self.logger.error("Output file cannot be specified with Workload")
             sys.exit()
-        if not args.workload and (args.setup or args.extract_results or
-                                  args.teardown or args.all):
+        if not args.workload and (args.setup or args.extract_results
+                                  or args.teardown or args.all):
             self.logger.error("Cannot specify a recording stage without a Workload")
             sys.exit()
         if args.workload and not any([args.all, args.teardown, args.extract_results, args.run, args.setup]):

View File

@@ -84,9 +84,9 @@ class PluginCache(object):
                   'defined in a config file, move the entry content into the top level'
             raise ConfigError(msg.format((plugin_name)))

-        if (not self.loader.has_plugin(plugin_name) and
-                plugin_name not in self.targets and
-                plugin_name not in GENERIC_CONFIGS):
+        if (not self.loader.has_plugin(plugin_name)
+                and plugin_name not in self.targets
+                and plugin_name not in GENERIC_CONFIGS):
             msg = 'configuration provided for unknown plugin "{}"'
             raise ConfigError(msg.format(plugin_name))
@@ -95,8 +95,8 @@ class PluginCache(object):
             raise ConfigError(msg.format(plugin_name, repr(values), type(values)))

         for name, value in values.items():
-            if (plugin_name not in GENERIC_CONFIGS and
-                    name not in self.get_plugin_parameters(plugin_name)):
+            if (plugin_name not in GENERIC_CONFIGS
+                    and name not in self.get_plugin_parameters(plugin_name)):
                 msg = "'{}' is not a valid parameter for '{}'"
                 raise ConfigError(msg.format(name, plugin_name))

View File

@@ -128,8 +128,8 @@ class ExecutionContext(object):
         self.run_state.status = status
         self.run_output.status = status
         self.run_output.info.end_time = datetime.utcnow()
-        self.run_output.info.duration = (self.run_output.info.end_time -
-                                         self.run_output.info.start_time)
+        self.run_output.info.duration = (self.run_output.info.end_time
+                                         - self.run_output.info.start_time)
         self.write_output()

     def finalize(self):

View File

@@ -268,8 +268,8 @@ class RunOutput(Output, RunOutputCommon):
         self._combined_config = None
         self.jobs = []
         self.job_specs = []
-        if (not os.path.isfile(self.statefile) or
-                not os.path.isfile(self.infofile)):
+        if (not os.path.isfile(self.statefile)
+                or not os.path.isfile(self.infofile)):
             msg = '"{}" does not exist or is not a valid WA output directory.'
             raise ValueError(msg.format(self.basepath))
         self.reload()

View File

@@ -166,8 +166,8 @@ class AndroidAssistant(object):
         else:
             parser = LogcatParser()
             for event in parser.parse(outfile):
-                if (event.tag == self._logcat_marker_tag and
-                        event.message == self._logcat_marker_msg):
+                if (event.tag == self._logcat_marker_tag
+                        and event.message == self._logcat_marker_msg):
                     return True
         return False
@@ -275,8 +275,8 @@ class LogcatPoller(threading.Thread):
         counter = self._start_marker
         for event in parser.parse(outfile):
             message = self._logcat_marker_msg.split(':')[0]
-            if not (event.tag == self._logcat_marker_tag and
-                    event.message.split(':')[0] == message):
+            if not (event.tag == self._logcat_marker_tag
+                    and event.message.split(':')[0] == message):
                 continue
             number = int(event.message.split(':')[1])

View File

@@ -201,16 +201,16 @@ class DelayInstrument(Instrument):
             reading = self.target.read_int(self.temperature_file)

     def validate(self):
-        if (self.temperature_between_specs is not None and
-                self.fixed_between_specs is not None):
+        if (self.temperature_between_specs is not None
+                and self.fixed_between_specs is not None):
             raise ConfigError('Both fixed delay and thermal threshold specified for specs.')

-        if (self.temperature_between_jobs is not None and
-                self.fixed_between_jobs is not None):
+        if (self.temperature_between_jobs is not None
+                and self.fixed_between_jobs is not None):
             raise ConfigError('Both fixed delay and thermal threshold specified for jobs.')

-        if (self.temperature_before_start is not None and
-                self.fixed_before_start is not None):
+        if (self.temperature_before_start is not None
+                and self.fixed_before_start is not None):
             raise ConfigError('Both fixed delay and thermal threshold specified before start.')

         if not any([self.temperature_between_specs, self.fixed_between_specs,

View File

@@ -169,9 +169,9 @@ class SysfsExtractor(Instrument):
         for paths in self.device_and_host_paths:
             after_dir = paths[self.AFTER_PATH]
             dev_dir = paths[self.DEVICE_PATH].strip('*')  # remove potential trailing '*'
-            if (not os.listdir(after_dir) and
-                    self.target.file_exists(dev_dir) and
-                    self.target.list_directory(dev_dir)):
+            if (not os.listdir(after_dir)
+                    and self.target.file_exists(dev_dir)
+                    and self.target.list_directory(dev_dir)):
                 self.logger.error('sysfs files were not pulled from the device.')
                 self.device_and_host_paths.remove(paths)  # Path is removed to skip diffing it
         for dev_dir, before_dir, after_dir, diff_dir in self.device_and_host_paths:

View File

@@ -134,8 +134,8 @@ class CpuStatesProcessor(OutputProcessor):
             parallel_rows.append([job_id, workload, iteration] + record)
             for state in sorted(powerstate_report.state_stats):
                 stats = powerstate_report.state_stats[state]
-                powerstate_rows.append([job_id, workload, iteration, state] +
-                                       ['{:.3f}'.format(s if s is not None else 0)
+                powerstate_rows.append([job_id, workload, iteration, state]
+                                       + ['{:.3f}'.format(s if s is not None else 0)
                                          for s in stats])

         outpath = output.get_path('parallel-stats.csv')

View File

@@ -90,8 +90,8 @@ class CsvReportProcessor(OutputProcessor):
         outfile = output.get_path('results.csv')
         with csvwriter(outfile) as writer:
-            writer.writerow(['id', 'workload', 'iteration', 'metric', ] +
-                            extra_columns + ['value', 'units'])
+            writer.writerow(['id', 'workload', 'iteration', 'metric', ]
+                            + extra_columns + ['value', 'units'])

             for o in outputs:
                 if o.kind == 'job':
@@ -106,8 +106,8 @@ class CsvReportProcessor(OutputProcessor):
                     'Output of kind "{}" unrecognised by csvproc'.format(o.kind))

             for metric in o.result.metrics:
-                row = (header + [metric.name] +
-                       [str(metric.classifiers.get(c, ''))
-                        for c in extra_columns] +
-                       [str(metric.value), metric.units or ''])
+                row = (header + [metric.name]
+                       + [str(metric.classifiers.get(c, ''))
+                          for c in extra_columns]
+                       + [str(metric.value), metric.units or ''])
                 writer.writerow(row)

View File

@@ -124,8 +124,8 @@ class PostgresqlResultProcessor(OutputProcessor):
         if not psycopg2:
             raise ImportError(
-                'The psycopg2 module is required for the ' +
-                'Postgresql Output Processor: {}'.format(import_error_msg))
+                'The psycopg2 module is required for the '
+                + 'Postgresql Output Processor: {}'.format(import_error_msg))
         # N.B. Typecasters are for postgres->python and adapters the opposite
         self.connect_to_database()
@@ -515,8 +515,8 @@ class PostgresqlResultProcessor(OutputProcessor):
             self.conn = connect(dsn=dsn)
         except Psycopg2Error as e:
             raise OutputProcessorError(
-                "Database error, if the database doesn't exist, " +
-                "please use 'wa create database' to create the database: {}".format(e))
+                "Database error, if the database doesn't exist, "
+                + "please use 'wa create database' to create the database: {}".format(e))
         self.cursor = self.conn.cursor()
         self.verify_schema_versions()

View File

@@ -95,8 +95,8 @@ def diff_sysfs_dirs(before, after, result):  # pylint: disable=R0914
                 logger.debug('Token length mismatch in {} on line {}'.format(bfile, i))
                 dfh.write('xxx ' + bline)
                 continue
-            if ((len([c for c in bchunks if c.strip()]) == len([c for c in achunks if c.strip()]) == 2) and
-                    (bchunks[0] == achunks[0])):
+            if ((len([c for c in bchunks if c.strip()]) == len([c for c in achunks if c.strip()]) == 2)
+                    and (bchunks[0] == achunks[0])):
                 # if there are only two columns and the first column is the
                 # same, assume it's a "header" column and do not diff it.
                 dchunks = [bchunks[0]] + [diff_tokens(b, a) for b, a in zip(bchunks[1:], achunks[1:])]

View File

@@ -79,7 +79,7 @@ def init(verbosity=logging.INFO, color=True, indent_with=4,
     root_logger.addHandler(_console_handler)
     buffer_capacity = int(os.getenv('WA_LOG_BUFFER_CAPACITY',
-                                    str(DEFAULT_INIT_BUFFER_CAPACITY)))
+                                    str(DEFAULT_INIT_BUFFER_CAPACITY)))
     _init_handler = InitHandler(buffer_capacity)
     _init_handler.setLevel(logging.DEBUG)
     root_logger.addHandler(_init_handler)

View File

@@ -404,8 +404,8 @@ def istextfile(fileobj, blocksize=512):
        If more than 30% of the chars in the block are non-text, or there
        are NUL ('\x00') bytes in the block, assume this is a binary file.
     """
-    _text_characters = (b''.join(chr(i) for i in range(32, 127)) +
-                        b'\n\r\t\f\b')
+    _text_characters = (b''.join(chr(i) for i in range(32, 127))
+                        + b'\n\r\t\f\b')

     block = fileobj.read(blocksize)
     if b'\x00' in block:

View File

@@ -155,8 +155,8 @@ class ReventRecording(object):
         else:  # not streaming
             if not self._events:
                 self._duration = 0
-            self._duration = (self._events[-1].time -
-                              self._events[0].time).total_seconds()
+            self._duration = (self._events[-1].time
+                              - self._events[0].time).total_seconds()
         return self._duration

     @property

View File

@@ -343,7 +343,7 @@ def _read_pod(fh, fmt=None):
         fmt = os.path.splitext(fh.name)[1].lower().strip('.')
         if fmt == '':
             # Special case of no given file extension
-            message = ("Could not determine format " +
+            message = ("Could not determine format "
                        "from file extension for \"{}\". "
                        "Please specify it or modify the fmt parameter.")
             raise ValueError(message.format(getattr(fh, 'name', '<none>')))

View File

@@ -88,6 +88,6 @@ class IdleWorkload(Workload):
             self.target.sleep(1)
         if self.screen_off and self.old_screen_state:
             self.target.ensure_screen_is_on()
-        elif (self.target.os == 'android' and
-              not self.screen_off and not self.old_screen_state):
+        elif (self.target.os == 'android'
+              and not self.screen_off and not self.old_screen_state):
             self.target.ensure_screen_is_off()

View File

@@ -259,8 +259,8 @@ class Speedometer(Workload):
         while not benchmark_complete:
             if self.target_file_was_created(local_storage):
                 if (
-                    iterations % (find_period_s // sleep_period_s) == 0 or
-                    not local_storage_seen
+                    iterations % (find_period_s // sleep_period_s) == 0
+                    or not local_storage_seen
                 ):
                     # There's a chance we don't see the localstorage file immediately, and there's a
                     # chance more of them could be created later, so check for those files every ~30