Mirror of https://github.com/ARM-software/workload-automation.git

Add support for Python 3

Add support for running under Python 3, while maintaining compatibility
with Python 2.

See http://python-future.org/compatible_idioms.html for more detail on the
idioms behind these changes.
Authored by Sergei Trofimov, 2018-05-30 13:58:49 +01:00
Committed by Marc Bonnici
Commit b3de85455a (parent c3ddb31d4d)
53 changed files with 377 additions and 384 deletions
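
A minimal sketch, not taken from the diffs below, of the dictionary-iteration idiom that accounts for most of these changes: iteritems()/itervalues()/iterkeys() exist only on Python 2, so calls are switched to items()/values()/keys(), with an explicit list() wrapper added only where a real list is needed (indexing, sorting, or mutating the dict while iterating).

# Portable on both Python 2 and 3: items() returns a list on 2 and a view on 3,
# either of which is fine for plain iteration.
config = {'device': 'generic_android', 'iterations': 3}

for key, value in config.items():
    print('{}: {}'.format(key, value))

names = list(config.keys())   # force a list only where one is actually required,
names.sort()                  # e.g. to sort it or join it into a message
print(', '.join(names))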

View File

@@ -95,11 +95,11 @@ class RebootPolicy(object):
__repr__ = __str__
def __cmp__(self, other):
def __eq__(self, other):
if isinstance(other, RebootPolicy):
return cmp(self.policy, other.policy)
return self.policy == other.policy
else:
return cmp(self.policy, other)
return self.policy == other
def to_pod(self):
return self.policy
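
For context, a hedged sketch (illustrative only, not part of the commit) of why the hunk above trades __cmp__ for __eq__: Python 3 removes both the cmp() built-in and the __cmp__ protocol, so equality has to go through the rich-comparison methods; functools.total_ordering can derive the ordering operators if they are ever needed.

from functools import total_ordering

@total_ordering
class Policy(object):
    # Toy stand-in for a RebootPolicy-style wrapper around a single value.
    def __init__(self, policy):
        self.policy = policy

    def __eq__(self, other):
        # Compare against another wrapper or a bare value, mirroring the
        # branch structure in the hunk above.
        if isinstance(other, Policy):
            return self.policy == other.policy
        return self.policy == other

    def __lt__(self, other):  # total_ordering fills in <=, > and >= from this
        other = other.policy if isinstance(other, Policy) else other
        return self.policy < other

assert Policy('never') == 'never'
assert Policy('as_needed') == Policy('as_needed')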
@@ -127,7 +127,7 @@ class LoggingConfig(dict):
def __init__(self, config=None):
dict.__init__(self)
if isinstance(config, dict):
config = {identifier(k.lower()): v for k, v in config.iteritems()}
config = {identifier(k.lower()): v for k, v in config.items()}
self['regular_format'] = config.pop('regular_format', self.defaults['regular_format'])
self['verbose_format'] = config.pop('verbose_format', self.defaults['verbose_format'])
self['file_format'] = config.pop('file_format', self.defaults['file_format'])
@@ -135,9 +135,9 @@ class LoggingConfig(dict):
self['color'] = config.pop('color', self.defaults['color'])
if config:
message = 'Unexpected logging configuration parameters: {}'
raise ValueError(message.format(bad_vals=', '.join(config.keys())))
raise ValueError(message.format(bad_vals=', '.join(list(config.keys()))))
elif config is None:
for k, v in self.defaults.iteritems():
for k, v in self.defaults.items():
self[k] = v
else:
raise ValueError(config)
@@ -360,7 +360,7 @@ class Configuration(object):
cfg_point.set_value(instance, value)
if pod:
msg = 'Invalid entry(ies) for "{}": "{}"'
raise ValueError(msg.format(cls.name, '", "'.join(pod.keys())))
raise ValueError(msg.format(cls.name, '", "'.join(list(pod.keys()))))
return instance
def __init__(self):
@@ -380,7 +380,7 @@ class Configuration(object):
def update_config(self, values, check_mandatory=True):
for k, v in values.iteritems():
for k, v in values.items():
self.set(k, v, check_mandatory=check_mandatory)
def validate(self):
@@ -824,7 +824,7 @@ class JobSpec(Configuration):
def update_config(self, source, check_mandatory=True):
self._sources.append(source)
values = source.config
for k, v in values.iteritems():
for k, v in values.items():
if k == "id":
continue
elif k.endswith('_parameters'):
@@ -849,7 +849,7 @@ class JobSpec(Configuration):
if not config:
continue
for name, cfg_point in cfg_points.iteritems():
for name, cfg_point in cfg_points.items():
if name in config:
value = config.pop(name)
cfg_point.set_value(workload_params, value,
@@ -873,7 +873,7 @@ class JobSpec(Configuration):
runtime_parameters[source] = global_runtime_params[source]
# Add runtime parameters from JobSpec
for source, values in self.to_merge['runtime_parameters'].iteritems():
for source, values in self.to_merge['runtime_parameters'].items():
runtime_parameters[source] = values
# Merge
@@ -884,9 +884,9 @@ class JobSpec(Configuration):
for source in self._sources[1:]]) # ignore first id, "global"
# ensure *_parameters are always obj_dict's
self.boot_parameters = obj_dict((self.boot_parameters or {}).items())
self.runtime_parameters = obj_dict((self.runtime_parameters or {}).items())
self.workload_parameters = obj_dict((self.workload_parameters or {}).items())
self.boot_parameters = obj_dict(list((self.boot_parameters or {}).items()))
self.runtime_parameters = obj_dict(list((self.runtime_parameters or {}).items()))
self.workload_parameters = obj_dict(list((self.workload_parameters or {}).items()))
if self.label is None:
self.label = self.workload_name
@@ -903,7 +903,7 @@ class JobGenerator(object):
self._read_augmentations = True
if self._enabled_instruments is None:
self._enabled_instruments = []
for entry in self._enabled_augmentations.merge_with(self.disabled_augmentations).values():
for entry in list(self._enabled_augmentations.merge_with(self.disabled_augmentations).values()):
entry_cls = self.plugin_cache.get_plugin_class(entry)
if entry_cls.kind == 'instrument':
self._enabled_instruments.append(entry)
@@ -914,7 +914,7 @@ class JobGenerator(object):
self._read_augmentations = True
if self._enabled_processors is None:
self._enabled_processors = []
for entry in self._enabled_augmentations.merge_with(self.disabled_augmentations).values():
for entry in list(self._enabled_augmentations.merge_with(self.disabled_augmentations).values()):
entry_cls = self.plugin_cache.get_plugin_class(entry)
if entry_cls.kind == 'output_processor':
self._enabled_processors.append(entry)
@@ -934,7 +934,7 @@ class JobGenerator(object):
self.job_spec_template.name = "globally specified job spec configuration"
self.job_spec_template.id = "global"
# Load defaults
for cfg_point in JobSpec.configuration.itervalues():
for cfg_point in JobSpec.configuration.values():
cfg_point.set_value(self.job_spec_template, check_mandatory=False)
self.root_node = SectionNode(self.job_spec_template)
@@ -996,7 +996,7 @@ class JobGenerator(object):
break
else:
continue
self.update_augmentations(job_spec.augmentations.values())
self.update_augmentations(list(job_spec.augmentations.values()))
specs.append(job_spec)
return specs

View File

@@ -1,5 +1,7 @@
import random
from itertools import izip_longest, groupby, chain
from itertools import groupby, chain
from future.moves.itertools import zip_longest
from wa.framework.configuration.core import (MetaConfiguration, RunConfiguration,
JobGenerator, Status, settings)
@@ -157,8 +159,8 @@ def permute_by_iteration(specs):
all_tuples = []
for spec in chain(*groups):
all_tuples.append([(spec, i + 1)
for i in xrange(spec.iterations)])
for t in chain(*map(list, izip_longest(*all_tuples))):
for i in range(spec.iterations)])
for t in chain(*list(map(list, zip_longest(*all_tuples)))):
if t is not None:
yield t
@@ -183,8 +185,8 @@ def permute_by_section(specs):
all_tuples = []
for spec in chain(*groups):
all_tuples.append([(spec, i + 1)
for i in xrange(spec.iterations)])
for t in chain(*map(list, izip_longest(*all_tuples))):
for i in range(spec.iterations)])
for t in chain(*list(map(list, zip_longest(*all_tuples)))):
if t is not None:
yield t
@@ -196,7 +198,7 @@ def permute_randomly(specs):
"""
result = []
for spec in specs:
for i in xrange(1, spec.iterations + 1):
for i in range(1, spec.iterations + 1):
result.append((spec, i))
random.shuffle(result)
for t in result:
@@ -214,5 +216,5 @@ permute_map = {
def permute_iterations(specs, exec_order):
if exec_order not in permute_map:
msg = 'Unknown execution order "{}"; must be in: {}'
raise ValueError(msg.format(exec_order, permute_map.keys()))
raise ValueError(msg.format(exec_order, list(permute_map.keys())))
return permute_map[exec_order](specs)
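
An illustrative sketch, assuming the future package is installed (the file above already imports future.moves.itertools), of the renamed helpers this file now relies on: itertools.izip_longest and the xrange built-in are Python 2 only, whereas future.moves.itertools.zip_longest and range resolve correctly on both versions.

from future.moves.itertools import zip_longest  # izip_longest on Py2, zip_longest on Py3

# Interleave per-spec iteration lists the way permute_by_iteration does:
# round-robin across specs, with the shorter lists padded by None.
a = [('spec1', i + 1) for i in range(3)]
b = [('spec2', i + 1) for i in range(2)]

interleaved = [t for group in zip_longest(a, b) for t in group if t is not None]
print(interleaved)
# [('spec1', 1), ('spec2', 1), ('spec1', 2), ('spec2', 2), ('spec1', 3)]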

View File

@@ -21,6 +21,7 @@ from wa.framework.exception import ConfigError
from wa.utils import log
from wa.utils.serializer import json, read_pod, SerializerSyntaxError
from wa.utils.types import toggle_set, counter
from functools import reduce
logger = logging.getLogger('config')
@@ -47,27 +48,27 @@ class ConfigParser(object):
merge_augmentations(raw)
# Get WA core configuration
for cfg_point in state.settings.configuration.itervalues():
for cfg_point in state.settings.configuration.values():
value = pop_aliased_param(cfg_point, raw)
if value is not None:
logger.debug('Setting meta "{}" to "{}"'.format(cfg_point.name, value))
state.settings.set(cfg_point.name, value)
# Get run specific configuration
for cfg_point in state.run_config.configuration.itervalues():
for cfg_point in state.run_config.configuration.values():
value = pop_aliased_param(cfg_point, raw)
if value is not None:
logger.debug('Setting run "{}" to "{}"'.format(cfg_point.name, value))
state.run_config.set(cfg_point.name, value)
# Get global job spec configuration
for cfg_point in JobSpec.configuration.itervalues():
for cfg_point in JobSpec.configuration.values():
value = pop_aliased_param(cfg_point, raw)
if value is not None:
logger.debug('Setting global "{}" to "{}"'.format(cfg_point.name, value))
state.jobs_config.set_global_value(cfg_point.name, value)
for name, values in raw.iteritems():
for name, values in raw.items():
# Assume that all leftover config is for a plug-in or a global
# alias it is up to PluginCache to assert this assumption
logger.debug('Caching "{}" with "{}"'.format(name, values))
@@ -106,7 +107,7 @@ class AgendaParser(object):
if raw:
msg = 'Invalid top level agenda entry(ies): "{}"'
raise ConfigError(msg.format('", "'.join(raw.keys())))
raise ConfigError(msg.format('", "'.join(list(raw.keys()))))
sect_ids, wkl_ids = self._collect_ids(sections, global_workloads)
self._process_global_workloads(state, global_workloads, wkl_ids)
@@ -301,7 +302,7 @@ def _construct_valid_entry(raw, seen_ids, prefix, jobs_config):
merge_augmentations(raw)
# Validate all workload_entry
for name, cfg_point in JobSpec.configuration.iteritems():
for name, cfg_point in JobSpec.configuration.items():
value = pop_aliased_param(cfg_point, raw)
if value is not None:
value = cfg_point.kind(value)
@@ -317,7 +318,7 @@ def _construct_valid_entry(raw, seen_ids, prefix, jobs_config):
# error if there are unknown workload_entry
if raw:
msg = 'Invalid entry(ies) in "{}": "{}"'
raise ConfigError(msg.format(workload_entry['id'], ', '.join(raw.keys())))
raise ConfigError(msg.format(workload_entry['id'], ', '.join(list(raw.keys()))))
return workload_entry
@@ -339,7 +340,7 @@ def _collect_valid_id(entry_id, seen_ids, entry_type):
def _get_workload_entry(workload):
if isinstance(workload, basestring):
if isinstance(workload, str):
workload = {'name': workload}
elif not isinstance(workload, dict):
raise ConfigError('Invalid workload entry: "{}"')
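
A hedged sketch of the string-type check changed just above: basestring no longer exists on Python 3, so the straightforward replacement is isinstance(x, str). Where Python 2 unicode values must also match, python-future's compatible-idioms page offers from past.builtins import basestring or the backported builtins.str; the snippet below sticks to the plain check used in the commit.

def normalize_workload(workload):
    # Mirrors the _get_workload_entry logic above: accept a bare workload
    # name or an already-expanded dict.
    # Note: on Python 2 this plain str check will not match unicode values;
    # past.builtins.basestring covers that case if it matters.
    if isinstance(workload, str):
        return {'name': workload}
    if isinstance(workload, dict):
        return dict(workload)
    raise ValueError('Invalid workload entry: {!r}'.format(workload))

print(normalize_workload('dhrystone'))  # {'name': 'dhrystone'}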

View File

@@ -90,11 +90,11 @@ class PluginCache(object):
msg = 'configuration provided for unknown plugin "{}"'
raise ConfigError(msg.format(plugin_name))
if not hasattr(values, 'iteritems'):
if not hasattr(values, 'items'):
msg = 'Plugin configuration for "{}" not a dictionary ({} is {})'
raise ConfigError(msg.format(plugin_name, repr(values), type(values)))
for name, value in values.iteritems():
for name, value in values.items():
if (plugin_name not in GENERIC_CONFIGS and
name not in self.get_plugin_parameters(plugin_name)):
msg = "'{}' is not a valid parameter for '{}'"
@@ -124,7 +124,7 @@ class PluginCache(object):
for source in self.sources:
if source not in plugin_config:
continue
for name, value in plugin_config[source].iteritems():
for name, value in plugin_config[source].items():
cfg_points[name].set_value(config, value=value)
else:
# A more complicated merge that involves priority of sources and
@@ -136,7 +136,7 @@ class PluginCache(object):
def get_plugin(self, name, kind=None, *args, **kwargs):
config = self.get_plugin_config(name)
kwargs = dict(config.items() + kwargs.items())
kwargs = dict(list(config.items()) + list(kwargs.items()))
return self.loader.get_plugin(name, kind=kind, *args, **kwargs)
def get_plugin_class(self, name, kind=None):
@@ -154,18 +154,18 @@ class PluginCache(object):
def _set_plugin_defaults(self, plugin_name, config):
cfg_points = self.get_plugin_parameters(plugin_name)
for cfg_point in cfg_points.itervalues():
for cfg_point in cfg_points.values():
cfg_point.set_value(config, check_mandatory=False)
try:
_, alias_params = self.resolve_alias(plugin_name)
for name, value in alias_params.iteritems():
for name, value in alias_params.items():
cfg_points[name].set_value(config, value)
except NotFoundError:
pass
def _set_from_global_aliases(self, plugin_name, config):
for alias, param in self._global_alias_map[plugin_name].iteritems():
for alias, param in self._global_alias_map[plugin_name].items():
if alias in self.global_alias_values:
for source in self.sources:
if source not in self.global_alias_values[alias]:
@@ -230,7 +230,7 @@ class PluginCache(object):
# Validate final configuration
merged_config.name = specific_name
for cfg_point in ms.cfg_points.itervalues():
for cfg_point in ms.cfg_points.values():
cfg_point.validate(merged_config, check_mandatory=is_final)
def __getattr__(self, name):
@@ -285,7 +285,7 @@ class MergeState(object):
def update_config_from_source(final_config, source, state):
if source in state.generic_config:
final_config.name = state.generic_name
for name, cfg_point in state.cfg_points.iteritems():
for name, cfg_point in state.cfg_points.items():
if name in state.generic_config[source]:
if name in state.seen_specific_config:
msg = ('"{generic_name}" configuration "{config_name}" has '
@@ -307,7 +307,7 @@ def update_config_from_source(final_config, source, state):
if source in state.specific_config:
final_config.name = state.specific_name
for name, cfg_point in state.cfg_points.iteritems():
for name, cfg_point in state.cfg_points.items():
if name in state.specific_config[source]:
state.seen_specific_config[name].append(str(source))
value = state.specific_config[source].pop(name)

View File

@@ -39,7 +39,7 @@ class JobSpecSource(object):
def _log_self(self):
logger.debug('Creating {} node'.format(self.kind))
with log.indentcontext():
for key, value in self.config.iteritems():
for key, value in self.config.items():
logger.debug('"{}" to "{}"'.format(key, value))

View File

@@ -20,7 +20,11 @@ from wa.utils.misc import get_traceback
class WAError(Exception):
"""Base class for all Workload Automation exceptions."""
pass
@property
def message(self):
if self.args:
return self.args[0]
return ''
class NotFoundError(WAError):
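
A brief, standalone sketch of the pattern behind both the message property added to WAError above and the e.args[0] if e.args else str(e) expressions used elsewhere in this commit: Python 3 dropped the old BaseException.message attribute, so the text has to be recovered from args instead.

class SketchError(Exception):
    # Illustrative only; the real class lives in wa/framework/exception.py.

    @property
    def message(self):
        # Python 3 no longer provides .message; args[0] is the closest
        # portable equivalent, with an empty string as the fallback.
        return self.args[0] if self.args else ''

try:
    raise SketchError('device went offline')
except SketchError as e:
    message = e.args[0] if e.args else str(e)  # same fallback as in the Runner hunk below
    print(message)  # device went offline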

View File

@@ -464,7 +464,7 @@ class Runner(object):
self.logger.info('Skipping remaining jobs.')
self.context.skip_remaining_jobs()
except Exception as e:
message = e.message if e.message else str(e)
message = e.args[0] if e.args else str(e)
log.log_error(e, self.logger)
self.logger.error('Skipping remaining jobs due to "{}".'.format(e))
self.context.skip_remaining_jobs()

View File

@@ -18,7 +18,7 @@
This module contains the standard set of resource getters used by Workload Automation.
"""
import httplib
import http.client
import json
import logging
import os
@@ -233,13 +233,17 @@ class Http(ResourceGetter):
return {}
index_url = urljoin(self.url, 'index.json')
response = self.geturl(index_url)
if response.status_code != httplib.OK:
if response.status_code != http.client.OK:
message = 'Could not fetch "{}"; recieved "{} {}"'
self.logger.error(message.format(index_url,
response.status_code,
response.reason))
return {}
return json.loads(response.content)
if sys.version_info[0] == 3:
content = response.content.decode('utf-8')
else:
content = response.content
return json.loads(content)
def download_asset(self, asset, owner_name):
url = urljoin(self.url, owner_name, asset['path'])
@@ -252,7 +256,7 @@ class Http(ResourceGetter):
return local_path
self.logger.debug('Downloading {}'.format(url))
response = self.geturl(url, stream=True)
if response.status_code != httplib.OK:
if response.status_code != http.client.OK:
message = 'Could not download asset "{}"; recieved "{} {}"'
self.logger.warning(message.format(url,
response.status_code,
@@ -275,7 +279,7 @@ class Http(ResourceGetter):
if not assets:
return None
asset_map = {a['path']: a for a in assets}
paths = get_path_matches(resource, asset_map.keys())
paths = get_path_matches(resource, list(asset_map.keys()))
local_paths = []
for path in paths:
local_paths.append(self.download_asset(asset_map[path],
@@ -292,7 +296,7 @@ class Http(ResourceGetter):
asset_map = {a['path']: a for a in assets}
if resource.kind in ['jar', 'revent']:
path = get_generic_resource(resource, asset_map.keys())
path = get_generic_resource(resource, list(asset_map.keys()))
if path:
return asset_map[path]
elif resource.kind == 'executable':
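
An illustrative sketch, assuming self.geturl() returns a requests-style response object, of why the hunk above decodes before parsing: on Python 3 response.content is bytes, and json.loads only started accepting bytes in Python 3.6, so decoding to text keeps the call working on every supported interpreter.

import json
import sys
from collections import namedtuple

def parse_index(response):
    # response.content is bytes on Python 3 and a byte-string on Python 2;
    # decode explicitly rather than relying on json.loads accepting bytes.
    if sys.version_info[0] == 3:
        content = response.content.decode('utf-8')
    else:
        content = response.content
    return json.loads(content)

FakeResponse = namedtuple('FakeResponse', 'content')   # stand-in for a real HTTP response
print(parse_index(FakeResponse(b'{"dhrystone": []}')))  # {'dhrystone': []} on Python 3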

View File

@@ -90,7 +90,7 @@ def convert_wa2_agenda(filepath, output_path):
default=True),
])
for param in orig_agenda.keys():
for param in list(orig_agenda.keys()):
for cfg_point in config_points:
if param == cfg_point.name or param in cfg_point.aliases:
if cfg_point.name == 'augmentations':
@@ -105,7 +105,7 @@ def convert_wa2_agenda(filepath, output_path):
# Convert plugin configuration
output.write("# Plugin Configuration\n")
for param in orig_agenda.keys():
for param in list(orig_agenda.keys()):
if pluginloader.has_plugin(param):
entry = {param: orig_agenda.pop(param)}
yaml.dump(format_parameter(entry), output, default_flow_style=False)
@@ -114,7 +114,7 @@ def convert_wa2_agenda(filepath, output_path):
# Write any additional aliased parameters into new config
plugin_cache = PluginCache()
output.write("# Additional global aliases\n")
for param in orig_agenda.keys():
for param in list(orig_agenda.keys()):
if plugin_cache.is_global_alias(param):
entry = {param: orig_agenda.pop(param)}
yaml.dump(format_parameter(entry), output, default_flow_style=False)
@@ -123,7 +123,7 @@ def convert_wa2_agenda(filepath, output_path):
def format_parameter(param):
if isinstance(param, dict):
return {identifier(k) : v for k, v in param.iteritems()}
return {identifier(k) : v for k, v in param.items()}
else:
return param

View File

@@ -165,7 +165,7 @@ def priority(priority):
def decorate(func):
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
wrapper.func_name = func.func_name
wrapper.__name__ = func.__name__
if priority in signal.CallbackPriority.levels:
wrapper.priority = signal.CallbackPriority(priority)
else:
@@ -255,7 +255,7 @@ class ManagedCallback(object):
global failures_detected # pylint: disable=W0603
failures_detected = True
log_error(e, logger)
context.add_event(e.message)
context.add_event(e.args[0] if e.args else str(e))
if isinstance(e, WorkloadError):
context.set_status('FAILED')
elif isinstance(e, TargetError) or isinstance(e, TimeoutError):
@@ -268,7 +268,7 @@ class ManagedCallback(object):
def __repr__(self):
text = 'ManagedCallback({}, {})'
return text.format(self.instrument.name, self.callback.im_func.func_name)
return text.format(self.instrument.name, self.callback.__func__.__name__)
__str__ = __repr__
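
A small sketch of the attribute renames in this file, which are language-level facts rather than anything WA-specific: func.func_name and bound_method.im_func are Python 2 spellings, while __name__ and __func__ are available on both Python 2 and 3.

class Instrumentish(object):
    def setup(self):
        pass

def describe(callback):
    # bound_method.__func__ is the underlying function on both 2 and 3
    # (im_func is 2-only); __name__ replaces the old func_name alias.
    return '{}.{}'.format(type(callback.__self__).__name__,
                          callback.__func__.__name__)

print(describe(Instrumentish().setup))  # Instrumentish.setup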

View File

@@ -85,7 +85,7 @@ class Job(object):
enabled_instruments = set(i.name for i in instrument.get_enabled())
enabled_output_processors = set(p.name for p in pm.get_enabled())
for augmentation in self.spec.augmentations.values():
for augmentation in list(self.spec.augmentations.values()):
augmentation_cls = context.cm.plugin_cache.get_plugin_class(augmentation)
if augmentation_cls.kind == 'instrument':
instruments_to_enable.add(augmentation)

View File

@@ -10,7 +10,7 @@ from wa.framework.configuration.execution import CombinedConfig
from wa.framework.exception import HostError
from wa.framework.run import RunState, RunInfo
from wa.framework.target.info import TargetInfo
from wa.utils.misc import touch, ensure_directory_exists
from wa.utils.misc import touch, ensure_directory_exists, isiterable
from wa.utils.serializer import write_pod, read_pod, is_pod
from wa.utils.types import enum, numeric
@@ -229,7 +229,7 @@ class RunOutput(Output):
if os.path.isfile(self.jobsfile):
self.job_specs = self.read_job_specs()
for job_state in self.state.jobs.itervalues():
for job_state in self.state.jobs.values():
job_path = os.path.join(self.basepath, job_state.output_name)
job = JobOutput(job_path, job_state.id,
job_state.label, job_state.iteration,
@@ -387,14 +387,14 @@ class Result(object):
if key not in self.metadata:
return self.add_metadata(key, *args)
if hasattr(self.metadata[key], 'iteritems'):
if hasattr(self.metadata[key], 'items'):
if len(args) == 2:
self.metadata[key][args[0]] = args[1]
elif len(args) > 2: # assume list of key-value pairs
for k, v in args:
self.metadata[key][k] = v
elif hasattr(args[0], 'iteritems'):
for k, v in args[0].iteritems():
elif hasattr(args[0], 'items'):
for k, v in args[0].items():
self.metadata[key][k] = v
else:
raise ValueError('Invalid value for key "{}": {}'.format(key, args))

View File

@@ -25,6 +25,8 @@ from collections import OrderedDict, defaultdict
from itertools import chain
from copy import copy
from future.utils import with_metaclass
from wa.framework.configuration.core import settings, ConfigurationPoint as Parameter
from wa.framework.exception import (NotFoundError, PluginLoaderError, TargetError,
ValidationError, ConfigError, HostError)
@@ -34,7 +36,10 @@ from wa.utils.misc import (ensure_directory_exists as _d, walk_modules, load_cla
from wa.utils.types import identifier
MODNAME_TRANS = string.maketrans(':/\\.', '____')
if sys.version_info[0] == 3:
MODNAME_TRANS = str.maketrans(':/\\.', '____')
else:
MODNAME_TRANS = string.maketrans(':/\\.', '____')
class AttributeCollection(object):
@@ -50,7 +55,7 @@ class AttributeCollection(object):
@property
def values(self):
return self._attrs.values()
return list(self._attrs.values())
def __init__(self, attrcls):
self._attrcls = attrcls
@@ -61,7 +66,7 @@ class AttributeCollection(object):
if p.name in self._attrs:
if p.override:
newp = copy(self._attrs[p.name])
for a, v in p.__dict__.iteritems():
for a, v in p.__dict__.items():
if v is not None:
setattr(newp, a, v)
if not hasattr(newp, "_overridden"):
@@ -77,7 +82,7 @@ class AttributeCollection(object):
append = add
def __str__(self):
return 'AC({})'.format(map(str, self._attrs.values()))
return 'AC({})'.format(list(map(str, list(self._attrs.values()))))
__repr__ = __str__
@@ -212,14 +217,14 @@ class PluginMeta(type):
if hasattr(cls, 'aliases'):
aliases, cls.aliases = cls.aliases, AliasCollection()
for alias in aliases:
if isinstance(alias, basestring):
if isinstance(alias, str):
alias = Alias(alias)
alias.validate(cls)
alias.plugin_name = cls.name
cls.aliases.add(alias)
class Plugin(object):
class Plugin(with_metaclass(PluginMeta, object)):
"""
Base class for all WA plugins. An plugin is basically a plug-in. It
extends the functionality of WA in some way. Plugins are discovered and
@@ -230,7 +235,6 @@ class Plugin(object):
``~/.workload_automation/``.
"""
__metaclass__ = PluginMeta
kind = None
name = None
@@ -334,7 +338,7 @@ class Plugin(object):
can = has
def _load_module(self, loader, module_spec):
if isinstance(module_spec, basestring):
if isinstance(module_spec, str):
name = module_spec
params = {}
elif isinstance(module_spec, dict):
@@ -342,7 +346,7 @@ class Plugin(object):
msg = 'Invalid module spec: {}; dict must have exctly one key -- '\
'the module name.'
raise ValueError(msg.format(module_spec))
name, params = module_spec.items()[0]
name, params = list(module_spec.items())[0]
else:
message = 'Invalid module spec: {}; must be a string or a one-key dict.'
raise ValueError(message.format(module_spec))
@@ -491,7 +495,7 @@ class PluginLoader(object):
"""
name, base_kwargs = self.resolve_alias(name)
kwargs = OrderedDict(chain(base_kwargs.iteritems(), kwargs.iteritems()))
kwargs = OrderedDict(chain(iter(base_kwargs.items()), iter(kwargs.items())))
cls = self.get_plugin_class(name, kind)
plugin = cls(*args, **kwargs)
return plugin
@@ -514,10 +518,10 @@ class PluginLoader(object):
"""
if kind is None:
return self.plugins.values()
return list(self.plugins.values())
if kind not in self.kind_map:
raise ValueError('Unknown plugin type: {}'.format(kind))
return self.kind_map[kind].values()
return list(self.kind_map[kind].values())
def has_plugin(self, name, kind=None):
"""
@@ -625,7 +629,7 @@ class PluginLoader(object):
modname = os.path.splitext(filepath[1:])[0].translate(MODNAME_TRANS)
module = imp.load_source(modname, filepath)
self._discover_in_module(module)
except (SystemExit, ImportError), e:
except (SystemExit, ImportError) as e:
if self.keep_going:
self.logger.warning('Failed to load {}'.format(filepath))
self.logger.warning('Got: {}'.format(e))
@@ -639,7 +643,7 @@ class PluginLoader(object):
def _discover_in_module(self, module): # NOQA pylint: disable=too-many-branches
self.logger.debug('Checking module %s', module.__name__)
with log.indentcontext():
for obj in vars(module).itervalues():
for obj in vars(module).values():
if inspect.isclass(obj):
if not issubclass(obj, Plugin):
continue
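
A hedged sketch of the metaclass idiom adopted in this file: a Python 2 __metaclass__ attribute is silently ignored by Python 3, and the Python 3 class Plugin(metaclass=PluginMeta) syntax is a SyntaxError on Python 2, so future.utils.with_metaclass (or the equivalent six.with_metaclass) is the usual cross-version spelling. The metaclass below is a toy, not PluginMeta.

from future.utils import with_metaclass  # provided by the 'future' package

class KindMeta(type):
    # Toy metaclass that stamps a 'kind' attribute onto every new class.
    def __new__(mcs, name, bases, attrs):
        cls = super(KindMeta, mcs).__new__(mcs, name, bases, attrs)
        cls.kind = attrs.get('kind') or name.lower()
        return cls

# Works unchanged under Python 2 and Python 3.
class BasePlugin(with_metaclass(KindMeta, object)):
    pass

class Instrument(BasePlugin):
    pass

print(Instrument.kind)  # instrument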

View File

@@ -21,7 +21,7 @@ class __LoaderWrapper(object):
def kinds(self):
if not self._loader:
self.reset()
return self._loader.kind_map.keys()
return list(self._loader.kind_map.keys())
@property
def kind_map(self):

View File

@@ -287,7 +287,7 @@ def loose_version_matching(config_version, apk_version):
if len(apk_version) < len(config_version):
return False # More specific version requested than available
for i in xrange(len(config_version)):
for i in range(len(config_version)):
if config_version[i] != apk_version[i]:
return False
return True

View File

@@ -76,7 +76,7 @@ class RunState(object):
@property
def num_completed_jobs(self):
return sum(1 for js in self.jobs.itervalues()
return sum(1 for js in self.jobs.values()
if js.status > Status.RUNNING)
def __init__(self):
@@ -95,7 +95,7 @@ class RunState(object):
def get_status_counts(self):
counter = Counter()
for job_state in self.jobs.itervalues():
for job_state in self.jobs.values():
counter[job_state.status] += 1
return counter
@@ -103,7 +103,7 @@ class RunState(object):
return OrderedDict(
status=str(self.status),
timestamp=self.timestamp,
jobs=[j.to_pod() for j in self.jobs.itervalues()],
jobs=[j.to_pod() for j in self.jobs.values()],
)

View File

@@ -28,7 +28,7 @@ def list_target_descriptions(loader=pluginloader):
raise PluginLoaderError(msg.format(desc.name, prev_dtor.name,
descriptor.name))
targets[desc.name] = desc
return targets.values()
return list(targets.values())
def get_target_description(name, loader=pluginloader):
@@ -47,11 +47,11 @@ def instantiate_target(tdesc, params, connect=None, extra_platform_params=None):
tp, pp, cp = {}, {}, {}
for supported_params, new_params in (target_params, tp), (platform_params, pp), (conn_params, cp):
for name, value in supported_params.iteritems():
for name, value in supported_params.items():
if value.default and name == value.name:
new_params[name] = value.default
for name, value in params.iteritems():
for name, value in params.items():
if name in target_params:
tp[name] = value
elif name in platform_params:
@@ -64,7 +64,7 @@ def instantiate_target(tdesc, params, connect=None, extra_platform_params=None):
msg = 'Unexpected parameter for {}: {}'
raise ValueError(msg.format(tdesc.name, name))
for pname, pval in (extra_platform_params or {}).iteritems():
for pname, pval in (extra_platform_params or {}).items():
if pname in pp:
raise RuntimeError('Platform parameter clash: {}'.format(pname))
pp[pname] = pval
@@ -121,7 +121,7 @@ class TargetDescription(object):
vals = []
elif isiterable(vals):
if hasattr(vals, 'values'):
vals = v.values()
vals = list(v.values())
else:
msg = '{} must be iterable; got "{}"'
raise ValueError(msg.format(attr, vals))
@@ -453,11 +453,11 @@ class DefaultTargetDescriptor(TargetDescriptor):
def get_descriptions(self):
result = []
for target_name, target_tuple in TARGETS.iteritems():
for target_name, target_tuple in TARGETS.items():
(target, conn), target_params = self._get_item(target_tuple)
assistant = ASSISTANTS[target_name]
conn_params = CONNECTION_PARAMS[conn]
for platform_name, platform_tuple in PLATFORMS.iteritems():
for platform_name, platform_tuple in PLATFORMS.items():
(platform, plat_conn), platform_params = self._get_item(platform_tuple)
name = '{}_{}'.format(platform_name, target_name)
td = TargetDescription(name, self)
@@ -484,11 +484,11 @@ class DefaultTargetDescriptor(TargetDescriptor):
return cls, params
param_map = OrderedDict((p.name, copy(p)) for p in params)
for name, value in defaults.iteritems():
for name, value in defaults.items():
if name not in param_map:
raise ValueError('Unexpected default "{}"'.format(name))
param_map[name].default = value
return cls, param_map.values()
return cls, list(param_map.values())
@@ -522,7 +522,7 @@ def create_target_description(name, *args, **kwargs):
def _get_target_defaults(target):
specificity = 0
res = ('linux', TARGETS['linux']) # fallback to a generic linux target
for name, ttup in TARGETS.iteritems():
for name, ttup in TARGETS.items():
if issubclass(target, ttup[0][0]):
new_spec = len(inspect.getmro(ttup[0][0]))
if new_spec > specificity:
@@ -540,7 +540,7 @@ def add_description_for_target(target, description=None, **kwargs):
if 'platform' not in kwargs:
kwargs['platform'] = Platform
if 'platform_params' not in kwargs:
for (plat, conn), params, _ in PLATFORMS.itervalues():
for (plat, conn), params, _ in PLATFORMS.values():
if plat == kwargs['platform']:
kwargs['platform_params'] = params
if conn is not None and kwargs['conn'] is None:

View File

@@ -10,7 +10,7 @@ def cpuinfo_from_pod(pod):
cpuinfo.sections = pod['cpuinfo']
lines = []
for section in cpuinfo.sections:
for key, value in section.iteritems():
for key, value in section.items():
line = '{}: {}'.format(key, value)
lines.append(line)
lines.append('')
@@ -35,7 +35,7 @@ def kernel_config_from_pod(pod):
config = KernelConfig('')
config._config = pod['kernel_config']
lines = []
for key, value in config._config.iteritems():
for key, value in config._config.items():
if value == 'n':
lines.append('# {} is not set'.format(key))
else:

View File

@@ -33,7 +33,7 @@ class RuntimeConfig(Plugin):
@property
def supported_parameters(self):
return self._runtime_params.values()
return list(self._runtime_params.values())
@property
def core_names(self):
@@ -166,12 +166,12 @@ class HotplugRuntimeConfig(RuntimeConfig):
def validate_parameters(self):
if len(self.num_cores) == self.target.number_of_cpus:
if all(v is False for v in self.num_cores.values()):
if all(v is False for v in list(self.num_cores.values())):
raise ValueError('Cannot set number of all cores to 0')
def commit(self):
'''Online all CPUs required in order before then off-lining'''
num_cores = sorted(self.num_cores.iteritems())
num_cores = sorted(self.num_cores.items())
for cpu, online in num_cores:
if online:
self.target.hotplug.online(cpu)
@@ -190,7 +190,7 @@ class SysfileValuesRuntimeConfig(RuntimeConfig):
#pylint: disable=unused-argument
@staticmethod
def set_sysfile(obj, value, core):
for path, value in value.iteritems():
for path, value in value.items():
verify = True
if path.endswith('!'):
verify = False
@@ -222,7 +222,7 @@ class SysfileValuesRuntimeConfig(RuntimeConfig):
return
def commit(self):
for path, (value, verify) in self.sysfile_values.iteritems():
for path, (value, verify) in self.sysfile_values.items():
self.target.write_value(path, value, verify=verify)
def clear(self):
@@ -255,7 +255,7 @@ class FreqValue(object):
raise TargetError(msg.format(value))
elif isinstance(value, int) and value in self.values:
return value
elif isinstance(value, basestring):
elif isinstance(value, str):
value = caseless_string(value)
if value in ['min', 'max']:
return value
@@ -675,7 +675,7 @@ class IdleStateValue(object):
if self.values is None:
return value
if isinstance(value, basestring):
if isinstance(value, str):
value = caseless_string(value)
if value == 'all':
return [state[0] for state in self.values]

View File

@@ -39,7 +39,7 @@ class RuntimeParameterManager(object):
def merge_runtime_parameters(self, parameters):
merged_params = obj_dict()
for source in parameters:
for name, value in parameters[source].iteritems():
for name, value in parameters[source].items():
cp = self.get_cfg_point(name)
cp.set_value(merged_params, value)
return dict(merged_params)
@@ -60,7 +60,7 @@ class RuntimeParameterManager(object):
# Stores a set of parameters performing isolated validation when appropriate
def set_runtime_parameters(self, parameters):
for name, value in parameters.iteritems():
for name, value in parameters.items():
cfg = self.get_config_for_name(name)
if cfg is None:
msg = 'Unsupported runtime parameter: "{}"'
@@ -74,14 +74,14 @@ class RuntimeParameterManager(object):
def get_config_for_name(self, name):
name = caseless_string(name)
for k, v in self.runtime_params.iteritems():
for k, v in self.runtime_params.items():
if name == k:
return v.rt_config
return None
def get_cfg_point(self, name):
name = caseless_string(name)
for k, v in self.runtime_params.iteritems():
for k, v in self.runtime_params.items():
if name == k:
return v.cfg_point
raise ConfigError('Unknown runtime parameter: {}'.format(name))

View File

@@ -14,6 +14,7 @@
#
import os
import sys
from collections import namedtuple
from subprocess import Popen, PIPE
@@ -45,4 +46,7 @@ def get_commit():
p.wait()
if p.returncode:
return None
return std[:8]
if sys.version_info[0] == 3:
return std[:8].decode(sys.stdout.encoding)
else:
return std[:8]
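
Finally, a standalone sketch of the byte-handling change in this last hunk: on Python 3, Popen.communicate() returns bytes, so the sliced commit hash has to be decoded to get a plain string, while on Python 2 it is already a native str. The utf-8 fallback below is an addition for the case where stdout has no encoding (e.g. when piped).

import sys
from subprocess import Popen, PIPE

def short_git_hash(path='.'):
    # Same idea as get_commit() above, but self-contained and runnable anywhere.
    p = Popen(['git', 'rev-parse', 'HEAD'], stdout=PIPE, stderr=PIPE, cwd=path)
    std, _ = p.communicate()
    if p.returncode:
        return None
    if sys.version_info[0] == 3:
        return std[:8].decode(sys.stdout.encoding or 'utf-8')
    return std[:8]

print(short_git_hash())  # e.g. b3de8545 when run inside a WA checkout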