Add support for Python 3
Add support for running under Python 3, while maintaining compatibility with Python 2. See http://python-future.org/compatible_idioms.html for details on the idioms behind these changes.
This commit is contained in:
parent c3ddb31d4d
commit b3de85455a
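This commit follows the python-future "compatible idioms" guide linked above: a single codebase that runs unmodified on Python 2 and Python 3, with explicit version checks only where no shared idiom exists. A minimal sketch of the overall pattern (illustrative only; not a file from this commit):

    # Sketch of a dual Python 2/3 module in the style this commit adopts.
    from __future__ import print_function  # print() is a function on both versions
    import sys

    config = {'device': 'juno', 'iterations': 3}  # hypothetical values

    # dict.items() exists on both; Python 2's dict.iteritems() is gone in 3.
    for key, value in config.items():
        print('{}: {}'.format(key, value))

    # Explicit version check, used throughout the diff where the two
    # interpreters genuinely diverge.
    if sys.version_info[0] == 3:
        text_type = str
    else:
        text_type = unicode  # noqa: F821 -- only defined on Python 2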
@@ -19,8 +19,8 @@ For more information on migrating from WA2 to WA3 please see the
 Not all of WA2 extensions have been ported for the initial 3.0.0 release. We
 have ported the ones we believe to be most widely used and useful. The porting
-work will continue, and more of WA2's extensions will be in the future releases,
-however we do not intend to port absolutely everything, as some things we
+work will continue, and more of WA2's extensions will be in the future releases.
+However, we do not intend to port absolutely everything, as some things we
 believe to be no longer useful.
 
 .. note:: If there is a particular WA2 extension you would like to see in WA3 that
@@ -31,6 +31,15 @@ believe to be no longer useful.
 New Features
 ~~~~~~~~~~~~
 
+- Python 3 support. WA now runs on both Python 2 and Python 3.
+
+  .. warning:: Python 2 support should now be considered deprecated. Python 2
+               will still be fully supported up to the next major release
+               (v3.1). After that, Python 2 will be supported for existing
+               functionality, however there will be no guarantee that newly
+               added functionality would be compatible with Python 2. Support
+               for Python 2 will be dropped completely after release v3.2.
+
 - There is a new Output API which can be used to aid in post processing a
   run's output. For more information please see :ref:`output_processing_api`.
 - All "augmentations" can now be enabled on a per workload basis (in WA2 this
setup.py (5 changes)
@@ -81,9 +81,10 @@ params = dict(
         'pyYAML',  # YAML-formatted agenda parsing
         'requests',  # Fetch assets over HTTP
         'devlib>=0.0.4',  # Interacting with devices
-        'louie',  # callbacks dispatch
+        'louie-latest',  # callbacks dispatch
         'wrapt',  # better decorators
         'pandas>=0.13.1',  # Data analysis and manipulation
+        'future',  # Python 2-3 compatibility
     ],
     dependency_links=['https://github.com/ARM-software/devlib/tarball/master#egg=devlib-0.0.4'],
@@ -104,7 +105,7 @@ params = dict(
     ],
 )
 
-all_extras = list(chain(params['extras_require'].itervalues()))
+all_extras = list(chain(iter(params['extras_require'].values())))
 params['extras_require']['everything'] = all_extras
 
 setup(**params)
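The new 'future' requirement is the python-future compatibility package: it supplies renamed standard-library modules and small helpers so one import line works on both interpreters. The `iter(...)` wrapper added around `.values()` mirrors the mechanical rewrite the `futurize` tool performs for `itervalues()`. A sketch of the helpers from this dependency that appear later in the diff (names as documented by python-future):

    # Helpers from the 'future' package used elsewhere in this commit.
    from future.moves.itertools import zip_longest  # izip_longest on Py2, zip_longest on Py3
    from future.utils import with_metaclass         # bridges the differing metaclass syntax

    print(list(zip_longest([1, 2, 3], ['a'])))
    # [(1, 'a'), (2, None), (3, None)]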
@@ -94,9 +94,9 @@ class CreateWorkloadSubcommand(SubCommand):
         self.parser.add_argument('-f', '--force', action='store_true',
                                  help='Create the new workload even if a workload with the specified ' +
                                       'name already exists.')
-        self.parser.add_argument('-k', '--kind', metavar='KIND', default='basic', choices=create_funcs.keys(),
+        self.parser.add_argument('-k', '--kind', metavar='KIND', default='basic', choices=list(create_funcs.keys()),
                                  help='The type of workload to be created. The available options ' +
-                                      'are: {}'.format(', '.join(create_funcs.keys())))
+                                      'are: {}'.format(', '.join(list(create_funcs.keys()))))
 
     def execute(self, state, args):  # pylint: disable=R0201
         where = args.path or 'local'
@@ -179,7 +179,7 @@ def create_workload(name, kind='basic', where='local', check_name=True, **kwargs
     except KeyError:
         raise CommandError('Unknown workload type: {}'.format(kind))
 
-    print 'Workload created in {}'.format(workload_dir)
+    print('Workload created in {}'.format(workload_dir))
 
 
 def create_template_workload(path, name, kind, class_name):
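Python 3 turns the print statement into a function. Conveniently, a single-argument call is also valid Python 2, where the parentheses parse as grouping, which is why these call sites need no version check. Sketch:

    workload_dir = '/tmp/example-workload'  # hypothetical path for illustration

    # Python 2 only:  print 'Workload created in {}'.format(workload_dir)
    # Both versions:
    print('Workload created in {}'.format(workload_dir))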
@@ -71,7 +71,7 @@ def list_targets():
     output = DescriptionListFormatter()
     for target in targets:
         output.add_item(target.description or '', target.name)
-    print output.format_data()
+    print(output.format_data())
 
 
 def list_plugins(args, filters):
@@ -80,7 +80,7 @@ def list_plugins(args, filters):
     filtered_results = []
     for result in results:
         passed = True
-        for k, v in filters.iteritems():
+        for k, v in filters.items():
             if getattr(result, k) != v:
                 passed = False
                 break
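`iteritems()`, `iterkeys()` and `itervalues()` were removed in Python 3; `items()`, `keys()` and `values()` exist on both versions, returning lists on Python 2 and lazy views on Python 3. The `list(...)` wrappers that recur throughout this diff are needed wherever the result is indexed, concatenated, or the dict is mutated mid-iteration. Sketch:

    d = {'a': 1, 'b': 2}

    # Works on both versions; replaces Python 2's d.iteritems().
    for k, v in d.items():
        print(k, v)

    # On Python 3, mutating while iterating a live view raises RuntimeError,
    # so snapshot the keys first -- hence the list() wrapping in the hunks above.
    for k in list(d.keys()):
        if k == 'a':
            del d[k]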
@@ -95,7 +95,7 @@ def list_plugins(args, filters):
     output = DescriptionListFormatter()
     for result in sorted(filtered_results, key=lambda x: x.name):
         output.add_item(get_summary(result), result.name)
-    print output.format_data()
+    print(output.format_data())
 
 
 def check_platform(plugin, platform):
@@ -24,6 +24,10 @@ from wa.framework.target.manager import TargetManager
 from wa.utils.revent import ReventRecorder
 
 
+if sys.version_info[0] == 3:
+    raw_input = input
+
+
 class RecordCommand(Command):
 
     name = 'record'
@@ -146,7 +150,7 @@ class RecordCommand(Command):
         if os.path.exists(host_path):
             msg = 'Revent file \'{}\' already exists, overwrite? [y/n]'
             self.logger.info(msg.format(revent_file_name))
-            if raw_input('') == 'y':
+            if input('') == 'y':
                 os.remove(host_path)
             else:
                 msg = 'Did not pull and overwrite \'{}\''
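Python 3 renamed `raw_input()` to `input()`; Python 2's own `input()` evaluated what was typed, so it is not a safe substitute. The hunk above aliases `raw_input` to `input` on Python 3 so any remaining `raw_input()` call sites keep working, and switches this call site to `input()`. A common equivalent sketch, aliasing in the opposite direction:

    import sys

    # On Python 2, route input() to raw_input() so the Python 3 spelling is safe.
    if sys.version_info[0] == 2:
        input = raw_input  # noqa: F821 -- raw_input exists only on Python 2

    answer = input('Overwrite existing file? [y/n] ')
    print('you typed: {}'.format(answer))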
@@ -222,7 +226,7 @@ class RecordCommand(Command):
         if not file_name:
             file_name = '{}.revent'.format(self.target.model)
         if not output_path:
-            output_path = os.getcwdu()
+            output_path = os.getcwd()
 
         return output_path, file_name
 
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+import sys
 from subprocess import call, Popen, PIPE
 
 from wa import Command
@@ -66,7 +67,11 @@ class ShowCommand(Command):
 
         if which('pandoc'):
             p = Popen(['pandoc', '-f', 'rst', '-t', 'man'], stdin=PIPE, stdout=PIPE, stderr=PIPE)
-            output, _ = p.communicate(rst_output)
+            if sys.version_info[0] == 3:
+                output, _ = p.communicate(rst_output.encode(sys.stdin.encoding))
+                output = output.decode(sys.stdout.encoding)
+            else:
+                output, _ = p.communicate(rst_output)
 
             # Make sure to double escape back slashes
             output = output.replace('\\', '\\\\\\')
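On Python 3, `subprocess` pipes carry bytes, so text must be encoded on the way in and decoded on the way out; Python 2's `str` was already a byte string, hence the version split above. (The hunk uses `sys.stdin.encoding`, which can be None when stdin is not a terminal; the sketch below hedges with 'utf-8'.) A self-contained sketch, assuming the POSIX `cat` command purely for illustration:

    import subprocess
    import sys

    p = subprocess.Popen(['cat'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    text = 'hello, pipes'
    if sys.version_info[0] == 3:
        out, _ = p.communicate(text.encode('utf-8'))  # bytes in
        out = out.decode('utf-8')                     # text out
    else:
        out, _ = p.communicate(text)                  # Py2 str is already bytes
    print(out)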
@@ -78,7 +83,7 @@ class ShowCommand(Command):
 
             call('echo "{}" | man -l -'.format(escape_double_quotes(output)), shell=True)
         else:
-            print rst_output
+            print(rst_output)
 
 
 def get_target_description(name):
@@ -95,11 +95,11 @@ class RebootPolicy(object):
 
     __repr__ = __str__
 
-    def __cmp__(self, other):
+    def __eq__(self, other):
         if isinstance(other, RebootPolicy):
-            return cmp(self.policy, other.policy)
+            return self.policy == other.policy
         else:
-            return cmp(self.policy, other)
+            return self.policy == other
 
     def to_pod(self):
         return self.policy
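Python 3 drops the `cmp()` builtin and never calls `__cmp__`; comparisons have to be spelled out as rich-comparison methods. Note the replacement above also narrows semantics slightly: `__cmp__` provided ordering as well, while `__eq__` covers equality only. A sketch of the pattern, using a hypothetical stand-in class:

    class RebootPolicyish(object):  # hypothetical stand-in for RebootPolicy
        def __init__(self, policy):
            self.policy = policy

        def __eq__(self, other):
            if isinstance(other, RebootPolicyish):
                return self.policy == other.policy
            return self.policy == other

        def __ne__(self, other):  # Python 2 does not derive != from ==
            return not self.__eq__(other)

    assert RebootPolicyish('never') == 'never'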
@@ -127,7 +127,7 @@ class LoggingConfig(dict):
     def __init__(self, config=None):
         dict.__init__(self)
         if isinstance(config, dict):
-            config = {identifier(k.lower()): v for k, v in config.iteritems()}
+            config = {identifier(k.lower()): v for k, v in config.items()}
             self['regular_format'] = config.pop('regular_format', self.defaults['regular_format'])
             self['verbose_format'] = config.pop('verbose_format', self.defaults['verbose_format'])
             self['file_format'] = config.pop('file_format', self.defaults['file_format'])
@@ -135,9 +135,9 @@ class LoggingConfig(dict):
             self['color'] = config.pop('color', self.defaults['color'])
             if config:
                 message = 'Unexpected logging configuration parameters: {}'
                 raise ValueError(message.format(bad_vals=', '.join(list(config.keys()))))
         elif config is None:
-            for k, v in self.defaults.iteritems():
+            for k, v in self.defaults.items():
                 self[k] = v
         else:
             raise ValueError(config)
@@ -360,7 +360,7 @@ class Configuration(object):
             cfg_point.set_value(instance, value)
         if pod:
             msg = 'Invalid entry(ies) for "{}": "{}"'
-            raise ValueError(msg.format(cls.name, '", "'.join(pod.keys())))
+            raise ValueError(msg.format(cls.name, '", "'.join(list(pod.keys()))))
         return instance
 
     def __init__(self):
@@ -380,7 +380,7 @@ class Configuration(object):
 
 
     def update_config(self, values, check_mandatory=True):
-        for k, v in values.iteritems():
+        for k, v in values.items():
             self.set(k, v, check_mandatory=check_mandatory)
 
     def validate(self):
@@ -824,7 +824,7 @@ class JobSpec(Configuration):
     def update_config(self, source, check_mandatory=True):
         self._sources.append(source)
         values = source.config
-        for k, v in values.iteritems():
+        for k, v in values.items():
             if k == "id":
                 continue
             elif k.endswith('_parameters'):
@@ -849,7 +849,7 @@ class JobSpec(Configuration):
             if not config:
                 continue
 
-            for name, cfg_point in cfg_points.iteritems():
+            for name, cfg_point in cfg_points.items():
                 if name in config:
                     value = config.pop(name)
                     cfg_point.set_value(workload_params, value,
@@ -873,7 +873,7 @@ class JobSpec(Configuration):
                 runtime_parameters[source] = global_runtime_params[source]
 
         # Add runtime parameters from JobSpec
-        for source, values in self.to_merge['runtime_parameters'].iteritems():
+        for source, values in self.to_merge['runtime_parameters'].items():
             runtime_parameters[source] = values
 
         # Merge
@@ -884,9 +884,9 @@ class JobSpec(Configuration):
                                  for source in self._sources[1:]])  # ignore first id, "global"
 
         # ensure *_parameters are always obj_dict's
-        self.boot_parameters = obj_dict((self.boot_parameters or {}).items())
-        self.runtime_parameters = obj_dict((self.runtime_parameters or {}).items())
-        self.workload_parameters = obj_dict((self.workload_parameters or {}).items())
+        self.boot_parameters = obj_dict(list((self.boot_parameters or {}).items()))
+        self.runtime_parameters = obj_dict(list((self.runtime_parameters or {}).items()))
+        self.workload_parameters = obj_dict(list((self.workload_parameters or {}).items()))
 
         if self.label is None:
             self.label = self.workload_name
@@ -903,7 +903,7 @@ class JobGenerator(object):
         self._read_augmentations = True
         if self._enabled_instruments is None:
             self._enabled_instruments = []
-            for entry in self._enabled_augmentations.merge_with(self.disabled_augmentations).values():
+            for entry in list(self._enabled_augmentations.merge_with(self.disabled_augmentations).values()):
                 entry_cls = self.plugin_cache.get_plugin_class(entry)
                 if entry_cls.kind == 'instrument':
                     self._enabled_instruments.append(entry)
@@ -914,7 +914,7 @@ class JobGenerator(object):
         self._read_augmentations = True
         if self._enabled_processors is None:
             self._enabled_processors = []
-            for entry in self._enabled_augmentations.merge_with(self.disabled_augmentations).values():
+            for entry in list(self._enabled_augmentations.merge_with(self.disabled_augmentations).values()):
                 entry_cls = self.plugin_cache.get_plugin_class(entry)
                 if entry_cls.kind == 'output_processor':
                     self._enabled_processors.append(entry)
@@ -934,7 +934,7 @@ class JobGenerator(object):
         self.job_spec_template.name = "globally specified job spec configuration"
         self.job_spec_template.id = "global"
         # Load defaults
-        for cfg_point in JobSpec.configuration.itervalues():
+        for cfg_point in JobSpec.configuration.values():
             cfg_point.set_value(self.job_spec_template, check_mandatory=False)
 
         self.root_node = SectionNode(self.job_spec_template)
@@ -996,7 +996,7 @@ class JobGenerator(object):
                     break
             else:
                 continue
-            self.update_augmentations(job_spec.augmentations.values())
+            self.update_augmentations(list(job_spec.augmentations.values()))
             specs.append(job_spec)
         return specs
 
@@ -1,5 +1,7 @@
 import random
-from itertools import izip_longest, groupby, chain
+from itertools import groupby, chain
+
+from future.moves.itertools import zip_longest
 
 from wa.framework.configuration.core import (MetaConfiguration, RunConfiguration,
                                              JobGenerator, Status, settings)
@@ -157,8 +159,8 @@ def permute_by_iteration(specs):
     all_tuples = []
     for spec in chain(*groups):
         all_tuples.append([(spec, i + 1)
-                           for i in xrange(spec.iterations)])
-    for t in chain(*map(list, izip_longest(*all_tuples))):
+                           for i in range(spec.iterations)])
+    for t in chain(*list(map(list, zip_longest(*all_tuples)))):
         if t is not None:
             yield t
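`itertools.izip_longest` became `itertools.zip_longest` in Python 3; `future.moves.itertools` resolves to whichever name the running interpreter provides. The same hunk swaps `xrange` for `range` and wraps `map()` in `list()`, since both return lazy iterators on Python 3. A sketch of the interleaving these permute functions perform, with hypothetical spec names:

    from future.moves.itertools import zip_longest

    specs = ['dhrystone', 'memcpy']   # hypothetical workload specs
    iterations = [2, 1]

    # Build (spec, iteration) tuples, then interleave, padding with None.
    rows = [[(s, i + 1) for i in range(n)] for s, n in zip(specs, iterations)]
    for t in zip_longest(*rows):
        print(t)
    # (('dhrystone', 1), ('memcpy', 1))
    # (('dhrystone', 2), None)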
@@ -183,8 +185,8 @@ def permute_by_section(specs):
     all_tuples = []
     for spec in chain(*groups):
         all_tuples.append([(spec, i + 1)
-                           for i in xrange(spec.iterations)])
-    for t in chain(*map(list, izip_longest(*all_tuples))):
+                           for i in range(spec.iterations)])
+    for t in chain(*list(map(list, zip_longest(*all_tuples)))):
         if t is not None:
             yield t
 
@@ -196,7 +198,7 @@ def permute_randomly(specs):
     """
     result = []
     for spec in specs:
-        for i in xrange(1, spec.iterations + 1):
+        for i in range(1, spec.iterations + 1):
             result.append((spec, i))
     random.shuffle(result)
     for t in result:
@@ -214,5 +216,5 @@ permute_map = {
 def permute_iterations(specs, exec_order):
     if exec_order not in permute_map:
         msg = 'Unknown execution order "{}"; must be in: {}'
-        raise ValueError(msg.format(exec_order, permute_map.keys()))
+        raise ValueError(msg.format(exec_order, list(permute_map.keys())))
     return permute_map[exec_order](specs)
@@ -21,6 +21,7 @@ from wa.framework.exception import ConfigError
 from wa.utils import log
 from wa.utils.serializer import json, read_pod, SerializerSyntaxError
 from wa.utils.types import toggle_set, counter
+from functools import reduce
 
 
 logger = logging.getLogger('config')
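`reduce()` is no longer a builtin on Python 3; it has lived in `functools` on both versions since 2.6, so the unconditional import works everywhere. Sketch:

    from functools import reduce  # builtin on Py2, functools-only on Py3

    counts = [3, 1, 4, 1, 5]
    total = reduce(lambda acc, x: acc + x, counts, 0)
    print(total)  # 14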
@@ -47,27 +48,27 @@ class ConfigParser(object):
         merge_augmentations(raw)
 
         # Get WA core configuration
-        for cfg_point in state.settings.configuration.itervalues():
+        for cfg_point in state.settings.configuration.values():
             value = pop_aliased_param(cfg_point, raw)
             if value is not None:
                 logger.debug('Setting meta "{}" to "{}"'.format(cfg_point.name, value))
                 state.settings.set(cfg_point.name, value)
 
         # Get run specific configuration
-        for cfg_point in state.run_config.configuration.itervalues():
+        for cfg_point in state.run_config.configuration.values():
             value = pop_aliased_param(cfg_point, raw)
             if value is not None:
                 logger.debug('Setting run "{}" to "{}"'.format(cfg_point.name, value))
                 state.run_config.set(cfg_point.name, value)
 
         # Get global job spec configuration
-        for cfg_point in JobSpec.configuration.itervalues():
+        for cfg_point in JobSpec.configuration.values():
             value = pop_aliased_param(cfg_point, raw)
             if value is not None:
                 logger.debug('Setting global "{}" to "{}"'.format(cfg_point.name, value))
                 state.jobs_config.set_global_value(cfg_point.name, value)
 
-        for name, values in raw.iteritems():
+        for name, values in raw.items():
             # Assume that all leftover config is for a plug-in or a global
             # alias it is up to PluginCache to assert this assumption
             logger.debug('Caching "{}" with "{}"'.format(name, values))
@@ -106,7 +107,7 @@ class AgendaParser(object):
 
         if raw:
             msg = 'Invalid top level agenda entry(ies): "{}"'
-            raise ConfigError(msg.format('", "'.join(raw.keys())))
+            raise ConfigError(msg.format('", "'.join(list(raw.keys()))))
 
         sect_ids, wkl_ids = self._collect_ids(sections, global_workloads)
         self._process_global_workloads(state, global_workloads, wkl_ids)
@@ -301,7 +302,7 @@ def _construct_valid_entry(raw, seen_ids, prefix, jobs_config):
     merge_augmentations(raw)
 
     # Validate all workload_entry
-    for name, cfg_point in JobSpec.configuration.iteritems():
+    for name, cfg_point in JobSpec.configuration.items():
         value = pop_aliased_param(cfg_point, raw)
         if value is not None:
             value = cfg_point.kind(value)
@@ -317,7 +318,7 @@ def _construct_valid_entry(raw, seen_ids, prefix, jobs_config):
     # error if there are unknown workload_entry
     if raw:
         msg = 'Invalid entry(ies) in "{}": "{}"'
-        raise ConfigError(msg.format(workload_entry['id'], ', '.join(raw.keys())))
+        raise ConfigError(msg.format(workload_entry['id'], ', '.join(list(raw.keys()))))
 
     return workload_entry
 
@@ -339,7 +340,7 @@ def _collect_valid_id(entry_id, seen_ids, entry_type):
 
 
 def _get_workload_entry(workload):
-    if isinstance(workload, basestring):
+    if isinstance(workload, str):
         workload = {'name': workload}
     elif not isinstance(workload, dict):
         raise ConfigError('Invalid workload entry: "{}"')
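`basestring`, the common ancestor of `str` and `unicode`, does not exist on Python 3. Replacing it with `str` is the simplest port, though on Python 2 it narrows the check slightly: `unicode` values no longer match. python-future's `past.builtins.basestring` is the drop-in shim when that matters. Sketch:

    import sys

    # Tuple of "text" types for the running interpreter.
    if sys.version_info[0] == 3:
        string_types = (str,)
    else:
        string_types = (str, unicode)  # noqa: F821 -- Python 2 only

    workload = 'dhrystone'
    if isinstance(workload, string_types):
        workload = {'name': workload}
    print(workload)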
@@ -90,11 +90,11 @@ class PluginCache(object):
             msg = 'configuration provided for unknown plugin "{}"'
             raise ConfigError(msg.format(plugin_name))
 
-        if not hasattr(values, 'iteritems'):
+        if not hasattr(values, 'items'):
             msg = 'Plugin configuration for "{}" not a dictionary ({} is {})'
             raise ConfigError(msg.format(plugin_name, repr(values), type(values)))
 
-        for name, value in values.iteritems():
+        for name, value in values.items():
             if (plugin_name not in GENERIC_CONFIGS and
                     name not in self.get_plugin_parameters(plugin_name)):
                 msg = "'{}' is not a valid parameter for '{}'"
@@ -124,7 +124,7 @@ class PluginCache(object):
             for source in self.sources:
                 if source not in plugin_config:
                     continue
-                for name, value in plugin_config[source].iteritems():
+                for name, value in plugin_config[source].items():
                     cfg_points[name].set_value(config, value=value)
         else:
             # A more complicated merge that involves priority of sources and
@@ -136,7 +136,7 @@ class PluginCache(object):
 
     def get_plugin(self, name, kind=None, *args, **kwargs):
         config = self.get_plugin_config(name)
-        kwargs = dict(config.items() + kwargs.items())
+        kwargs = dict(list(config.items()) + list(kwargs.items()))
         return self.loader.get_plugin(name, kind=kind, *args, **kwargs)
 
     def get_plugin_class(self, name, kind=None):
@@ -154,18 +154,18 @@ class PluginCache(object):
 
     def _set_plugin_defaults(self, plugin_name, config):
         cfg_points = self.get_plugin_parameters(plugin_name)
-        for cfg_point in cfg_points.itervalues():
+        for cfg_point in cfg_points.values():
             cfg_point.set_value(config, check_mandatory=False)
 
         try:
             _, alias_params = self.resolve_alias(plugin_name)
-            for name, value in alias_params.iteritems():
+            for name, value in alias_params.items():
                 cfg_points[name].set_value(config, value)
         except NotFoundError:
             pass
 
     def _set_from_global_aliases(self, plugin_name, config):
-        for alias, param in self._global_alias_map[plugin_name].iteritems():
+        for alias, param in self._global_alias_map[plugin_name].items():
             if alias in self.global_alias_values:
                 for source in self.sources:
                     if source not in self.global_alias_values[alias]:
@@ -230,7 +230,7 @@ class PluginCache(object):
 
         # Validate final configuration
         merged_config.name = specific_name
-        for cfg_point in ms.cfg_points.itervalues():
+        for cfg_point in ms.cfg_points.values():
             cfg_point.validate(merged_config, check_mandatory=is_final)
 
     def __getattr__(self, name):
@@ -285,7 +285,7 @@ class MergeState(object):
 def update_config_from_source(final_config, source, state):
     if source in state.generic_config:
         final_config.name = state.generic_name
-        for name, cfg_point in state.cfg_points.iteritems():
+        for name, cfg_point in state.cfg_points.items():
             if name in state.generic_config[source]:
                 if name in state.seen_specific_config:
                     msg = ('"{generic_name}" configuration "{config_name}" has '
@@ -307,7 +307,7 @@ def update_config_from_source(final_config, source, state):
 
     if source in state.specific_config:
         final_config.name = state.specific_name
-        for name, cfg_point in state.cfg_points.iteritems():
+        for name, cfg_point in state.cfg_points.items():
             if name in state.specific_config[source]:
                 state.seen_specific_config[name].append(str(source))
                 value = state.specific_config[source].pop(name)
@@ -39,7 +39,7 @@ class JobSpecSource(object):
     def _log_self(self):
         logger.debug('Creating {} node'.format(self.kind))
         with log.indentcontext():
-            for key, value in self.config.iteritems():
+            for key, value in self.config.items():
                 logger.debug('"{}" to "{}"'.format(key, value))
 
 
@@ -20,7 +20,11 @@ from wa.utils.misc import get_traceback
 
 class WAError(Exception):
     """Base class for all Workload Automation exceptions."""
-    pass
+    @property
+    def message(self):
+        if self.args:
+            return self.args[0]
+        return ''
 
 
 class NotFoundError(WAError):
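Python 3 removed the `Exception.message` attribute (deprecated since 2.6); `args` survives on both versions, so the new property re-creates `message` on top of it, and call sites elsewhere in this diff fall back to `e.args[0]`. Sketch of the shim and its use, with a hypothetical stand-in class:

    class WAErrorish(Exception):  # hypothetical stand-in for WAError
        @property
        def message(self):
            if self.args:
                return self.args[0]
            return ''

    try:
        raise WAErrorish('device not responding')
    except WAErrorish as e:
        print(e.message)                         # via the shim property
        print(e.args[0] if e.args else str(e))   # the call-site pattern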
@@ -464,7 +464,7 @@ class Runner(object):
                 self.logger.info('Skipping remaining jobs.')
                 self.context.skip_remaining_jobs()
         except Exception as e:
-            message = e.message if e.message else str(e)
+            message = e.args[0] if e.args else str(e)
             log.log_error(e, self.logger)
             self.logger.error('Skipping remaining jobs due to "{}".'.format(e))
             self.context.skip_remaining_jobs()
@@ -18,7 +18,7 @@
 This module contains the standard set of resource getters used by Workload Automation.
 
 """
-import httplib
+import http.client
 import json
 import logging
 import os
@@ -233,13 +233,17 @@ class Http(ResourceGetter):
             return {}
         index_url = urljoin(self.url, 'index.json')
         response = self.geturl(index_url)
-        if response.status_code != httplib.OK:
+        if response.status_code != http.client.OK:
             message = 'Could not fetch "{}"; recieved "{} {}"'
             self.logger.error(message.format(index_url,
                                              response.status_code,
                                              response.reason))
             return {}
-        return json.loads(response.content)
+        if sys.version_info[0] == 3:
+            content = response.content.decode('utf-8')
+        else:
+            content = response.content
+        return json.loads(content)
 
     def download_asset(self, asset, owner_name):
         url = urljoin(self.url, owner_name, asset['path'])
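`httplib` was renamed to `http.client` in Python 3; on Python 2 this import presumably works because the `future` package added to setup.py above backports the `http.client` module path. The added decode handles `requests` returning `bytes` for `response.content`, which `json.loads` on Python 3 versions before 3.6 would reject. Sketch:

    import json
    import sys

    # http.client.OK == 200 on Python 3; assumption: the 'future' backport
    # supplies the same module path on Python 2.
    from http.client import OK
    assert OK == 200

    raw = b'{"assets": []}'   # what requests' response.content looks like
    content = raw.decode('utf-8') if sys.version_info[0] == 3 else raw
    print(json.loads(content))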
@@ -252,7 +256,7 @@ class Http(ResourceGetter):
             return local_path
         self.logger.debug('Downloading {}'.format(url))
         response = self.geturl(url, stream=True)
-        if response.status_code != httplib.OK:
+        if response.status_code != http.client.OK:
             message = 'Could not download asset "{}"; recieved "{} {}"'
             self.logger.warning(message.format(url,
                                                response.status_code,
@@ -275,7 +279,7 @@ class Http(ResourceGetter):
         if not assets:
             return None
         asset_map = {a['path']: a for a in assets}
-        paths = get_path_matches(resource, asset_map.keys())
+        paths = get_path_matches(resource, list(asset_map.keys()))
         local_paths = []
         for path in paths:
             local_paths.append(self.download_asset(asset_map[path],
@@ -292,7 +296,7 @@ class Http(ResourceGetter):
 
         asset_map = {a['path']: a for a in assets}
         if resource.kind in ['jar', 'revent']:
-            path = get_generic_resource(resource, asset_map.keys())
+            path = get_generic_resource(resource, list(asset_map.keys()))
             if path:
                 return asset_map[path]
         elif resource.kind == 'executable':
@@ -90,7 +90,7 @@ def convert_wa2_agenda(filepath, output_path):
                                            default=True),
         ])
 
-    for param in orig_agenda.keys():
+    for param in list(orig_agenda.keys()):
         for cfg_point in config_points:
             if param == cfg_point.name or param in cfg_point.aliases:
                 if cfg_point.name == 'augmentations':
@@ -105,7 +105,7 @@ def convert_wa2_agenda(filepath, output_path):
 
     # Convert plugin configuration
    output.write("# Plugin Configuration\n")
-    for param in orig_agenda.keys():
+    for param in list(orig_agenda.keys()):
         if pluginloader.has_plugin(param):
             entry = {param: orig_agenda.pop(param)}
             yaml.dump(format_parameter(entry), output, default_flow_style=False)
@@ -114,7 +114,7 @@ def convert_wa2_agenda(filepath, output_path):
     # Write any additional aliased parameters into new config
     plugin_cache = PluginCache()
     output.write("# Additional global aliases\n")
-    for param in orig_agenda.keys():
+    for param in list(orig_agenda.keys()):
         if plugin_cache.is_global_alias(param):
             entry = {param: orig_agenda.pop(param)}
             yaml.dump(format_parameter(entry), output, default_flow_style=False)
@@ -123,7 +123,7 @@ def convert_wa2_agenda(filepath, output_path):
 
 def format_parameter(param):
     if isinstance(param, dict):
-        return {identifier(k) : v for k, v in param.iteritems()}
+        return {identifier(k) : v for k, v in param.items()}
     else:
         return param
 
@@ -165,7 +165,7 @@ def priority(priority):
     def decorate(func):
         def wrapper(*args, **kwargs):
             return func(*args, **kwargs)
-        wrapper.func_name = func.func_name
+        wrapper.__name__ = func.__name__
         if priority in signal.CallbackPriority.levels:
             wrapper.priority = signal.CallbackPriority(priority)
         else:
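Python 2's function attribute `func_name` (and the bound-method `im_func`, changed to `__func__` in a later hunk) disappeared in Python 3; `__name__` and `__func__` work on both. `functools.wraps` does this copying, and more, in one step. A sketch of that alternative, as a simplified stand-in for the decorator above:

    import functools

    def priority(level):  # simplified, hypothetical version of the decorator
        def decorate(func):
            @functools.wraps(func)            # copies __name__, __doc__, ...
            def wrapper(*args, **kwargs):
                return func(*args, **kwargs)
            wrapper.priority = level
            return wrapper
        return decorate

    @priority(10)
    def setup_device():
        pass

    print(setup_device.__name__, setup_device.priority)  # setup_device 10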
@@ -255,7 +255,7 @@ class ManagedCallback(object):
             global failures_detected  # pylint: disable=W0603
             failures_detected = True
             log_error(e, logger)
-            context.add_event(e.message)
+            context.add_event(e.args[0] if e.args else str(e))
             if isinstance(e, WorkloadError):
                 context.set_status('FAILED')
             elif isinstance(e, TargetError) or isinstance(e, TimeoutError):
@@ -268,7 +268,7 @@ class ManagedCallback(object):
 
     def __repr__(self):
         text = 'ManagedCallback({}, {})'
-        return text.format(self.instrument.name, self.callback.im_func.func_name)
+        return text.format(self.instrument.name, self.callback.__func__.__name__)
 
     __str__ = __repr__
 
@@ -85,7 +85,7 @@ class Job(object):
         enabled_instruments = set(i.name for i in instrument.get_enabled())
         enabled_output_processors = set(p.name for p in pm.get_enabled())
 
-        for augmentation in self.spec.augmentations.values():
+        for augmentation in list(self.spec.augmentations.values()):
             augmentation_cls = context.cm.plugin_cache.get_plugin_class(augmentation)
             if augmentation_cls.kind == 'instrument':
                 instruments_to_enable.add(augmentation)
@@ -10,7 +10,7 @@ from wa.framework.configuration.execution import CombinedConfig
 from wa.framework.exception import HostError
 from wa.framework.run import RunState, RunInfo
 from wa.framework.target.info import TargetInfo
-from wa.utils.misc import touch, ensure_directory_exists
+from wa.utils.misc import touch, ensure_directory_exists, isiterable
 from wa.utils.serializer import write_pod, read_pod, is_pod
 from wa.utils.types import enum, numeric
 
@@ -229,7 +229,7 @@ class RunOutput(Output):
         if os.path.isfile(self.jobsfile):
             self.job_specs = self.read_job_specs()
 
-        for job_state in self.state.jobs.itervalues():
+        for job_state in self.state.jobs.values():
             job_path = os.path.join(self.basepath, job_state.output_name)
             job = JobOutput(job_path, job_state.id,
                             job_state.label, job_state.iteration,
@@ -387,14 +387,14 @@ class Result(object):
         if key not in self.metadata:
             return self.add_metadata(key, *args)
 
-        if hasattr(self.metadata[key], 'iteritems'):
+        if hasattr(self.metadata[key], 'items'):
             if len(args) == 2:
                 self.metadata[key][args[0]] = args[1]
             elif len(args) > 2:  # assume list of key-value pairs
                 for k, v in args:
                     self.metadata[key][k] = v
-            elif hasattr(args[0], 'iteritems'):
-                for k, v in args[0].iteritems():
+            elif hasattr(args[0], 'items'):
+                for k, v in args[0].items():
                     self.metadata[key][k] = v
             else:
                 raise ValueError('Invalid value for key "{}": {}'.format(key, args))
@@ -25,6 +25,8 @@ from collections import OrderedDict, defaultdict
 from itertools import chain
 from copy import copy
 
+from future.utils import with_metaclass
+
 from wa.framework.configuration.core import settings, ConfigurationPoint as Parameter
 from wa.framework.exception import (NotFoundError, PluginLoaderError, TargetError,
                                     ValidationError, ConfigError, HostError)
@@ -34,7 +36,10 @@ from wa.utils.misc import (ensure_directory_exists as _d, walk_modules, load_cla
 from wa.utils.types import identifier
 
 
-MODNAME_TRANS = string.maketrans(':/\\.', '____')
+if sys.version_info[0] == 3:
+    MODNAME_TRANS = str.maketrans(':/\\.', '____')
+else:
+    MODNAME_TRANS = string.maketrans(':/\\.', '____')
 
 
 class AttributeCollection(object):
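`string.maketrans()` is gone in Python 3, where translation tables are built by the `str.maketrans()` static method instead; the Python 3 table is a dict while the Python 2 one is a 256-byte string, so the two are not interchangeable and the version check stays. Usage sketch:

    import string
    import sys

    if sys.version_info[0] == 3:
        table = str.maketrans(':/\\.', '____')
    else:
        table = string.maketrans(':/\\.', '____')

    print('wa/framework.plugin'.translate(table))  # wa_framework_plugin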
@@ -50,7 +55,7 @@ class AttributeCollection(object):
 
     @property
     def values(self):
-        return self._attrs.values()
+        return list(self._attrs.values())
 
     def __init__(self, attrcls):
         self._attrcls = attrcls
@@ -61,7 +66,7 @@ class AttributeCollection(object):
         if p.name in self._attrs:
             if p.override:
                 newp = copy(self._attrs[p.name])
-                for a, v in p.__dict__.iteritems():
+                for a, v in p.__dict__.items():
                     if v is not None:
                         setattr(newp, a, v)
                 if not hasattr(newp, "_overridden"):
@@ -77,7 +82,7 @@ class AttributeCollection(object):
     append = add
 
     def __str__(self):
-        return 'AC({})'.format(map(str, self._attrs.values()))
+        return 'AC({})'.format(list(map(str, list(self._attrs.values()))))
 
     __repr__ = __str__
 
@@ -212,14 +217,14 @@ class PluginMeta(type):
         if hasattr(cls, 'aliases'):
             aliases, cls.aliases = cls.aliases, AliasCollection()
             for alias in aliases:
-                if isinstance(alias, basestring):
+                if isinstance(alias, str):
                     alias = Alias(alias)
                 alias.validate(cls)
                 alias.plugin_name = cls.name
                 cls.aliases.add(alias)
 
 
-class Plugin(object):
+class Plugin(with_metaclass(PluginMeta, object)):
     """
     Base class for all WA plugins. An plugin is basically a plug-in. It
     extends the functionality of WA in some way. Plugins are discovered and
@@ -230,7 +235,6 @@ class Plugin(object):
     ``~/.workload_automation/``.
 
     """
-    __metaclass__ = PluginMeta
 
     kind = None
     name = None
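The two interpreters spell metaclasses differently: Python 2 reads a `__metaclass__` class attribute, Python 3 takes a `metaclass=` keyword in the class header, and each silently ignores the other's form. `future.utils.with_metaclass` builds a temporary base class so one declaration works on both, which is why the `__metaclass__` line is dropped above. Sketch:

    from future.utils import with_metaclass

    class Meta(type):
        def __new__(mcs, name, bases, attrs):
            attrs.setdefault('kind', None)   # e.g. stamp a default attribute
            return super(Meta, mcs).__new__(mcs, name, bases, attrs)

    # Py2 only:  class Plugin(object): __metaclass__ = Meta
    # Py3 only:  class Plugin(object, metaclass=Meta): ...
    class Plugin(with_metaclass(Meta, object)):  # works on both
        pass

    print(Plugin.kind)  # None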
@@ -334,7 +338,7 @@ class Plugin(object):
     can = has
 
     def _load_module(self, loader, module_spec):
-        if isinstance(module_spec, basestring):
+        if isinstance(module_spec, str):
             name = module_spec
             params = {}
         elif isinstance(module_spec, dict):
@@ -342,7 +346,7 @@ class Plugin(object):
                 msg = 'Invalid module spec: {}; dict must have exctly one key -- '\
                       'the module name.'
                 raise ValueError(msg.format(module_spec))
-            name, params = module_spec.items()[0]
+            name, params = list(module_spec.items())[0]
         else:
             message = 'Invalid module spec: {}; must be a string or a one-key dict.'
             raise ValueError(message.format(module_spec))
@@ -491,7 +495,7 @@ class PluginLoader(object):
 
         """
         name, base_kwargs = self.resolve_alias(name)
-        kwargs = OrderedDict(chain(base_kwargs.iteritems(), kwargs.iteritems()))
+        kwargs = OrderedDict(chain(iter(base_kwargs.items()), iter(kwargs.items())))
         cls = self.get_plugin_class(name, kind)
         plugin = cls(*args, **kwargs)
         return plugin
@@ -514,10 +518,10 @@ class PluginLoader(object):
 
         """
         if kind is None:
-            return self.plugins.values()
+            return list(self.plugins.values())
         if kind not in self.kind_map:
             raise ValueError('Unknown plugin type: {}'.format(kind))
-        return self.kind_map[kind].values()
+        return list(self.kind_map[kind].values())
 
     def has_plugin(self, name, kind=None):
         """
@@ -625,7 +629,7 @@ class PluginLoader(object):
                 modname = os.path.splitext(filepath[1:])[0].translate(MODNAME_TRANS)
                 module = imp.load_source(modname, filepath)
                 self._discover_in_module(module)
-            except (SystemExit, ImportError), e:
+            except (SystemExit, ImportError) as e:
                 if self.keep_going:
                     self.logger.warning('Failed to load {}'.format(filepath))
                     self.logger.warning('Got: {}'.format(e))
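The comma form of `except` is a syntax error on Python 3; `except ... as e` has been valid since Python 2.6, so it is the portable spelling. Sketch:

    try:
        raise ImportError('no such module')
    # Python 2 only:  except (SystemExit, ImportError), e:
    except (SystemExit, ImportError) as e:
        print('Got: {}'.format(e))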
@@ -639,7 +643,7 @@ class PluginLoader(object):
     def _discover_in_module(self, module):  # NOQA pylint: disable=too-many-branches
         self.logger.debug('Checking module %s', module.__name__)
         with log.indentcontext():
-            for obj in vars(module).itervalues():
+            for obj in vars(module).values():
                 if inspect.isclass(obj):
                     if not issubclass(obj, Plugin):
                         continue
@@ -21,7 +21,7 @@ class __LoaderWrapper(object):
     def kinds(self):
         if not self._loader:
             self.reset()
-        return self._loader.kind_map.keys()
+        return list(self._loader.kind_map.keys())
 
     @property
     def kind_map(self):
@@ -287,7 +287,7 @@ def loose_version_matching(config_version, apk_version):
     if len(apk_version) < len(config_version):
         return False  # More specific version requested than available
 
-    for i in xrange(len(config_version)):
+    for i in range(len(config_version)):
         if config_version[i] != apk_version[i]:
             return False
     return True
@@ -76,7 +76,7 @@ class RunState(object):
 
     @property
     def num_completed_jobs(self):
-        return sum(1 for js in self.jobs.itervalues()
+        return sum(1 for js in self.jobs.values()
                    if js.status > Status.RUNNING)
 
     def __init__(self):
@@ -95,7 +95,7 @@ class RunState(object):
 
     def get_status_counts(self):
         counter = Counter()
-        for job_state in self.jobs.itervalues():
+        for job_state in self.jobs.values():
             counter[job_state.status] += 1
         return counter
 
@@ -103,7 +103,7 @@ class RunState(object):
         return OrderedDict(
             status=str(self.status),
             timestamp=self.timestamp,
-            jobs=[j.to_pod() for j in self.jobs.itervalues()],
+            jobs=[j.to_pod() for j in self.jobs.values()],
         )
 
@@ -28,7 +28,7 @@ def list_target_descriptions(loader=pluginloader):
             raise PluginLoaderError(msg.format(desc.name, prev_dtor.name,
                                                descriptor.name))
         targets[desc.name] = desc
-    return targets.values()
+    return list(targets.values())
 
 
 def get_target_description(name, loader=pluginloader):
@@ -47,11 +47,11 @@ def instantiate_target(tdesc, params, connect=None, extra_platform_params=None):
     tp, pp, cp = {}, {}, {}
 
     for supported_params, new_params in (target_params, tp), (platform_params, pp), (conn_params, cp):
-        for name, value in supported_params.iteritems():
+        for name, value in supported_params.items():
             if value.default and name == value.name:
                 new_params[name] = value.default
 
-    for name, value in params.iteritems():
+    for name, value in params.items():
         if name in target_params:
             tp[name] = value
         elif name in platform_params:
|
|||||||
msg = 'Unexpected parameter for {}: {}'
|
msg = 'Unexpected parameter for {}: {}'
|
||||||
raise ValueError(msg.format(tdesc.name, name))
|
raise ValueError(msg.format(tdesc.name, name))
|
||||||
|
|
||||||
for pname, pval in (extra_platform_params or {}).iteritems():
|
for pname, pval in (extra_platform_params or {}).items():
|
||||||
if pname in pp:
|
if pname in pp:
|
||||||
raise RuntimeError('Platform parameter clash: {}'.format(pname))
|
raise RuntimeError('Platform parameter clash: {}'.format(pname))
|
||||||
pp[pname] = pval
|
pp[pname] = pval
|
||||||
@ -121,7 +121,7 @@ class TargetDescription(object):
|
|||||||
vals = []
|
vals = []
|
||||||
elif isiterable(vals):
|
elif isiterable(vals):
|
||||||
if hasattr(vals, 'values'):
|
if hasattr(vals, 'values'):
|
||||||
vals = v.values()
|
vals = list(v.values())
|
||||||
else:
|
else:
|
||||||
msg = '{} must be iterable; got "{}"'
|
msg = '{} must be iterable; got "{}"'
|
||||||
raise ValueError(msg.format(attr, vals))
|
raise ValueError(msg.format(attr, vals))
|
||||||
@@ -453,11 +453,11 @@ class DefaultTargetDescriptor(TargetDescriptor):
 
     def get_descriptions(self):
         result = []
-        for target_name, target_tuple in TARGETS.iteritems():
+        for target_name, target_tuple in TARGETS.items():
             (target, conn), target_params = self._get_item(target_tuple)
             assistant = ASSISTANTS[target_name]
             conn_params = CONNECTION_PARAMS[conn]
-            for platform_name, platform_tuple in PLATFORMS.iteritems():
+            for platform_name, platform_tuple in PLATFORMS.items():
                 (platform, plat_conn), platform_params = self._get_item(platform_tuple)
                 name = '{}_{}'.format(platform_name, target_name)
                 td = TargetDescription(name, self)
@@ -484,11 +484,11 @@ class DefaultTargetDescriptor(TargetDescriptor):
             return cls, params
 
         param_map = OrderedDict((p.name, copy(p)) for p in params)
-        for name, value in defaults.iteritems():
+        for name, value in defaults.items():
             if name not in param_map:
                 raise ValueError('Unexpected default "{}"'.format(name))
             param_map[name].default = value
-        return cls, param_map.values()
+        return cls, list(param_map.values())
 
 
@@ -522,7 +522,7 @@ def create_target_description(name, *args, **kwargs):
 def _get_target_defaults(target):
     specificity = 0
     res = ('linux', TARGETS['linux'])  # fallback to a generic linux target
-    for name, ttup in TARGETS.iteritems():
+    for name, ttup in TARGETS.items():
         if issubclass(target, ttup[0][0]):
             new_spec = len(inspect.getmro(ttup[0][0]))
             if new_spec > specificity:
@@ -540,7 +540,7 @@ def add_description_for_target(target, description=None, **kwargs):
    if 'platform' not in kwargs:
        kwargs['platform'] = Platform
    if 'platform_params' not in kwargs:
-        for (plat, conn), params, _ in PLATFORMS.itervalues():
+        for (plat, conn), params, _ in PLATFORMS.values():
            if plat == kwargs['platform']:
                kwargs['platform_params'] = params
                if conn is not None and kwargs['conn'] is None:
@@ -10,7 +10,7 @@ def cpuinfo_from_pod(pod):
     cpuinfo.sections = pod['cpuinfo']
     lines = []
     for section in cpuinfo.sections:
-        for key, value in section.iteritems():
+        for key, value in section.items():
             line = '{}: {}'.format(key, value)
             lines.append(line)
         lines.append('')
@@ -35,7 +35,7 @@ def kernel_config_from_pod(pod):
     config = KernelConfig('')
     config._config = pod['kernel_config']
     lines = []
-    for key, value in config._config.iteritems():
+    for key, value in config._config.items():
         if value == 'n':
             lines.append('# {} is not set'.format(key))
         else:
@@ -33,7 +33,7 @@ class RuntimeConfig(Plugin):

     @property
     def supported_parameters(self):
-        return self._runtime_params.values()
+        return list(self._runtime_params.values())

     @property
     def core_names(self):
@@ -166,12 +166,12 @@ class HotplugRuntimeConfig(RuntimeConfig):

     def validate_parameters(self):
         if len(self.num_cores) == self.target.number_of_cpus:
-            if all(v is False for v in self.num_cores.values()):
+            if all(v is False for v in list(self.num_cores.values())):
                 raise ValueError('Cannot set number of all cores to 0')

     def commit(self):
         '''Online all CPUs required in order before then off-lining'''
-        num_cores = sorted(self.num_cores.iteritems())
+        num_cores = sorted(self.num_cores.items())
         for cpu, online in num_cores:
             if online:
                 self.target.hotplug.online(cpu)
@@ -190,7 +190,7 @@ class SysfileValuesRuntimeConfig(RuntimeConfig):
     #pylint: disable=unused-argument
     @staticmethod
     def set_sysfile(obj, value, core):
-        for path, value in value.iteritems():
+        for path, value in value.items():
             verify = True
             if path.endswith('!'):
                 verify = False
@@ -222,7 +222,7 @@ class SysfileValuesRuntimeConfig(RuntimeConfig):
             return

     def commit(self):
-        for path, (value, verify) in self.sysfile_values.iteritems():
+        for path, (value, verify) in self.sysfile_values.items():
             self.target.write_value(path, value, verify=verify)

     def clear(self):
@@ -255,7 +255,7 @@ class FreqValue(object):
             raise TargetError(msg.format(value))
         elif isinstance(value, int) and value in self.values:
             return value
-        elif isinstance(value, basestring):
+        elif isinstance(value, str):
             value = caseless_string(value)
             if value in ['min', 'max']:
                 return value
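``basestring`` does not exist on Python 3, so ``str`` becomes the check on both sides. On Python 2 a bare ``isinstance(value, str)`` would miss ``unicode`` text; python-future's backported ``str`` (from the ``future`` dependency this commit adds) is the usual way to close that gap. A minimal sketch of the idiom, assuming the ``future`` package is installed::

    from builtins import str  # no-op on Python 3; a unicode-aware str type on Python 2

    def normalise(value):
        if isinstance(value, str):   # matches text on both Python 2 and 3
            return value.lower()
        raise TypeError('expected a string, got {}'.format(type(value).__name__))

    print(normalise(u'MAX'))  # max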
@@ -675,7 +675,7 @@ class IdleStateValue(object):
         if self.values is None:
             return value

-        if isinstance(value, basestring):
+        if isinstance(value, str):
             value = caseless_string(value)
             if value == 'all':
                 return [state[0] for state in self.values]
@@ -39,7 +39,7 @@ class RuntimeParameterManager(object):
     def merge_runtime_parameters(self, parameters):
         merged_params = obj_dict()
         for source in parameters:
-            for name, value in parameters[source].iteritems():
+            for name, value in parameters[source].items():
                 cp = self.get_cfg_point(name)
                 cp.set_value(merged_params, value)
         return dict(merged_params)
@@ -60,7 +60,7 @@ class RuntimeParameterManager(object):

     # Stores a set of parameters performing isolated validation when appropriate
     def set_runtime_parameters(self, parameters):
-        for name, value in parameters.iteritems():
+        for name, value in parameters.items():
             cfg = self.get_config_for_name(name)
             if cfg is None:
                 msg = 'Unsupported runtime parameter: "{}"'
@@ -74,14 +74,14 @@ class RuntimeParameterManager(object):

     def get_config_for_name(self, name):
         name = caseless_string(name)
-        for k, v in self.runtime_params.iteritems():
+        for k, v in self.runtime_params.items():
             if name == k:
                 return v.rt_config
         return None

     def get_cfg_point(self, name):
         name = caseless_string(name)
-        for k, v in self.runtime_params.iteritems():
+        for k, v in self.runtime_params.items():
             if name == k:
                 return v.cfg_point
         raise ConfigError('Unknown runtime parameter: {}'.format(name))
@@ -14,6 +14,7 @@
 #

 import os
+import sys
 from collections import namedtuple
 from subprocess import Popen, PIPE

@@ -45,4 +46,7 @@ def get_commit():
     p.wait()
     if p.returncode:
         return None
-    return std[:8]
+    if sys.version_info[0] == 3:
+        return std[:8].decode(sys.stdout.encoding)
+    else:
+        return std[:8]
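``Popen.communicate()`` returns ``bytes`` on Python 3, so the commit hash has to be decoded before it can be treated as text; on Python 2 it is already a native ``str``. A standalone sketch of the same pattern (assumes ``git`` is on the path; the ``or 'utf-8'`` fallback is an addition here, since ``sys.stdout.encoding`` can be ``None`` when output is redirected)::

    import sys
    from subprocess import Popen, PIPE

    p = Popen(['git', 'rev-parse', 'HEAD'], stdout=PIPE, stderr=PIPE)
    std, _ = p.communicate()   # bytes on Python 3, str on Python 2
    if sys.version_info[0] == 3:
        commit = std[:8].decode(sys.stdout.encoding or 'utf-8')
    else:
        commit = std[:8]
    print(commit)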
@@ -31,7 +31,7 @@ class DmesgInstrument(Instrument):
     name = 'dmesg'

     parameters = [
-        Parameter('loglevel', kind=int, allowed_values=range(8),
+        Parameter('loglevel', kind=int, allowed_values=list(range(8)),
                   description='Set loglevel for console output.')
     ]

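On Python 3 ``range()`` is a lazy sequence object rather than a list, so values that get stored (here, a parameter's ``allowed_values``) are materialised with ``list()``. A trivial illustration::

    levels = list(range(8))      # concrete [0, 1, ..., 7] on both versions
    print(5 in levels, levels)   # membership and repr behave like a plain list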
@@ -15,7 +15,7 @@


 # pylint: disable=W0613,E1101
-from __future__ import division
 from collections import defaultdict
 import os
 import shutil
@@ -413,9 +413,9 @@ class EnergyMeasurement(Instrument):
         self.params = obj_dict()

         instrument_parameters = {identifier(k): v
-                                 for k, v in self.instrument_parameters.iteritems()}
+                                 for k, v in self.instrument_parameters.items()}
         supported_params = self.backend.get_parameters()
-        for name, param in supported_params.iteritems():
+        for name, param in supported_params.items():
             value = instrument_parameters.pop(name, None)
             param.set_value(self.params, value)
         if instrument_parameters:
@@ -426,7 +426,7 @@ class EnergyMeasurement(Instrument):
     def initialize(self, context):
         self.instruments = self.backend.get_instruments(self.target, context.run_output.metadir, **self.params)

-        for instrument in self.instruments.itervalues():
+        for instrument in self.instruments.values():
             if not (instrument.mode & CONTINUOUS):
                 msg = '{} instrument does not support continuous measurement collection'
                 raise ConfigError(msg.format(self.instrument))
@@ -436,26 +436,26 @@ class EnergyMeasurement(Instrument):
             # Check that the expeccted channels exist.
             # If there are multiple Instruments, they were all constructed with
             # the same channels param, so check them all.
-            for instrument in self.instruments.itervalues():
+            for instrument in self.instruments.values():
                 if not instrument.get_channels(channel):
                     raise ConfigError('No channels found for "{}"'.format(channel))

     def setup(self, context):
-        for instrument in self.instruments.itervalues():
+        for instrument in self.instruments.values():
             instrument.reset(sites=self.sites,
                              kinds=self.kinds,
                              channels=self.channels)

     def start(self, context):
-        for instrument in self.instruments.itervalues():
+        for instrument in self.instruments.values():
             instrument.start()

     def stop(self, context):
-        for instrument in self.instruments.itervalues():
+        for instrument in self.instruments.values():
             instrument.stop()

     def update_output(self, context):
-        for device, instrument in self.instruments.iteritems():
+        for device, instrument in self.instruments.items():
             # Append the device key to the filename and artifact name, unless
             # it's None (as it will be for backends with only 1
             # devce/instrument)
@@ -501,7 +501,7 @@ class EnergyMeasurement(Instrument):
         # the devlib instrument, before we potentially appended a device key to
         # it)
         if len(self.instruments) > 1:
-            for name, metrics in metrics_by_name.iteritems():
+            for name, metrics in metrics_by_name.items():
                 units = metrics[0].units
                 value = sum(m.value for m in metrics)
                 context.add_metric(name, value, units)
@@ -58,11 +58,11 @@ class HwmonInstrument(Instrument):
         measurements_before = {m.channel.label: m for m in self.before}
         measurements_after = {m.channel.label: m for m in self.after}

-        if measurements_before.keys() != measurements_after.keys():
+        if list(measurements_before.keys()) != list(measurements_after.keys()):
             self.logger.warning(
                 'hwmon before/after measurements returned different entries!')

-        for label, measurement_after in measurements_after.iteritems():
+        for label, measurement_after in measurements_after.items():
             if label not in measurements_before:
                 continue  # We've already warned about this
             measurement_before = measurements_before[label]
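Comparing ``dict.keys()`` directly differs across versions: Python 2 returns lists (order-sensitive comparison), while Python 3 returns set-like views (order-insensitive). Wrapping both sides in ``list()``, as above, keeps the old list semantics; when only membership matters, a ``set`` comparison is the explicitly order-independent alternative (a standalone sketch, not the WA code)::

    before = {'temp1': 30.0, 'temp2': 31.5}
    after = {'temp2': 33.0, 'temp1': 32.1}

    # order-independent: did the two passes report the same channels?
    if set(before.keys()) != set(after.keys()):
        print('before/after measurements returned different entries!')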
@@ -30,16 +30,17 @@ import re
 import logging
 import time
 import tarfile
-from itertools import izip, izip_longest
 from subprocess import CalledProcessError

-from devlib.exception import TargetError
+from future.moves.itertools import zip_longest
+
+from devlib.exception import TargetError
 from devlib.utils.android import ApkInfo

 from wa import Instrument, Parameter, very_fast
 from wa.framework.exception import ConfigError
 from wa.framework.instrument import slow
+from wa.utils.diff import diff_sysfs_dirs, diff_interrupt_files
 from wa.utils.misc import as_relative, diff_tokens, write_table
 from wa.utils.misc import ensure_file_directory_exists as _f
 from wa.utils.misc import ensure_directory_exists as _d
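``itertools.izip`` and ``izip_longest`` are gone on Python 3, where the built-in ``zip`` is already lazy and the longest-zip helper is spelled ``zip_longest``. The ``future.moves`` package papers over the rename so one import line works everywhere::

    from future.moves.itertools import zip_longest  # izip_longest on Py2, zip_longest on Py3

    before_lines = ['a 1', 'b 2']
    after_lines = ['a 1']
    for bline, aline in zip_longest(before_lines, after_lines, fillvalue=None):
        print(bline, aline)   # prints ('b 2', None) on the last pass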
@@ -112,7 +113,7 @@ class SysfsExtractor(Instrument):
             _d(os.path.join(context.output_directory, 'diff', self._local_dir(d)))
             for d in self.paths
         ]
-        self.device_and_host_paths = zip(self.paths, before_dirs, after_dirs, diff_dirs)
+        self.device_and_host_paths = list(zip(self.paths, before_dirs, after_dirs, diff_dirs))

         if self.use_tmpfs:
             for d in self.paths:
@@ -177,7 +178,7 @@ class SysfsExtractor(Instrument):
                 self.logger.error('sysfs files were not pulled from the device.')
                 self.device_and_host_paths.remove(paths)  # Path is removed to skip diffing it
         for _, before_dir, after_dir, diff_dir in self.device_and_host_paths:
-            _diff_sysfs_dirs(before_dir, after_dir, diff_dir)
+            diff_sysfs_dirs(before_dir, after_dir, diff_dir)

     def teardown(self, context):
         self._one_time_setup_done = []
@@ -280,7 +281,7 @@ class InterruptStatsInstrument(Instrument):
     def update_output(self, context):
         # If workload execution failed, the after_file may not have been created.
         if os.path.isfile(self.after_file):
-            _diff_interrupt_files(self.before_file, self.after_file, _f(self.diff_file))
+            diff_interrupt_files(self.before_file, self.after_file, _f(self.diff_file))


 class DynamicFrequencyInstrument(SysfsExtractor):
@@ -307,83 +308,3 @@ class DynamicFrequencyInstrument(SysfsExtractor):
         super(DynamicFrequencyInstrument, self).validate()
         if not self.tmpfs_mount_point.endswith('-cpufreq'):  # pylint: disable=access-member-before-definition
             self.tmpfs_mount_point += '-cpufreq'
-
-
-def _diff_interrupt_files(before, after, result):  # pylint: disable=R0914
-    output_lines = []
-    with open(before) as bfh:
-        with open(after) as ofh:
-            for bline, aline in izip(bfh, ofh):
-                bchunks = bline.strip().split()
-                while True:
-                    achunks = aline.strip().split()
-                    if achunks[0] == bchunks[0]:
-                        diffchunks = ['']
-                        diffchunks.append(achunks[0])
-                        diffchunks.extend([diff_tokens(b, a) for b, a
-                                           in zip(bchunks[1:], achunks[1:])])
-                        output_lines.append(diffchunks)
-                        break
-                    else:  # new category appeared in the after file
-                        diffchunks = ['>'] + achunks
-                        output_lines.append(diffchunks)
-                        try:
-                            aline = ofh.next()
-                        except StopIteration:
-                            break
-
-    # Offset heading columns by one to allow for row labels on subsequent
-    # lines.
-    output_lines[0].insert(0, '')
-
-    # Any "columns" that do not have headings in the first row are not actually
-    # columns -- they are a single column where space-spearated words got
-    # split. Merge them back together to prevent them from being
-    # column-aligned by write_table.
-    table_rows = [output_lines[0]]
-    num_cols = len(output_lines[0])
-    for row in output_lines[1:]:
-        table_row = row[:num_cols]
-        table_row.append(' '.join(row[num_cols:]))
-        table_rows.append(table_row)
-
-    with open(result, 'w') as wfh:
-        write_table(table_rows, wfh)
-
-
-def _diff_sysfs_dirs(before, after, result):  # pylint: disable=R0914
-    before_files = []
-    os.path.walk(before,
-                 lambda arg, dirname, names: arg.extend([os.path.join(dirname, f) for f in names]),
-                 before_files
-                 )
-    before_files = filter(os.path.isfile, before_files)
-    files = [os.path.relpath(f, before) for f in before_files]
-    after_files = [os.path.join(after, f) for f in files]
-    diff_files = [os.path.join(result, f) for f in files]
-
-    for bfile, afile, dfile in zip(before_files, after_files, diff_files):
-        if not os.path.isfile(afile):
-            logger.debug('sysfs_diff: {} does not exist or is not a file'.format(afile))
-            continue
-
-        with open(bfile) as bfh, open(afile) as afh:  # pylint: disable=C0321
-            with open(_f(dfile), 'w') as dfh:
-                for i, (bline, aline) in enumerate(izip_longest(bfh, afh), 1):
-                    if aline is None:
-                        logger.debug('Lines missing from {}'.format(afile))
-                        break
-                    bchunks = re.split(r'(\W+)', bline)
-                    achunks = re.split(r'(\W+)', aline)
-                    if len(bchunks) != len(achunks):
-                        logger.debug('Token length mismatch in {} on line {}'.format(bfile, i))
-                        dfh.write('xxx ' + bline)
-                        continue
-                    if ((len([c for c in bchunks if c.strip()]) == len([c for c in achunks if c.strip()]) == 2) and
-                            (bchunks[0] == achunks[0])):
-                        # if there are only two columns and the first column is the
-                        # same, assume it's a "header" column and do not diff it.
-                        dchunks = [bchunks[0]] + [diff_tokens(b, a) for b, a in zip(bchunks[1:], achunks[1:])]
-                    else:
-                        dchunks = [diff_tokens(b, a) for b, a in zip(bchunks, achunks)]
-                    dfh.write(''.join(dchunks))
@@ -15,7 +15,7 @@


 # pylint: disable=W0613,E1101
-from __future__ import division
 import os

 from devlib import FtraceCollector
@@ -14,9 +14,10 @@
 #

 import os
-import csv
 from collections import OrderedDict

+from devlib.utils.csvutil import csvwriter
+
 from wa import OutputProcessor, Parameter
 from wa.utils.types import list_of_strings
 from wa.utils.cpustates import report_power_stats
@@ -29,14 +30,14 @@ def _get_cpustates_description():
     """
     output_lines = []
     lines = iter(report_power_stats.__doc__.split('\n'))
-    line = lines.next()
+    line = next(lines)
     while True:
         try:
             if line.strip().startswith(':param'):
                 while line.strip():
-                    line = lines.next()
+                    line = next(lines)
             output_lines.append(line)
-            line = lines.next()
+            line = next(lines)
         except StopIteration:
             break
     return '\n'.join(output_lines)
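Python 3 renames the iterator method ``.next()`` to ``.__next__()``; the portable spelling is the built-in ``next()``, which exists on Python 2.6+ as well. A small sketch::

    lines = iter('one\ntwo'.split('\n'))
    print(next(lines))        # 'one' -- same call on Python 2 and 3
    print(next(lines, None))  # a default avoids having to catch StopIteration
    print(next(lines, None))  # None: the iterator is exhausted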
@@ -105,7 +106,7 @@ class CpuStatesProcessor(OutputProcessor):
             split_wfi_states=self.split_wfi_states,
         )

-        for report in reports.itervalues():
+        for report in reports.values():
             output.add_artifact(report.name, report.filepath, kind='data')

         iteration_id = (output.id, output.label, output.iteration)
@@ -118,7 +119,7 @@ class CpuStatesProcessor(OutputProcessor):

         parallel_rows = []
         powerstate_rows = []
-        for iteration_id, reports in self.iteration_reports.iteritems():
+        for iteration_id, reports in self.iteration_reports.items():
             job_id, workload, iteration = iteration_id
             parallel_report = reports['parallel-stats']
             powerstate_report = reports['power-state-stats']
@@ -132,8 +133,7 @@ class CpuStatesProcessor(OutputProcessor):
                                        for s in stats])

         outpath = output.get_path('parallel-stats.csv')
-        with open(outpath, 'w') as wfh:
-            writer = csv.writer(wfh)
+        with csvwriter(outpath) as writer:
             writer.writerow(['id', 'workload', 'iteration', 'cluster',
                              'number_of_cores', 'total_time',
                              '%time', '%running_time'])
@@ -141,8 +141,7 @@ class CpuStatesProcessor(OutputProcessor):
         output.add_artifact('run-parallel-stats', outpath, kind='export')

         outpath = output.get_path('power-state-stats.csv')
-        with open(outpath, 'w') as wfh:
-            writer = csv.writer(wfh)
+        with csvwriter(outpath) as writer:
             headers = ['id', 'workload', 'iteration', 'state']
             headers += ['{} CPU{}'.format(c, i)
                         for i, c in enumerate(powerstate_report.core_names)]
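The ``csv`` module wants its file opened differently on each version (binary mode on Python 2, text mode with ``newline=''`` on Python 3, otherwise rows can gain stray blank lines), so the commit funnels every writer through devlib's ``csvutil`` helpers. Usage mirrors the diff above -- ``csvwriter`` is a context manager that owns the file and yields a ready writer::

    from devlib.utils.csvutil import csvwriter

    with csvwriter('results.csv') as writer:
        writer.writerow(['metric', 'value', 'units'])
        writer.writerow(['execution_time', 12.3, 'seconds'])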
@@ -1,4 +1,6 @@
-import csv
+import sys
+
+from devlib.utils.csvutil import csvwriter

 from wa import OutputProcessor, Parameter
 from wa.framework.exception import ConfigError
@@ -64,7 +66,7 @@ class CsvReportProcessor(OutputProcessor):
             classifiers = set([])
             for out in outputs:
                 for metric in out.metrics:
-                    classifiers.update(metric.classifiers.keys())
+                    classifiers.update(list(metric.classifiers.keys()))
             extra_columns = list(classifiers)
         elif self.extra_columns:
             extra_columns = self.extra_columns
@@ -72,8 +74,7 @@ class CsvReportProcessor(OutputProcessor):
             extra_columns = []

         outfile = output.get_path('results.csv')
-        with open(outfile, 'wb') as wfh:
-            writer = csv.writer(wfh)
+        with csvwriter(outfile) as writer:
             writer.writerow(['id', 'workload', 'iteration', 'metric', ] +
                             extra_columns + ['value', 'units'])

@@ -49,8 +49,8 @@ class StatusTxtReporter(OutputProcessor):
         txt = '{}/{} iterations completed without error\n'
         wfh.write(txt.format(counter[Status.OK], len(output.jobs)))
         wfh.write('\n')
-        status_lines = [map(str, [o.id, o.label, o.iteration, o.status,
-                                  o.event_summary])
+        status_lines = [list(map(str, [o.id, o.label, o.iteration, o.status,
+                                       o.event_summary]))
                         for o in output.jobs]
         write_table(status_lines, wfh, align='<<>><')

@@ -13,16 +13,17 @@
 # limitations under the License.
 #

-from __future__ import division
 import os
 import sys
-import csv
 import re
 import logging
 from ctypes import c_int32
 from collections import defaultdict
 import argparse

+from devlib.utils.csvutil import create_writer, csvwriter
+
 from wa.utils.trace_cmd import TraceCmdParser, trace_has_marker, TRACE_MARKER_START, TRACE_MARKER_STOP


@@ -114,7 +115,7 @@ class SystemPowerState(object):
         self.timestamp = None
         self.cpus = []
         idle_state = -1 if no_idle else None
-        for _ in xrange(num_cores):
+        for _ in range(num_cores):
             self.cpus.append(CpuPowerState(idle_state=idle_state))

     def copy(self):
@@ -331,8 +332,7 @@ class PowerStateTransitions(object):

     def __init__(self, output_directory):
         self.filepath = os.path.join(output_directory, 'state-transitions-timeline.csv')
-        self._wfh = open(self.filepath, 'w')
-        self.writer = csv.writer(self._wfh)
+        self.writer, self._wfh = create_writer(self.filepath)
         headers = ['timestamp', 'cpu_id', 'frequency', 'idle_state']
         self.writer.writerow(headers)

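Where the writer has to outlive a ``with`` block (these timeline classes keep writing for the duration of the run and the handle is closed later by the owning object), the companion helper ``create_writer`` is used instead; as the diff shows, it returns the writer together with the open file handle, which the caller is responsible for closing. A usage sketch::

    from devlib.utils.csvutil import create_writer

    writer, wfh = create_writer('state-transitions-timeline.csv')
    writer.writerow(['timestamp', 'cpu_id', 'frequency', 'idle_state'])
    # ... write rows as events arrive ...
    wfh.close()  # caller owns the handle, unlike with the csvwriter context manager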
@@ -360,8 +360,7 @@ class PowerStateTimeline(object):
     def __init__(self, output_directory, cpus):
         self.filepath = os.path.join(output_directory, 'power-state-timeline.csv')
         self.idle_state_names = {cpu.id: [s.name for s in cpu.cpuidle.states] for cpu in cpus}
-        self._wfh = open(self.filepath, 'w')
-        self.writer = csv.writer(self._wfh)
+        self.writer, self._wfh = create_writer(self.filepath)
         headers = ['ts'] + ['{} CPU{}'.format(cpu.name, cpu.id) for cpu in cpus]
         self.writer.writerow(headers)

@@ -405,7 +404,7 @@ class ParallelStats(object):
                 clusters.append(cpu.cpufreq.related_cpus)

         for i, clust in enumerate(clusters):
-            self.clusters[i] = set(clust)
+            self.clusters[str(i)] = set(clust)
         self.clusters['all'] = set([cpu.id for cpu in cpus])

         self.first_timestamp = None
@@ -419,7 +418,7 @@ class ParallelStats(object):
             delta = timestamp - self.last_timestamp
             active_cores = [i for i, c in enumerate(self.previous_states)
                             if c and c[0] == -1]
-            for cluster, cluster_cores in self.clusters.iteritems():
+            for cluster, cluster_cores in self.clusters.items():
                 clust_active_cores = len(cluster_cores.intersection(active_cores))
                 self.parallel_times[cluster][clust_active_cores] += delta
                 if clust_active_cores:
@@ -438,7 +437,7 @@ class ParallelStats(object):
         total_time = self.last_timestamp - self.first_timestamp
         for cluster in sorted(self.parallel_times):
             running_time = self.running_times[cluster]
-            for n in xrange(len(self.clusters[cluster]) + 1):
+            for n in range(len(self.clusters[cluster]) + 1):
                 time = self.parallel_times[cluster][n]
                 time_pc = time / total_time
                 if not self.use_ratios:
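The switch to string cluster keys two hunks above is a Python 3 correctness fix hiding among the mechanical changes: ``sorted(self.parallel_times)`` has to order the keys, and Python 3 refuses to compare ``int`` with ``str`` (the ``'all'`` pseudo-cluster), whereas Python 2 would silently order them. Keeping every key a string sidesteps the ``TypeError``::

    keys = ['0', '1', 'all']
    print(sorted(keys))   # ['0', '1', 'all']; with an int 0 in the list, Python 3 raises TypeError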
@@ -474,8 +473,7 @@ class ParallelReport(object):
         self.values.append(value)

     def write(self):
-        with open(self.filepath, 'w') as wfh:
-            writer = csv.writer(wfh)
+        with csvwriter(self.filepath) as writer:
             writer.writerow(['cluster', 'number_of_cores', 'total_time', '%time', '%running_time'])
             writer.writerows(self.values)

@@ -520,7 +518,7 @@ class PowerStateStats(object):
         total_time = self.last_timestamp - self.first_timestamp
         state_stats = defaultdict(lambda: [None] * len(self.core_names))

-        for cpu, states in self.cpu_states.iteritems():
+        for cpu, states in self.cpu_states.items():
             for state in states:
                 time = states[state]
                 time_pc = time / total_time
@@ -543,8 +541,7 @@ class PowerStateStatsReport(object):
         self.precision = precision

     def write(self):
-        with open(self.filepath, 'w') as wfh:
-            writer = csv.writer(wfh)
+        with csvwriter(self.filepath) as writer:
             headers = ['state'] + ['{} CPU{}'.format(c, i)
                                    for i, c in enumerate(self.core_names)]
             writer.writerow(headers)
@@ -561,8 +558,7 @@ class CpuUtilizationTimeline(object):

     def __init__(self, output_directory, cpus):
         self.filepath = os.path.join(output_directory, 'utilization-timeline.csv')
-        self._wfh = open(self.filepath, 'w')
-        self.writer = csv.writer(self._wfh)
+        self.writer, self._wfh = create_writer(self.filepath)

         headers = ['ts'] + ['{} CPU{}'.format(cpu.name, cpu.id) for cpu in cpus]
         self.writer.writerow(headers)
@@ -1,11 +1,18 @@
-from wa.utils.misc import write_table
+import os
+import re
+
+from future.moves.itertools import zip_longest
+
+from wa.utils.misc import as_relative, diff_tokens, write_table
+from wa.utils.misc import ensure_file_directory_exists as _f
+from wa.utils.misc import ensure_directory_exists as _d


 def diff_interrupt_files(before, after, result):  # pylint: disable=R0914
     output_lines = []
     with open(before) as bfh:
         with open(after) as ofh:
-            for bline, aline in izip(bfh, ofh):
+            for bline, aline in zip(bfh, ofh):
                 bchunks = bline.strip().split()
                 while True:
                     achunks = aline.strip().split()
@@ -20,7 +27,7 @@ def diff_interrupt_files(before, after, result):  # pylint: disable=R0914
                         diffchunks = ['>'] + achunks
                         output_lines.append(diffchunks)
                         try:
-                            aline = ofh.next()
+                            aline = next(ofh)
                         except StopIteration:
                             break

@@ -45,11 +52,9 @@ def diff_interrupt_files(before, after, result):  # pylint: disable=R0914

 def diff_sysfs_dirs(before, after, result):  # pylint: disable=R0914
     before_files = []
-    os.path.walk(before,
-                 lambda arg, dirname, names: arg.extend([os.path.join(dirname, f) for f in names]),
-                 before_files
-                 )
-    before_files = filter(os.path.isfile, before_files)
+    for root, dirs, files in os.walk(before):
+        before_files.extend([os.path.join(root, f) for f in files])
+    before_files = list(filter(os.path.isfile, before_files))
     files = [os.path.relpath(f, before) for f in before_files]
     after_files = [os.path.join(after, f) for f in files]
     diff_files = [os.path.join(result, f) for f in files]
@@ -61,7 +66,7 @@ def diff_sysfs_dirs(before, after, result):  # pylint: disable=R0914

         with open(bfile) as bfh, open(afile) as afh:  # pylint: disable=C0321
             with open(_f(dfile), 'w') as dfh:
-                for i, (bline, aline) in enumerate(izip_longest(bfh, afh), 1):
+                for i, (bline, aline) in enumerate(zip_longest(bfh, afh), 1):
                     if aline is None:
                         logger.debug('Lines missing from {}'.format(afile))
                         break
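``os.path.walk`` (the callback-style walker) was removed in Python 3; ``os.walk`` is the generator replacement and exists on both versions, so the lambda-plus-accumulator dance collapses into a plain loop. A standalone equivalent of the rewritten collection step::

    import os

    before_files = []
    for root, dirs, files in os.walk('/etc'):   # any readable directory tree
        before_files.extend(os.path.join(root, f) for f in files)
    print(len(before_files), 'files found')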
@@ -164,16 +164,16 @@ def format_simple_table(rows, headers=None, align='>', show_borders=True, border
     """Formats a simple table."""
     if not rows:
         return ''
-    rows = [map(str, r) for r in rows]
+    rows = [list(map(str, r)) for r in rows]
     num_cols = len(rows[0])

     # cycle specified alignments until we have num_cols of them. This is
     # consitent with how such cases are handled in R, pandas, etc.
     it = cycle(align)
-    align = [it.next() for _ in xrange(num_cols)]
+    align = [next(it) for _ in range(num_cols)]

-    cols = zip(*rows)
-    col_widths = [max(map(len, c)) for c in cols]
+    cols = list(zip(*rows))
+    col_widths = [max(list(map(len, c))) for c in cols]
     if headers:
         col_widths = [max(len(h), cw) for h, cw in zip(headers, col_widths)]
     row_format = ' '.join(['{:%s%s}' % (align[i], w) for i, w in enumerate(col_widths)])
@@ -259,12 +259,12 @@ def indent(text, spaces=4):


 def format_literal(lit):
-    if isinstance(lit, basestring):
+    if isinstance(lit, str):
         return '``\'{}\'``'.format(lit)
     elif hasattr(lit, 'pattern'):  # regex
         return '``r\'{}\'``'.format(lit.pattern)
     elif isinstance(lit, dict):
-        content = indent(',\n'.join("{}: {}".format(key,val) for (key,val) in lit.iteritems()))
+        content = indent(',\n'.join("{}: {}".format(key,val) for (key,val) in lit.items()))
         return '::\n\n{}'.format(indent('{{\n{}\n}}'.format(content)))
     else:
         return '``{}``'.format(lit)
@@ -287,7 +287,7 @@ def get_params_rst(parameters):
         text += indent('\nconstraint: ``{}``\n'.format(get_type_name(param.constraint)))
         if param.default:
             value = param.default
-            if isinstance(value, basestring) and value.startswith(USER_HOME):
+            if isinstance(value, str) and value.startswith(USER_HOME):
                 value = value.replace(USER_HOME, '~')
             text += indent('\ndefault: {}\n'.format(format_literal(value)))
         text += '\n'
@@ -298,7 +298,7 @@ def get_aliases_rst(aliases):
     text = ''
     for alias in aliases:
         param_str = ', '.join(['{}={}'.format(n, format_literal(v))
-                               for n, v in alias.params.iteritems()])
+                               for n, v in alias.params.items()])
         text += '{}\n{}\n\n'.format(alias.name, indent(param_str))
     return text

@@ -12,7 +12,7 @@ def activate_environment(name):
    #pylint: disable=W0603
    global __active_environment

-    if name not in __environments.keys():
+    if name not in list(__environments.keys()):
        init_environment(name)
    __active_environment = name

@@ -24,7 +24,7 @@ def init_environment(name):
    :raises: ``ValueError`` if an environment with name ``name``
             already exists.
    """
-    if name in __environments.keys():
+    if name in list(__environments.keys()):
        msg = "Environment {} already exists".format(name)
        raise ValueError(msg)
    __environments[name] = []
@@ -39,7 +39,7 @@ def reset_environment(name=None):
    """

    if name is not None:
-        if name not in __environments.keys():
+        if name not in list(__environments.keys()):
            msg = "Environment {} does not exist".format(name)
            raise ValueError(msg)
        __environments[name] = []
@@ -75,7 +75,7 @@ def once_per_class(method):
        if __active_environment is None:
            activate_environment('default')

-        func_id = repr(method.func_name) + repr(args[0].__class__)
+        func_id = repr(method.__name__) + repr(args[0].__class__)

        if func_id in __environments[__active_environment]:
            return
@@ -128,13 +128,13 @@ def disable(logs):


 def __enable_logger(logger):
-    if isinstance(logger, basestring):
+    if isinstance(logger, str):
         logger = logging.getLogger(logger)
     logger.propagate = True


 def __disable_logger(logger):
-    if isinstance(logger, basestring):
+    if isinstance(logger, str):
         logger = logging.getLogger(logger)
     logger.propagate = False

@@ -18,7 +18,7 @@
 Miscellaneous functions that don't fit anywhere else.

 """
-from __future__ import division
 import os
 import sys
 import re
@@ -30,9 +30,13 @@ import traceback
 import logging
 import random
 import hashlib
+import sys
 from datetime import datetime, timedelta
 from operator import mul
-from StringIO import StringIO
+if sys.version_info[0] == 3:
+    from io import StringIO
+else:
+    from io import BytesIO as StringIO
 from itertools import chain, cycle
 from distutils.spawn import find_executable

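``StringIO.StringIO`` no longer exists on Python 3, and the ``io`` replacements differ in what they accept: ``io.StringIO`` takes unicode text, while Python 2 code that writes native (byte) strings is better served by ``io.BytesIO``. The conditional import keeps the name ``StringIO`` meaning "a buffer that accepts native str" on both versions::

    import sys

    if sys.version_info[0] == 3:
        from io import StringIO
    else:
        from io import BytesIO as StringIO

    buf = StringIO()
    buf.write('captured output')   # a native str works on either version
    print(buf.getvalue())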
@@ -83,11 +87,11 @@ def diff_tokens(before_token, after_token):
 def prepare_table_rows(rows):
     """Given a list of lists, make sure they are prepared to be formatted into a table
     by making sure each row has the same number of columns and stringifying all values."""
-    rows = [map(str, r) for r in rows]
-    max_cols = max(map(len, rows))
+    rows = [list(map(str, r)) for r in rows]
+    max_cols = max(list(map(len, rows)))
     for row in rows:
         pad = max_cols - len(row)
-        for _ in xrange(pad):
+        for _ in range(pad):
             row.append('')
     return rows

@@ -102,10 +106,10 @@ def write_table(rows, wfh, align='>', headers=None):  # pylint: disable=R0914
     # cycle specified alignments until we have max_cols of them. This is
     # consitent with how such cases are handled in R, pandas, etc.
     it = cycle(align)
-    align = [it.next() for _ in xrange(num_cols)]
+    align = [next(it) for _ in range(num_cols)]

-    cols = zip(*rows)
-    col_widths = [max(map(len, c)) for c in cols]
+    cols = list(zip(*rows))
+    col_widths = [max(list(map(len, c))) for c in cols]
     row_format = ' '.join(['{:%s%s}' % (align[i], w) for i, w in enumerate(col_widths)])
     row_format += '\n'

@@ -144,7 +148,7 @@ def _check_remove_item(the_list, item):
     """Helper function for merge_lists that implements checking wether an items
     should be removed from the list and doing so if needed. Returns ``True`` if
     the item has been removed and ``False`` otherwise."""
-    if not isinstance(item, basestring):
+    if not isinstance(item, str):
         return False
     if not item.startswith('~'):
         return False
@@ -275,7 +279,7 @@ def get_article(word):

 def get_random_string(length):
     """Returns a random ASCII string of the specified length)."""
-    return ''.join(random.choice(string.ascii_letters + string.digits) for _ in xrange(length))
+    return ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(length))


 class LoadSyntaxError(Exception):
@@ -307,9 +311,9 @@ def load_struct_from_python(filepath=None, text=None):
         while modname in sys.modules:  # highly unlikely, but...
             modname = get_random_string(RAND_MOD_NAME_LEN)
         mod = imp.new_module(modname)
-        exec text in mod.__dict__  # pylint: disable=exec-used
+        exec(text, mod.__dict__)  # pylint: disable=exec-used
         return dict((k, v)
-                    for k, v in mod.__dict__.iteritems()
+                    for k, v in mod.__dict__.items()
                     if not k.startswith('_'))
     except SyntaxError as e:
         raise LoadSyntaxError(e.message, filepath, e.lineno)
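``exec`` changed from a statement to a built-in function in Python 3, but the function-call spelling also parses on Python 2 (where ``exec(code, ns)`` is interpreted as the statement applied to a tuple), so the same line runs on both. A self-contained sketch::

    namespace = {}
    exec('answer = 6 * 7', namespace)   # populate an explicit namespace, not the caller's
    print(namespace['answer'])          # 42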
@@ -404,7 +408,7 @@ def istextfile(fileobj, blocksize=512):
 def categorize(v):
     if hasattr(v, 'merge_with') and hasattr(v, 'merge_into'):
         return 'o'
-    elif hasattr(v, 'iteritems'):
+    elif hasattr(v, 'items'):
         return 'm'
     elif isiterable(v):
         return 's'
@@ -515,13 +519,14 @@ def merge_sequencies(s1, s2):
     return type(s2)(unique(chain(s1, s2)))


 def merge_maps(m1, m2):
-    return type(m2)(chain(m1.iteritems(), m2.iteritems()))
+    return type(m2)(chain(iter(m1.items()), iter(m2.items())))


 def merge_dicts_simple(base, other):
     result = base.copy()
-    for key, value in (other or {}).iteritems():
+    for key, value in (other or {}).items():
         result[key] = merge_config_values(result.get(key), value)
     return result

@@ -534,11 +539,11 @@ def touch(path):
 def get_object_name(obj):
     if hasattr(obj, 'name'):
         return obj.name
-    elif hasattr(obj, 'im_func'):
-        return '{}.{}'.format(get_object_name(obj.im_class),
-                              obj.im_func.func_name)
+    elif hasattr(obj, '__func__') and hasattr(obj, '__self__'):
+        return '{}.{}'.format(get_object_name(obj.__self__.__class__),
+                              obj.__func__.__name__)
     elif hasattr(obj, 'func_name'):
-        return obj.func_name
+        return obj.__name__
     elif hasattr(obj, '__name__'):
         return obj.__name__
     elif hasattr(obj, '__class__'):
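Bound methods lost their ``im_func``/``im_class``/``im_self`` aliases in Python 3; the dunder names ``__func__`` and ``__self__`` already exist on Python 2.6+, so checking for those works everywhere. A quick sketch of what the rewritten branch extracts::

    class Foo(object):
        def bar(self):
            pass

    m = Foo().bar
    print(m.__func__.__name__)            # 'bar'  (was obj.im_func.func_name)
    print(m.__self__.__class__.__name__)  # 'Foo'  (was obj.im_class)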
@@ -557,7 +562,7 @@ def resolve_cpus(name, target):
         - 'all' - returns all cpus
         - '' - Empty name will also return all cpus
     """
-    cpu_list = range(target.number_of_cpus)
+    cpu_list = list(range(target.number_of_cpus))

     # Support for passing cpu no directly
     if isinstance(name, int):
@@ -13,7 +13,7 @@
 # limitations under the License.
 #

-from __future__ import division
 import os
 import struct
 import signal
@@ -88,7 +88,7 @@ class UinputDeviceInfo(object):
         self.abs_bits = bytearray(parts[3])
         self.num_absinfo = parts[4]
         self.absinfo = [absinfo(*read_struct(fh, absinfo_struct))
-                        for _ in xrange(self.num_absinfo)]
+                        for _ in range(self.num_absinfo)]

     def __str__(self):
         return 'UInputInfo({})'.format(self.__dict__)
@@ -145,7 +145,7 @@ class ReventRecording(object):
         if self.stream:
             events = self._iter_events()
             try:
-                first = last = events.next()
+                first = last = next(events)
             except StopIteration:
                 self._duration = 0
             for last in events:
@@ -230,7 +230,7 @@ class ReventRecording(object):

     def _read_devices(self, fh):
         num_devices, = read_struct(fh, u32_struct)
-        for _ in xrange(num_devices):
+        for _ in range(num_devices):
             self.device_paths.append(read_string(fh))

     def _read_gamepad_info(self, fh):
@@ -243,7 +243,7 @@ class ReventRecording(object):
             raise RuntimeError(msg)
         self.fh.seek(self._events_start)
         if self.version >= 2:
-            for _ in xrange(self.num_events):
+            for _ in range(self.num_events):
                 yield ReventEvent(self.fh)
         else:
             file_size = os.path.getsize(self.filepath)
@ -71,7 +71,7 @@ POD_TYPES = [
|
|||||||
dict,
|
dict,
|
||||||
set,
|
set,
|
||||||
str,
|
str,
|
||||||
unicode,
|
str,
|
||||||
int,
|
int,
|
||||||
float,
|
float,
|
||||||
bool,
|
bool,
|
||||||
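
Note: Python 3 removed the unicode type; str is already the text type, with bytes for binary data. The duplicate str entry this change leaves in POD_TYPES is harmless, since the list is only used for membership tests. A version-sensitive alternative -- sketched here as an assumption, not what the commit does -- would be:

    import sys

    POD_TYPES = [list, tuple, dict, set, str, int, float, bool]
    if sys.version_info[0] == 2:
        POD_TYPES.append(unicode)  # noqa -- this name only exists on Python 2
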
@@ -104,7 +104,7 @@ class WAJSONDecoder(_json.JSONDecoder):
         d = _json.JSONDecoder.decode(self, s, **kwargs)

         def try_parse_object(v):
-            if isinstance(v, basestring):
+            if isinstance(v, str):
                 if v.startswith('REGEX:'):
                     _, flags, pattern = v.split(':', 2)
                     return re.compile(pattern, int(flags or 0))
@@ -122,8 +122,8 @@ class WAJSONDecoder(_json.JSONDecoder):

         def load_objects(d):
             pairs = []
-            for k, v in d.iteritems():
-                if hasattr(v, 'iteritems'):
+            for k, v in d.items():
+                if hasattr(v, 'items'):
                     pairs.append((k, load_objects(v)))
                 elif isiterable(v):
                     pairs.append((k, [try_parse_object(i) for i in v]))
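
Note: dict.iteritems() is gone on Python 3; dict.items() exists on both versions (a list of pairs on 2, a live view on 3), so it is the compatible spelling wherever the extra temporary list on Python 2 is acceptable. The hasattr(v, 'items') feature test doubles as the mapping check. In isolation:

    d = {'a': 1, 'b': 2}
    for k, v in d.items():   # works unchanged on Python 2 and 3
        print(k, v)
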
@@ -160,13 +160,13 @@ class json(object):


 _mapping_tag = _yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG
-_regex_tag = u'tag:wa:regex'
-_level_tag = u'tag:wa:level'
-_cpu_mask_tag = u'tag:wa:cpu_mask'
+_regex_tag = 'tag:wa:regex'
+_level_tag = 'tag:wa:level'
+_cpu_mask_tag = 'tag:wa:cpu_mask'


 def _wa_dict_representer(dumper, data):
-    return dumper.represent_mapping(_mapping_tag, data.iteritems())
+    return dumper.represent_mapping(_mapping_tag, iter(data.items()))


 def _wa_regex_representer(dumper, data):
@@ -248,17 +248,17 @@ class python(object):
     def loads(s, *args, **kwargs):
         pod = {}
         try:
-            exec s in pod  # pylint: disable=exec-used
+            exec(s, pod)  # pylint: disable=exec-used
         except SyntaxError as e:
             raise SerializerSyntaxError(e.message, e.lineno)
-        for k in pod.keys():
+        for k in list(pod.keys()):
             if k.startswith('__'):
                 del pod[k]
         return pod


 def read_pod(source, fmt=None):
-    if isinstance(source, basestring):
+    if isinstance(source, str):
         with open(source) as fh:
             return _read_pod(fh, fmt)
     elif hasattr(source, 'read') and (hasattr(source, 'name') or fmt):
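
Note: two idioms are at work in loads(). exec is a function on Python 3 (the statement form is a SyntaxError), and the exec(s, pod) spelling also parses on Python 2, where the tuple is special-cased to mean "exec s in pod". Separately, deleting keys while iterating requires snapshotting them, because Python 3's dict.keys() is a live view:

    pod = {}
    exec("x = 1", pod)            # function-call form; same effect on 2 and 3
    for k in list(pod.keys()):    # snapshot: deleting from a live view while
        if k.startswith('__'):    # iterating raises RuntimeError on Python 3
            del pod[k]

One hedge on the unchanged context line above: SyntaxError instances on Python 3 no longer carry a .message attribute (e.msg or str(e) would be the portable spellings), so that path still assumes Python 2 behaviour.
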
@@ -269,7 +269,7 @@ def read_pod(source, fmt=None):


 def write_pod(pod, dest, fmt=None):
-    if isinstance(dest, basestring):
+    if isinstance(dest, str):
         with open(dest, 'w') as wfh:
             return _write_pod(pod, wfh, fmt)
     elif hasattr(dest, 'write') and (hasattr(dest, 'name') or fmt):
@@ -323,8 +323,8 @@ def _write_pod(pod, wfh, fmt=None):
 def is_pod(obj):
     if type(obj) not in POD_TYPES:
         return False
-    if hasattr(obj, 'iteritems'):
-        for k, v in obj.iteritems():
+    if hasattr(obj, 'items'):
+        for k, v in obj.items():
             if not (is_pod(k) and is_pod(v)):
                 return False
     elif isiterable(obj):

@@ -89,5 +89,5 @@ def _get_terminal_size_linux():

 if __name__ == "__main__":
     sizex, sizey = get_terminal_size()
-    print 'width =', sizex, 'height =', sizey
+    print('width =', sizex, 'height =', sizey)
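
Note: print became a function on Python 3. One subtlety worth hedging on: without from __future__ import print_function at the top of the module, Python 2 parses the new form as printing a single tuple, so the output gains parentheses and commas. The unambiguous portable form is:

    from __future__ import print_function   # makes Python 2 use the function too

    print('width =', 80, 'height =', 24)    # identical output on 2 and 3
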
@@ -114,7 +114,7 @@ class DroppedEventsEvent(object):
 def try_convert_to_numeric(v):
     try:
         if isiterable(v):
-            return map(numeric, v)
+            return list(map(numeric, v))
         else:
             return numeric(v)
     except ValueError:
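
Note: map() returns a lazy iterator on Python 3 rather than a list, so callers that expect len(), indexing, or repeated iteration need an explicit list():

    vals = map(int, ['1', '2', '3'])        # list on 2, <map object> on 3
    vals = list(map(int, ['1', '2', '3']))  # [1, 2, 3] on both
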
@@ -153,13 +153,13 @@ def regex_body_parser(regex, flags=0):
     If regex is a pre-compiled object, flags will be ignored.

     """
-    if isinstance(regex, basestring):
+    if isinstance(regex, str):
         regex = re.compile(regex, flags)

     def regex_parser_func(event, text):
         match = regex.search(text)
         if match:
-            for k, v in match.groupdict().iteritems():
+            for k, v in match.groupdict().items():
                 try:
                     event.fields[k] = int(v)
                 except ValueError:
@@ -321,7 +321,7 @@ class TraceCmdParser(object):
                 continue

             body_parser = EVENT_PARSER_MAP.get(event_name, default_body_parser)
-            if isinstance(body_parser, basestring) or isinstance(body_parser, re._pattern_type):  # pylint: disable=protected-access
+            if isinstance(body_parser, str) or isinstance(body_parser, re._pattern_type):  # pylint: disable=protected-access
                 body_parser = regex_body_parser(body_parser)
             yield TraceCmdEvent(parser=body_parser, **match.groupdict())

@@ -29,10 +29,19 @@ import os
 import re
 import numbers
 import shlex
+import sys
 from bisect import insort
-from urllib import quote, unquote
+if sys.version_info[0] == 3:
+    from urllib.parse import quote, unquote
+    from past.builtins import basestring
+    long = int
+else:
+    from urllib import quote, unquote
 from collections import defaultdict, MutableMapping
 from copy import copy
+from functools import total_ordering

+from future.utils import with_metaclass

 from devlib.utils.types import identifier, boolean, integer, numeric, caseless_string

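
Note: this hunk shows the commit's central compatibility pattern: gate imports on the interpreter's major version, then backfill the Python 2 names (basestring, long) so the rest of the module stays unchanged. past.builtins ships with the third-party future package that this commit depends on. The shape of the idiom in isolation:

    import sys

    if sys.version_info[0] == 3:
        from urllib.parse import quote        # module moved on Python 3
        from past.builtins import basestring  # py2-style name, via 'future'
        long = int                            # Python 3 ints are unbounded
    else:
        from urllib import quote
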
@@ -47,7 +56,7 @@ def list_of_strs(value):
     """
     if not isiterable(value):
         raise ValueError(value)
-    return map(str, value)
+    return list(map(str, value))

 list_of_strings = list_of_strs

@@ -59,7 +68,7 @@ def list_of_ints(value):
     """
     if not isiterable(value):
         raise ValueError(value)
-    return map(int, value)
+    return list(map(int, value))

 list_of_integers = list_of_ints

@@ -72,7 +81,7 @@ def list_of_numbers(value):
     """
     if not isiterable(value):
         raise ValueError(value)
-    return map(numeric, value)
+    return list(map(numeric, value))


 def list_of_bools(value, interpret_strings=True):
@@ -88,9 +97,9 @@ def list_of_bools(value, interpret_strings=True):
     if not isiterable(value):
         raise ValueError(value)
     if interpret_strings:
-        return map(boolean, value)
+        return list(map(boolean, value))
     else:
-        return map(bool, value)
+        return list(map(bool, value))


 def list_of(type_):
@@ -98,16 +107,16 @@ def list_of(type_):
     attempts to convert all elements in the passed value to the specifed
     ``type_``, raising ``ValueError`` on error."""
     def __init__(self, values):
-        list.__init__(self, map(type_, values))
+        list.__init__(self, list(map(type_, values)))

     def append(self, value):
         list.append(self, type_(value))

     def extend(self, other):
-        list.extend(self, map(type_, other))
+        list.extend(self, list(map(type_, other)))

     def from_pod(cls, pod):
-        return cls(map(type_, pod))
+        return cls(list(map(type_, pod)))

     def _to_pod(self):
         return self
@@ -132,7 +141,7 @@ def list_or_string(value):
     a one-element list with stringified value will be returned.

     """
-    if isinstance(value, basestring):
+    if isinstance(value, str):
         return [value]
     else:
         try:
@@ -147,11 +156,11 @@ def list_or_caseless_string(value):
     not iterable a one-element list with stringified value will be returned.

     """
-    if isinstance(value, basestring):
+    if isinstance(value, str):
         return [caseless_string(value)]
     else:
         try:
-            return map(caseless_string, value)
+            return list(map(caseless_string, value))
         except ValueError:
             return [caseless_string(value)]

@@ -229,8 +238,8 @@ class arguments(list):

     def __init__(self, value=None):
         if isiterable(value):
-            super(arguments, self).__init__(map(str, value))
-        elif isinstance(value, basestring):
+            super(arguments, self).__init__(list(map(str, value)))
+        elif isinstance(value, str):
             posix = os.name != 'nt'
             super(arguments, self).__init__(shlex.split(value, posix=posix))
         elif value is None:
@@ -242,7 +251,7 @@ class arguments(list):
         return super(arguments, self).append(str(value))

     def extend(self, values):
-        return super(arguments, self).extend(map(str, values))
+        return super(arguments, self).extend(list(map(str, values)))

     def __str__(self):
         return ' '.join(self)
@@ -288,7 +297,7 @@ class prioritylist(object):
             self.__delitem__(index)

     def _priority_index(self, element):
-        for priority, elements in self.elements.iteritems():
+        for priority, elements in self.elements.items():
             if element in elements:
                 return (priority, elements.index(element))
         raise IndexError(element)
@@ -333,7 +342,7 @@ class prioritylist(object):
             else:
                 index_range = [index]
         elif isinstance(index, slice):
-            index_range = range(index.start or 0, index.stop, index.step or 1)
+            index_range = list(range(index.start or 0, index.stop, index.step or 1))
         else:
             raise ValueError('Invalid index {}'.format(index))
         current_global_offset = 0
@@ -391,7 +400,7 @@ class toggle_set(set):
     def __init__(self, *args):
         if args:
             value = args[0]
-            if isinstance(value, basestring):
+            if isinstance(value, str):
                 msg = 'invalid type for toggle_set: "{}"'
                 raise TypeError(msg.format(type(value)))
         set.__init__(self, *args)
@@ -507,12 +516,15 @@ class obj_dict(MutableMapping):
         raise AttributeError("No such attribute: " + name)

     def __getattr__(self, name):
+        if 'dict' not in self.__dict__:
+            raise AttributeError("No such attribute: " + name)
         if name in self.__dict__['dict']:
             return self.__dict__['dict'][name]
         else:
             raise AttributeError("No such attribute: " + name)


+@total_ordering
 class level(object):
     """
     A level has a name and behaves like a string when printed, however it also
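
Note: the added guard in obj_dict.__getattr__ matters because __getattr__ is invoked for every missing attribute, including speculative probes (copy and pickle look up names such as __copy__ and __setstate__) on instances whose backing 'dict' entry has not been populated yet; without the guard those probes would escape as KeyError where callers expect AttributeError. A plausible trigger, assumed here for illustration rather than taken from the commit:

    import pickle

    od = obj_dict()                 # backing store lives in __dict__['dict']
    pickle.loads(pickle.dumps(od))  # probes __setstate__ etc. through
                                    # __getattr__ on a half-built instance
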
@@ -538,11 +550,8 @@ class level(object):
     def __repr__(self):
         return '{}({})'.format(self.name, self.value)

-    def __cmp__(self, other):
-        if isinstance(other, level):
-            return cmp(self.value, other.value)
-        else:
-            return cmp(self.value, other)
+    def __hash__(self):
+        return hash(self.name)

     def __eq__(self, other):
         if isinstance(other, level):
@@ -552,13 +561,24 @@ class level(object):
         else:
             return self.value == other

-    def __ne__(self, other):
+    def __lt__(self, other):
         if isinstance(other, level):
-            return self.value != other.value
+            return self.value < other.value
         elif isinstance(other, basestring):
-            return self.name != other
+            return self.name < other
         else:
-            return self.value != other
+            return self.value < other


+class _EnumMeta(type):
+
+    def __str__(cls):
+        return str(cls.levels)
+
+    def __getattr__(self, name):
+        name = name.lower()
+        if name in self.__dict__:
+            return self.__dict__[name]
+
+
 def enum(args, start=0, step=1):
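
Note: Python 3 ignores __cmp__ and removed the cmp() builtin; ordering comes from the rich comparison methods, and a class that defines __eq__ without __hash__ becomes unhashable. Hence level keeps __eq__, gains __lt__ and __hash__, and relies on the functools.total_ordering decorator (applied to the class in the hunk above) to derive the remaining operators. The general shape, with a hypothetical stand-in class:

    from functools import total_ordering

    @total_ordering
    class Version(object):              # illustrative stand-in for level
        def __init__(self, value):
            self.value = value
        def __eq__(self, other):
            return self.value == other.value
        def __lt__(self, other):
            return self.value < other.value
        def __hash__(self):
            return hash(self.value)

    assert Version(1) <= Version(2)     # <= is derived by total_ordering
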
@@ -583,11 +603,7 @@ def enum(args, start=0, step=1):

     """

-    class Enum(object):
-
-        class __metaclass__(type):
-            def __str__(cls):
-                return str(cls.levels)
+    class Enum(with_metaclass(_EnumMeta, object)):

         @classmethod
         def from_pod(cls, pod):
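
Note: Python 2's nested __metaclass__ hook is silently ignored by Python 3, whose own syntax (class Enum(object, metaclass=_EnumMeta)) is in turn a SyntaxError on Python 2. future.utils.with_metaclass() builds a temporary base class that applies the metaclass under both grammars:

    from future.utils import with_metaclass

    class Meta(type):                          # illustrative metaclass
        def __str__(cls):
            return 'levels of ' + cls.__name__

    class Enum(with_metaclass(Meta, object)):  # valid on Python 2 and 3
        pass

    print(str(Enum))                           # -> 'levels of Enum'
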
@@ -642,14 +658,14 @@ class ParameterDict(dict):
     # Function to determine the appropriate prefix based on the parameters type
     @staticmethod
     def _get_prefix(obj):
-        if isinstance(obj, basestring):
+        if isinstance(obj, str):
             prefix = 's'
         elif isinstance(obj, float):
             prefix = 'f'
-        elif isinstance(obj, long):
-            prefix = 'd'
         elif isinstance(obj, bool):
             prefix = 'b'
+        elif isinstance(obj, long):
+            prefix = 'i'
         elif isinstance(obj, int):
             prefix = 'i'
         elif obj is None:
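
Note: two details in this reordering are easy to miss. With long aliased to int on Python 3 (see the types.py imports hunk above), isinstance(obj, long) would also match True, because bool is a subclass of int -- so the bool check must now come before the long check to keep booleans on the 'b' prefix. And since Python 3 ints are unbounded, the separate 'd' (long) encoding collapses into 'i':

    print(isinstance(True, int))   # True on both versions -- bool subclasses int
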
@@ -686,7 +702,7 @@ class ParameterDict(dict):
         elif value_type == 'b':
             return boolean(value)
         elif value_type == 'd':
-            return long(value)
+            return int(value)
         elif value_type == 'f':
             return float(value)
         elif value_type == 'i':
@@ -700,7 +716,7 @@ class ParameterDict(dict):
             raise ValueError('Unknown {} {}'.format(type(string), string))

     def __init__(self, *args, **kwargs):
-        for k, v in kwargs.iteritems():
+        for k, v in kwargs.items():
             self.__setitem__(k, v)
         dict.__init__(self, *args)

@@ -714,7 +730,7 @@ class ParameterDict(dict):
         return dict.__contains__(self, self._encode(item))

     def __iter__(self):
-        return iter((k, self._decode(v)) for (k, v) in self.items())
+        return iter((k, self._decode(v)) for (k, v) in list(self.items()))

     def iteritems(self):
         return self.__iter__()
@@ -730,7 +746,10 @@ class ParameterDict(dict):
         return (key, self._decode(value))

     def iter_encoded_items(self):
-        return dict.iteritems(self)
+        if sys.version_info[0] == 3:
+            return dict.items(self)
+        else:
+            return dict.iteritems(self)

     def get_encoded_value(self, name):
         return dict.__getitem__(self, name)
@@ -743,7 +762,7 @@ class ParameterDict(dict):
         if isinstance(d, ParameterDict):
             dict.update(self, d)
         else:
-            for k, v in d.iteritems():
+            for k, v in d.items():
                 self[k] = v


@@ -762,7 +781,7 @@ class cpu_mask(object):
         self._mask = 0
         if isinstance(cpus, int):
             self._mask = cpus
-        elif isinstance(cpus, basestring):
+        elif isinstance(cpus, str):
             if cpus[:2] == '0x' or cpus[:2] == '0X':
                 self._mask = int(cpus, 16)
             else:

@@ -18,7 +18,13 @@
 import re
 import os
 import time
-import urllib
+
+from future.standard_library import install_aliases
+install_aliases()
+
+import urllib.request
+import urllib.parse
+import urllib.error

 from wa import ApkWorkload, Parameter, ConfigError, WorkloadError
 from wa.framework.configuration.core import settings
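
Note: future.standard_library.install_aliases() patches Python 2's import machinery so that Python 3 module paths such as urllib.request resolve to their Python 2 counterparts (urllib, urllib2, urlparse). Code can then be written against the Python 3 layout only:

    from future.standard_library import install_aliases
    install_aliases()              # maps Python 3 module paths onto Python 2

    import urllib.request          # now imports cleanly on both versions

    # e.g. urllib.request.urlretrieve(url, filepath), as in the hunk below
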
@@ -81,7 +87,7 @@ class ExoPlayer(ApkWorkload):
                   Playback duration of the video file. This becomes the duration of the workload.
                   If provided must be shorter than the length of the media.
                   """),
-        Parameter('format', allowed_values=DOWNLOAD_URLS.keys(),
+        Parameter('format', allowed_values=list(DOWNLOAD_URLS.keys()),
                   description="""
                   Specifies which format video file to play. Default is {}
                   """.format(default_format)),
@@ -137,7 +143,7 @@ class ExoPlayer(ApkWorkload):
             filename = '{}_{}'.format(format_resolution, os.path.basename(url))
             filepath = os.path.join(self.video_directory, filename)
             self.logger.info('Downloading {} to {}...'.format(url, filepath))
-            urllib.urlretrieve(url, filepath)
+            urllib.request.urlretrieve(url, filepath)
             return filepath
         else:
             if len(files) > 1:
@@ -172,7 +178,7 @@ class ExoPlayer(ApkWorkload):
         self.play_cmd = 'am start -a {} -d "file://{}"'.format(self.action,
                                                                self.device_video_file)

-        self.monitor = self.target.get_logcat_monitor(REGEXPS.values())
+        self.monitor = self.target.get_logcat_monitor(list(REGEXPS.values()))
         self.monitor.start()

     def run(self, context):
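
Note: like keys(), dict.values() returns a view object on Python 3 -- iterable, but not a list -- and consumers such as Parameter's allowed_values and (presumably) devlib's get_logcat_monitor expect a concrete sequence, hence the list() wrappers. With a made-up mapping for illustration:

    REGEXPS = {'start': r'Start proc', 'end': r'Playback ended'}  # shape assumed
    patterns = list(REGEXPS.values())   # concrete list on both versions
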
@@ -84,7 +84,7 @@ class Hackbench(Workload):
         results_file = context.get_artifact_path('hackbench-results')
         with open(results_file) as fh:
             for line in fh:
-                for label, (regex, units) in regex_map.iteritems():
+                for label, (regex, units) in regex_map.items():
                     match = regex.search(line)
                     if match:
                         context.add_metric(label, float(match.group(1)), units)

@@ -15,7 +15,7 @@

 # pylint: disable=E1101,W0201,E0203

-from __future__ import division
 import os
 import re
 import select
@@ -23,6 +23,7 @@ import json
 import threading
 import sqlite3
 import subprocess
+import sys
 from copy import copy

 import pandas as pd
@@ -143,7 +144,7 @@ class Jankbench(ApkWorkload):

         for test_name, rep in results.index:
             test_results = results.ix[test_name, rep]
-            for metric, value in test_results.iteritems():
+            for metric, value in test_results.items():
                 context.add_metric(metric, value, units=None, lower_is_better=True,
                                    classifiers={'test_name': test_name, 'rep': rep})

@@ -222,6 +223,8 @@ class JankbenchRunMonitor(threading.Thread):
                 ready, _, _ = select.select([proc.stdout, proc.stderr], [], [], 2)
                 if ready:
                     line = ready[0].readline()
+                    if sys.version_info[0] == 3:
+                        line = line.decode(sys.stdout.encoding)
                     if self.regex.search(line):
                         self.run_ended.set()

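
Note: on Python 3, pipes from subprocess yield bytes unless the stream was opened in text mode, so the line must be decoded before matching it against a str regex. A minimal sketch of the same guard; the 'utf-8' fallback is a defensive tweak of mine (sys.stdout.encoding can be None off a tty), not part of the commit:

    import subprocess
    import sys

    proc = subprocess.Popen(['echo', 'ok'], stdout=subprocess.PIPE)
    line = proc.stdout.readline()        # bytes on Python 3, str on Python 2
    if sys.version_info[0] == 3:
        line = line.decode(sys.stdout.encoding or 'utf-8')
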
@@ -145,7 +145,7 @@ class Meabo(Workload):
                 Controls which phases to run.
             ''',
             constraint=lambda x: all(0 < v <=10 for v in x),
-            default=range(1, 11),
+            default=list(range(1, 11)),
         ),
         Parameter(
             'threads',

@@ -102,7 +102,7 @@ class Openssl(Workload):

             parts = line.split(':')
             if parts[0] == '+F':  # evp ciphers
-                for bs, value in zip(BLOCK_SIZES, map(float, parts[3:])):
+                for bs, value in zip(BLOCK_SIZES, list(map(float, parts[3:]))):
                     value = value / 2**20  # to MB
                     context.add_metric('score', value, 'MB/s',
                                        classifiers={'block_size': bs})

@@ -135,16 +135,16 @@ class Sysbench(Workload):

         with open(self.host_results_file) as fh:
             find_line_with('General statistics:', fh)
-            extract_metric('total time', fh.next(), context.output)
-            extract_metric('total number of events', fh.next(), context.output, lower_is_better=False)
+            extract_metric('total time', next(fh), context.output)
+            extract_metric('total number of events', next(fh), context.output, lower_is_better=False)
             find_line_with('response time:', fh)
-            extract_metric('min', fh.next(), context.output, 'response time ')
-            extract_metric('avg', fh.next(), context.output, 'response time ')
-            extract_metric('max', fh.next(), context.output, 'response time ')
-            extract_metric('approx. 95 percentile', fh.next(), context.output)
+            extract_metric('min', next(fh), context.output, 'response time ')
+            extract_metric('avg', next(fh), context.output, 'response time ')
+            extract_metric('max', next(fh), context.output, 'response time ')
+            extract_metric('approx. 95 percentile', next(fh), context.output)
             find_line_with('Threads fairness:', fh)
-            extract_threads_fairness_metric('events', fh.next(), context.output)
-            extract_threads_fairness_metric('execution time', fh.next(), context.output)
+            extract_threads_fairness_metric('events', next(fh), context.output)
+            extract_threads_fairness_metric('execution time', next(fh), context.output)

     def teardown(self, context):
         self.target.remove(self.target_results_file)
@@ -155,7 +155,7 @@ class Sysbench(Workload):

     def _build_command(self, **parameters):
         param_strings = ['--{}={}'.format(k.replace('_', '-'), v)
-                         for k, v in parameters.iteritems()]
+                         for k, v in parameters.items()]
         if self.file_test_mode:
             param_strings.append('--file-test-mode={}'.format(self.file_test_mode))
         sysbench_command = '{} {} {} run'.format(self.target_binary, ' '.join(param_strings), self.cmd_params)

@@ -17,7 +17,7 @@ import os
 import json
 import re

-from HTMLParser import HTMLParser
+from html.parser import HTMLParser

 from wa import ApkUiautoWorkload, Parameter
 from wa.utils.types import list_of_strs
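
Note: HTMLParser is one of the stdlib modules renamed on Python 3 (HTMLParser -> html.parser). The future package also ships an html package for Python 2, so the Python 3 spelling imports on both versions once future is installed. A dependency-free alternative, sketched here as an option rather than what the commit does:

    try:
        from html.parser import HTMLParser   # Python 3 location
    except ImportError:
        from HTMLParser import HTMLParser    # Python 2 location
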
@@ -48,7 +48,7 @@ class Vellamo(ApkUiautoWorkload):
         '3.0': ['Browser', 'Metal', 'Multi'],
         '3.2.4': ['Browser', 'Metal', 'Multi'],
     }
-    valid_versions = benchmark_types.keys()
+    valid_versions = list(benchmark_types.keys())
     summary_metrics = None

     parameters = [
@@ -119,7 +119,7 @@ class Vellamo(ApkUiautoWorkload):
             benchmark.name = benchmark.name.replace(' ', '_')
             context.add_metric('{}_Total'.format(benchmark.name),
                                benchmark.score)
-            for name, score in benchmark.metrics.items():
+            for name, score in list(benchmark.metrics.items()):
                 name = name.replace(' ', '_')
                 context.add_metric('{}_{}'.format(benchmark.name,
                                                   name), score)