Mirror of https://github.com/ARM-software/workload-automation.git (synced 2025-09-04 04:12:42 +01:00)
New target description + moving target stuff under "framework"
Changing the way target descriptions work from a static mapping to something that is dynamically generated and extensible via plugins. Also moving the core target implementation under "framework".
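In practice, the static name-to-description mapping is replaced by a lookup assembled at run time from plugins. A minimal sketch of the idea (only get_target_descriptions() and the td.name lookup appear in this diff; the rest is illustrative):

# Sketch: target descriptions are gathered dynamically rather than
# hard-coded, so new targets can be contributed by plugins.
from wa.framework.target.descriptor import get_target_descriptions

# The same mapping PluginCache builds in its __init__ (see plugin_cache.py below):
targets = {td.name: td for td in get_target_descriptions()}
print(targets.keys())   # e.g. ['generic_android', 'nexus10', ...], depending on installed plugins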
@@ -1,2 +1,19 @@
-from wa.framework.configuration.core import settings, ConfigurationPoint, PluginConfiguration
-from wa.framework.configuration.core import merge_config_values, WA_CONFIGURATION
+# Copyright 2013-2016 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from wlauto.core.configuration.configuration import (settings,
+                                                     RunConfiguration,
+                                                     JobGenerator,
+                                                     ConfigurationPoint)
+from wlauto.core.configuration.plugin_cache import PluginCache
File diff suppressed because it is too large
wa/framework/configuration/default.py (new file, 42 lines)
@@ -0,0 +1,42 @@
from wlauto.core.configuration.configuration import MetaConfiguration, RunConfiguration
from wlauto.core.configuration.plugin_cache import PluginCache
from wlauto.utils.serializer import yaml
from wlauto.utils.doc import strip_inlined_text

DEFAULT_INSTRUMENTS = ['execution_time',
                       'interrupts',
                       'cpufreq',
                       'status',
                       'standard',
                       'csv']


def _format_yaml_comment(param, short_description=False):
    comment = param.description
    comment = strip_inlined_text(comment)
    if short_description:
        comment = comment.split('\n\n')[0]
    comment = comment.replace('\n', '\n# ')
    comment = "# {}\n".format(comment)
    return comment


def _format_instruments(output):
    plugin_cache = PluginCache()
    output.write("instrumentation:\n")
    for plugin in DEFAULT_INSTRUMENTS:
        plugin_cls = plugin_cache.loader.get_plugin_class(plugin)
        output.writelines(_format_yaml_comment(plugin_cls, short_description=True))
        output.write(" - {}\n".format(plugin))
        output.write("\n")


def generate_default_config(path):
    with open(path, 'w') as output:
        for param in MetaConfiguration.config_points + RunConfiguration.config_points:
            entry = {param.name: param.default}
            comment = _format_yaml_comment(param)
            output.writelines(comment)
            yaml.dump(entry, output, default_flow_style=False)
            output.write("\n")

        _format_instruments(output)
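A quick way to exercise the generator (a minimal sketch: the import path and output path are assumptions, and a working plugin loader is needed so the default instruments above can be resolved):

from wa.framework.configuration.default import generate_default_config  # assumed import path

generate_default_config('/tmp/wa_config.yaml')
with open('/tmp/wa_config.yaml') as fh:
    print(fh.read())   # commented defaults followed by the instrumentation list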
@@ -1,67 +1,222 @@
from copy import copy
from collections import OrderedDict
import random
from itertools import izip_longest, groupby, chain

from wa.framework import pluginloader
from wa.framework.exception import ConfigError
from wa.framework.configuration.core import ConfigurationPoint
from wa.framework.utils.types import TreeNode, list_of, identifier
from wa.framework.configuration.core import (MetaConfiguration, RunConfiguration,
                                             JobGenerator, settings)
from wa.framework.configuration.parsers import ConfigParser
from wa.framework.configuration.plugin_cache import PluginCache
class ExecConfig(object):

class CombinedConfig(object):

    static_config_points = [
        ConfigurationPoint(
            'components',
            kind=list_of(identifier),
            description="""
            Components to be activated.
            """,
        ),
        ConfigurationPoint(
            'runtime_parameters',
            kind=list_of(identifier),
            aliases=['runtime_params'],
            description="""
            Runtime parameters to be set.
            """,
        ),
        ConfigurationPoint(
            'classifiers',
            kind=list_of(str),
            description="""
            Classifiers to be used. Classifiers are arbitrary key-value
            pairs associated with the config. They may be used during output
            processing and should be used to provide additional context for
            collected results.
            """,
        ),
    ]
    @staticmethod
    def from_pod(pod):
        instance = CombinedConfig()
        instance.settings = MetaConfiguration.from_pod(pod.get('settings', {}))
        instance.run_config = RunConfiguration.from_pod(pod.get('run_config', {}))
        return instance

    config_points = None

    def __init__(self, settings=None, run_config=None):
        self.settings = settings
        self.run_config = run_config

    @classmethod
    def _load(cls, load_global=False, loader=pluginloader):
        if cls.config_points is None:
            cls.config_points = {c.name: c for c in cls.static_config_points}
            for plugin in loader.list_plugins():
                cp = ConfigurationPoint(
                    plugin.name,
                    kind=OrderedDict,
                    description="""
                    Configuration for {} plugin.
                    """.format(plugin.name)
                )
                cls._add_config_point(plugin.name, cp)
                for alias in plugin.aliases:
                    cls._add_config_point(alias.name, cp)

    @classmethod
    def _add_config_point(cls, name, cp):
        if name in cls.config_points:
            message = 'Config point for "{}" already exists ("{}")'
            raise ValueError(message.format(name, cls.config_points[name].name))
        cls.config_points[name] = cp

    def to_pod(self):
        return {'settings': self.settings.to_pod(),
                'run_config': self.run_config.to_pod()}
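A hedged sketch of the intended pod round-trip (settings here is the module-level object imported above; the wrapped objects' to_pod()/from_pod() methods are assumed to be inverses):

# Sketch: CombinedConfig serializes to a plain-dict "pod" and back, which is
# what lets it be written out as JSON/YAML alongside run results.
combined = CombinedConfig(settings=settings, run_config=RunConfiguration())
pod = combined.to_pod()                  # {'settings': {...}, 'run_config': {...}}
restored = CombinedConfig.from_pod(pod)  # rebuilds both halves from the dict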
class JobStatus:
    PENDING = 0
    RUNNING = 1
    OK = 2
    FAILED = 3
    PARTIAL = 4
    ABORTED = 5
    PASSED = 6
class GlobalExecConfig(ExecConfig):
class Job(object):

    def __init__(self, spec, iteration, context):
        self.spec = spec
        self.iteration = iteration
        self.context = context
        self.status = 'new'
        self.workload = None
        self.output = None

    def load(self, target, loader=pluginloader):
        self.workload = loader.get_workload(self.spec.workload_name,
                                            target,
                                            **self.spec.workload_parameters)
        self.workload.init_resources(self.context)
        self.workload.validate()
class ConfigManager(object):
    """
    Represents run-time state of WA. Mostly used as a container for loaded
    configuration and discovered plugins.

    This exists outside of any command or run and is associated with the
    running instance of WA itself.
    """

    @property
    def enabled_instruments(self):
        return self.jobs_config.enabled_instruments

    @property
    def job_specs(self):
        if not self._jobs_generated:
            msg = 'Attempting to access job specs before '\
                  'jobs have been generated'
            raise RuntimeError(msg)
        return [j.spec for j in self._jobs]

    @property
    def jobs(self):
        if not self._jobs_generated:
            msg = 'Attempting to access jobs before '\
                  'they have been generated'
            raise RuntimeError(msg)
        return self._jobs

    def __init__(self, settings=settings):
        self.settings = settings
        self.run_config = RunConfiguration()
        self.plugin_cache = PluginCache()
        self.jobs_config = JobGenerator(self.plugin_cache)
        self.loaded_config_sources = []
        self._config_parser = ConfigParser()
        self._jobs = []
        self._jobs_generated = False
        self.agenda = None

    def load_config_file(self, filepath):
        self._config_parser.load_from_path(self, filepath)
        self.loaded_config_sources.append(filepath)

    def load_config(self, values, source, wrap_exceptions=True):
        self._config_parser.load(self, values, source)
        self.loaded_config_sources.append(source)
    def get_plugin(self, name=None, kind=None, *args, **kwargs):
        return self.plugin_cache.get_plugin(name, kind, *args, **kwargs)

    def get_instruments(self, target):
        instruments = []
        for name in self.enabled_instruments:
            instruments.append(self.get_plugin(name, kind='instrument',
                                               target=target))
        return instruments

    def finalize(self):
        if not self.agenda:
            msg = 'Attempting to finalize config before agenda has been set'
            raise RuntimeError(msg)
        self.run_config.merge_device_config(self.plugin_cache)
        return CombinedConfig(self.settings, self.run_config)

    def generate_jobs(self, context):
        job_specs = self.jobs_config.generate_job_specs(context.tm)
        exec_order = self.run_config.execution_order
        for spec, i in permute_iterations(job_specs, exec_order):
            job = Job(spec, i, context)
            job.load(context.tm.target)
            self._jobs.append(job)
        self._jobs_generated = True
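A hedged sketch of the intended driver flow (the context object with its tm target manager is not part of this diff, and the config file path is made up):

# Sketch: typical ConfigManager lifecycle. `context` (carrying a `tm` target
# manager) and the config path are assumptions for illustration only.
config = ConfigManager()
config.load_config_file('config.yaml')
# ... an AgendaParser.load(...) call would set config.agenda here ...
combined = config.finalize()        # errors if no agenda has been set
config.generate_jobs(context)       # builds and loads Job objects
for job in config.jobs:
    print('{} iteration {}'.format(job.spec.workload_name, job.iteration))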
def permute_by_job(specs):
    """
    This is the "classic" implementation that executes all iterations of a
    workload spec before proceeding onto the next spec.

    """
    for spec in specs:
        for i in range(1, spec.iterations + 1):
            yield (spec, i)
def permute_by_iteration(specs):
    """
    Runs the first iteration for all benchmarks first, before proceeding to
    the next iteration, i.e. A1, B1, C1, A2, B2, C2... instead of A1, A2, B1,
    B2, C1, C2...

    If multiple sections were specified in the agenda, this will run all
    sections for the first global spec first, followed by all sections for the
    second spec, etc.

    e.g. given sections X and Y, and global specs A and B, with 2 iterations,
    this will run

    X.A1, Y.A1, X.B1, Y.B1, X.A2, Y.A2, X.B2, Y.B2

    """
    groups = [list(g) for k, g in groupby(specs, lambda s: s.workload_id)]

    all_tuples = []
    for spec in chain(*groups):
        all_tuples.append([(spec, i + 1)
                           for i in xrange(spec.iterations)])
    for t in chain(*map(list, izip_longest(*all_tuples))):
        if t is not None:
            yield t
def permute_by_section(specs):
    """
    Runs the first iteration for all benchmarks first, before proceeding to
    the next iteration, i.e. A1, B1, C1, A2, B2, C2... instead of A1, A2, B1,
    B2, C1, C2...

    If multiple sections were specified in the agenda, this will run all specs
    for the first section followed by all specs for the second section, etc.

    e.g. given sections X and Y, and global specs A and B, with 2 iterations,
    this will run

    X.A1, X.B1, Y.A1, Y.B1, X.A2, X.B2, Y.A2, Y.B2

    """
    groups = [list(g) for k, g in groupby(specs, lambda s: s.section_id)]

    all_tuples = []
    for spec in chain(*groups):
        all_tuples.append([(spec, i + 1)
                           for i in xrange(spec.iterations)])
    for t in chain(*map(list, izip_longest(*all_tuples))):
        if t is not None:
            yield t
def permute_randomly(specs):
    """
    This will generate a random permutation of specs/iteration tuples.

    """
    result = []
    for spec in specs:
        for i in xrange(1, spec.iterations + 1):
            result.append((spec, i))
    random.shuffle(result)
    for t in result:
        yield t
permute_map = {
    'by_iteration': permute_by_iteration,
    'by_job': permute_by_job,
    'by_section': permute_by_section,
    'random': permute_randomly,
}


def permute_iterations(specs, exec_order):
    if exec_order not in permute_map:
        msg = 'Unknown execution order "{}"; must be in: {}'
        raise ValueError(msg.format(exec_order, permute_map.keys()))
    return permute_map[exec_order](specs)
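To make the orderings concrete, a small sketch with stand-in specs (the real JobSpec class is not shown in this diff; only the attributes the permute functions actually read are stubbed):

# Stub just enough of a spec for permute_by_job / permute_by_iteration.
class FakeSpec(object):
    def __init__(self, workload_id, iterations):
        self.workload_id = workload_id
        self.iterations = iterations
    def __repr__(self):
        return self.workload_id

specs = [FakeSpec('A', 2), FakeSpec('B', 2)]
print(list(permute_iterations(specs, 'by_job')))
# [(A, 1), (A, 2), (B, 1), (B, 2)]
print(list(permute_iterations(specs, 'by_iteration')))
# [(A, 1), (B, 1), (A, 2), (B, 2)]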
wa/framework/configuration/parsers.py (new file, 308 lines)
@@ -0,0 +1,308 @@
# Copyright 2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import os

from wlauto.exceptions import ConfigError
from wlauto.utils.serializer import read_pod, SerializerSyntaxError
from wlauto.utils.types import toggle_set, counter
from wlauto.core.configuration.configuration import JobSpec


###############
### Parsers ###
###############
class ConfigParser(object):

    def load_from_path(self, state, filepath):
        self.load(state, _load_file(filepath, "Config"), filepath)

    def load(self, state, raw, source, wrap_exceptions=True):  # pylint: disable=too-many-branches
        try:
            if 'run_name' in raw:
                msg = '"run_name" can only be specified in the config '\
                      'section of an agenda'
                raise ConfigError(msg)

            if 'id' in raw:
                raise ConfigError('"id" cannot be set globally')

            merge_result_processors_instruments(raw)

            # Get WA core configuration
            for cfg_point in state.settings.configuration.itervalues():
                value = get_aliased_param(cfg_point, raw)
                if value is not None:
                    state.settings.set(cfg_point.name, value)

            # Get run-specific configuration
            for cfg_point in state.run_config.configuration.itervalues():
                value = get_aliased_param(cfg_point, raw)
                if value is not None:
                    state.run_config.set(cfg_point.name, value)

            # Get global job spec configuration
            for cfg_point in JobSpec.configuration.itervalues():
                value = get_aliased_param(cfg_point, raw)
                if value is not None:
                    state.jobs_config.set_global_value(cfg_point.name, value)

            for name, values in raw.iteritems():
                # Assume that all leftover config is for a plug-in or a global
                # alias; it is up to PluginCache to validate this assumption.
                state.plugin_cache.add_configs(name, values, source)

        except ConfigError as e:
            if wrap_exceptions:
                raise ConfigError('Error in "{}":\n{}'.format(source, str(e)))
            else:
                raise e
class AgendaParser(object):

    def load_from_path(self, state, filepath):
        raw = _load_file(filepath, 'Agenda')
        self.load(state, raw, filepath)

    def load(self, state, raw, source):
        try:
            if not isinstance(raw, dict):
                raise ConfigError('Invalid agenda, top level entry must be a dict')

            self._populate_and_validate_config(state, raw, source)
            sections = self._pop_sections(raw)
            global_workloads = self._pop_workloads(raw)

            if raw:
                msg = 'Invalid top level agenda entry(ies): "{}"'
                raise ConfigError(msg.format('", "'.join(raw.keys())))

            sect_ids, wkl_ids = self._collect_ids(sections, global_workloads)
            self._process_global_workloads(state, global_workloads, wkl_ids)
            self._process_sections(state, sections, sect_ids, wkl_ids)

            state.agenda = source

        except (ConfigError, SerializerSyntaxError) as e:
            raise ConfigError('Error in "{}":\n\t{}'.format(source, str(e)))
    def _populate_and_validate_config(self, state, raw, source):
        for name in ['config', 'global']:
            entry = raw.pop(name, None)
            if entry is None:
                continue

            if not isinstance(entry, dict):
                msg = 'Invalid entry "{}" - must be a dict'
                raise ConfigError(msg.format(name))

            if 'run_name' in entry:
                state.run_config.set('run_name', entry.pop('run_name'))

            state.load_config(entry, source, wrap_exceptions=False)

    def _pop_sections(self, raw):
        sections = raw.pop("sections", [])
        if not isinstance(sections, list):
            raise ConfigError('Invalid entry "sections" - must be a list')
        return sections

    def _pop_workloads(self, raw):
        workloads = raw.pop("workloads", [])
        if not isinstance(workloads, list):
            raise ConfigError('Invalid entry "workloads" - must be a list')
        return workloads

    def _collect_ids(self, sections, global_workloads):
        seen_section_ids = set()
        seen_workload_ids = set()

        for workload in global_workloads:
            workload = _get_workload_entry(workload)
            _collect_valid_id(workload.get("id"), seen_workload_ids, "workload")

        for section in sections:
            _collect_valid_id(section.get("id"), seen_section_ids, "section")
            for workload in section["workloads"] if "workloads" in section else []:
                workload = _get_workload_entry(workload)
                _collect_valid_id(workload.get("id"), seen_workload_ids,
                                  "workload")

        return seen_section_ids, seen_workload_ids

    def _process_global_workloads(self, state, global_workloads, seen_wkl_ids):
        for workload_entry in global_workloads:
            workload = _process_workload_entry(workload_entry, seen_wkl_ids,
                                               state.jobs_config)
            state.jobs_config.add_workload(workload)

    def _process_sections(self, state, sections, seen_sect_ids, seen_wkl_ids):
        for section in sections:
            workloads = []
            for workload_entry in section.pop("workloads", []):
                workload = _process_workload_entry(workload_entry, seen_wkl_ids,
                                                   state.jobs_config)
                workloads.append(workload)

            section = _construct_valid_entry(section, seen_sect_ids,
                                             "s", state.jobs_config)
            state.jobs_config.add_section(section, workloads)
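For reference, a hedged sketch of the kind of raw pod the parser above consumes (the keys are the ones handled in the code; the workload names and values are invented):

# Sketch of an agenda pod, as read_pod() would return it.
raw = {
    'config': {'run_name': 'example-run'},
    'sections': [
        {'id': 'quick', 'workloads': ['dhrystone']},
    ],
    'workloads': [
        'memcpy',                            # bare string form
        {'id': 'wk1', 'name': 'dhrystone'},  # dict form
    ],
}
AgendaParser().load(state, raw, source='agenda.yaml')   # `state` is a ConfigManager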
########################
### Helper functions ###
########################
def get_aliased_param(cfg_point, d, default=None, pop=True):
    """
    Given a ConfigurationPoint and a dict, this function will search the dict
    for the ConfigurationPoint's name and aliases. If more than one is found,
    it will raise a ConfigError. If one (and only one) is found, it will
    return the value for that ConfigurationPoint. If neither the name nor any
    of the aliases are present in the dict, it will return the "default"
    parameter of this function.
    """
    aliases = [cfg_point.name] + cfg_point.aliases
    alias_map = [a for a in aliases if a in d]
    if len(alias_map) > 1:
        raise ConfigError(DUPLICATE_ENTRY_ERROR.format(aliases))
    elif alias_map:
        if pop:
            return d.pop(alias_map[0])
        else:
            return d[alias_map[0]]
    else:
        return default
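A hedged illustration of the alias resolution (the ConfigurationPoint arguments mirror the 'runtime_parameters' definition earlier in this diff; the value is invented):

cp = ConfigurationPoint('runtime_parameters',
                        kind=list_of(identifier),
                        aliases=['runtime_params'])
d = {'runtime_params': ['big_cores_on']}
print(get_aliased_param(cp, d))   # ['big_cores_on'], popped from d
print(d)                          # {}
# Had d contained both 'runtime_parameters' and 'runtime_params',
# a ConfigError would have been raised instead.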
def _load_file(filepath, error_name):
    if not os.path.isfile(filepath):
        raise ValueError("{} does not exist".format(filepath))
    try:
        raw = read_pod(filepath)
    except SerializerSyntaxError as e:
        raise ConfigError('Error parsing {} {}: {}'.format(error_name, filepath, e))
    if not isinstance(raw, dict):
        message = '{} does not contain a valid {} structure; top level must be a dict.'
        raise ConfigError(message.format(filepath, error_name))
    return raw
def merge_result_processors_instruments(raw):
    instr_config = JobSpec.configuration['instrumentation']
    instruments = toggle_set(get_aliased_param(instr_config, raw, default=[]))
    result_processors = toggle_set(raw.pop('result_processors', []))
    if instruments and result_processors:
        conflicts = instruments.conflicts_with(result_processors)
        if conflicts:
            msg = '"instrumentation" and "result_processors" have '\
                  'conflicting entries: {}'
            entries = ', '.join('"{}"'.format(c.strip("~")) for c in conflicts)
            raise ConfigError(msg.format(entries))
    raw['instrumentation'] = instruments.merge_with(result_processors)
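A sketch of the conflict this guards against (assuming toggle_set treats x and ~x as a conflicting pair, which is what conflicts_with() checks for; the entry names are only illustrative):

raw = {'instrumentation': ['execution_time', 'csv'],
       'result_processors': ['~csv']}
merge_result_processors_instruments(raw)
# ConfigError: "instrumentation" and "result_processors" have conflicting entries: "csv"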
def _pop_aliased(d, names, entry_id):
    name_count = sum(1 for n in names if n in d)
    if name_count > 1:
        names_list = ', '.join(names)
        msg = 'Invalid workload entry "{}": at most one of ({}) must be specified.'
        raise ConfigError(msg.format(entry_id, names_list))
    for name in names:
        if name in d:
            return d.pop(name)
    return None
def _construct_valid_entry(raw, seen_ids, prefix, jobs_config):
    workload_entry = {}

    # Generate an automatic ID if the entry doesn't already have one
    if 'id' not in raw:
        while True:
            new_id = '{}{}'.format(prefix, counter(name=prefix))
            if new_id not in seen_ids:
                break
        workload_entry['id'] = new_id
        seen_ids.add(new_id)
    else:
        workload_entry['id'] = raw.pop('id')

    # Process instrumentation
    merge_result_processors_instruments(raw)

    # Validate all known workload_entry fields
    for name, cfg_point in JobSpec.configuration.iteritems():
        value = get_aliased_param(cfg_point, raw)
        if value is not None:
            value = cfg_point.kind(value)
            cfg_point.validate_value(name, value)
            workload_entry[name] = value

    wk_id = workload_entry['id']
    param_names = ['workload_params', 'workload_parameters']
    if prefix == 'wk':
        param_names += ['params', 'parameters']
    workload_entry["workload_parameters"] = _pop_aliased(raw, param_names, wk_id)

    param_names = ['runtime_parameters', 'runtime_params']
    if prefix == 's':
        param_names += ['params', 'parameters']
    workload_entry["runtime_parameters"] = _pop_aliased(raw, param_names, wk_id)

    param_names = ['boot_parameters', 'boot_params']
    workload_entry["boot_parameters"] = _pop_aliased(raw, param_names, wk_id)

    if "instrumentation" in workload_entry:
        jobs_config.update_enabled_instruments(workload_entry["instrumentation"])

    # Error if any unknown entries remain
    if raw:
        msg = 'Invalid entry(ies) in "{}": "{}"'
        raise ConfigError(msg.format(workload_entry['id'], ', '.join(raw.keys())))

    return workload_entry
def _collect_valid_id(entry_id, seen_ids, entry_type):
    if entry_id is None:
        return
    if entry_id in seen_ids:
        raise ConfigError('Duplicate {} ID "{}".'.format(entry_type, entry_id))
    # "-" is reserved for joining section and workload IDs
    if "-" in entry_id:
        msg = 'Invalid {} ID "{}"; IDs cannot contain a "-"'
        raise ConfigError(msg.format(entry_type, entry_id))
    if entry_id == "global":
        msg = 'Invalid {} ID "global"; this is a reserved ID'
        raise ConfigError(msg.format(entry_type))
    seen_ids.add(entry_id)


def _get_workload_entry(workload):
    if isinstance(workload, basestring):
        workload = {'name': workload}
    elif not isinstance(workload, dict):
        raise ConfigError('Invalid workload entry: "{}"'.format(workload))
    return workload


def _process_workload_entry(workload, seen_workload_ids, jobs_config):
    workload = _get_workload_entry(workload)
    workload = _construct_valid_entry(workload, seen_workload_ids,
                                      "wk", jobs_config)
    return workload
wa/framework/configuration/plugin_cache.py (new file, 227 lines)
@@ -0,0 +1,227 @@
# Copyright 2016 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from copy import copy
from collections import defaultdict
from itertools import chain

from devlib.utils.misc import memoized

from wa.framework import pluginloader
from wa.framework.exception import ConfigError
from wa.framework.target.descriptor import get_target_descriptions
from wa.utils.types import obj_dict

GENERIC_CONFIGS = ["device_config", "workload_parameters",
                   "boot_parameters", "runtime_parameters"]
class PluginCache(object):
    """
    The plugin cache is used to store configuration that cannot be processed
    at this stage, whether that is because it is not yet known whether it is
    needed (in the case of disabled plug-ins) or because it is not known what
    it belongs to (in the case of "device_config" etc.). It also tracks where
    each piece of configuration came from, and the priority order of said
    sources.
    """
    def __init__(self, loader=pluginloader):
        self.loader = loader
        self.sources = []
        self.plugin_configs = defaultdict(lambda: defaultdict(dict))
        self.global_alias_values = defaultdict(dict)
        self.targets = {td.name: td for td in get_target_descriptions()}

        # Generate a mapping of what global aliases belong to
        self._global_alias_map = defaultdict(dict)
        self._list_of_global_aliases = set()
        for plugin in self.loader.list_plugins():
            for param in plugin.parameters:
                if param.global_alias:
                    self._global_alias_map[plugin.name][param.global_alias] = param
                    self._list_of_global_aliases.add(param.global_alias)

    def add_source(self, source):
        if source in self.sources:
            raise Exception("Source has already been added.")
        self.sources.append(source)

    def add_global_alias(self, alias, value, source):
        if source not in self.sources:
            msg = "Source '{}' has not been added to the plugin cache."
            raise RuntimeError(msg.format(source))

        if not self.is_global_alias(alias):
            msg = "'{}' is not a valid global alias"
            raise RuntimeError(msg.format(alias))

        self.global_alias_values[alias][source] = value
    def add_configs(self, plugin_name, values, source):
        if self.is_global_alias(plugin_name):
            self.add_global_alias(plugin_name, values, source)
            return
        for name, value in values.iteritems():
            self.add_config(plugin_name, name, value, source)

    def add_config(self, plugin_name, name, value, source):
        if source not in self.sources:
            msg = "Source '{}' has not been added to the plugin cache."
            raise RuntimeError(msg.format(source))

        if (not self.loader.has_plugin(plugin_name) and
                plugin_name not in GENERIC_CONFIGS):
            msg = 'configuration provided for unknown plugin "{}"'
            raise ConfigError(msg.format(plugin_name))

        if (plugin_name not in GENERIC_CONFIGS and
                name not in self.get_plugin_parameters(plugin_name)):
            msg = "'{}' is not a valid parameter for '{}'"
            raise ConfigError(msg.format(name, plugin_name))

        self.plugin_configs[plugin_name][source][name] = value

    def is_global_alias(self, name):
        return name in self._list_of_global_aliases
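    # Usage sketch (illustrative only; the source labels are invented -- in
    # WA they are config file paths and similar identifiers). Sources must be
    # registered before configs are added, and the order of add_source()
    # calls defines priority, later sources overriding earlier ones:
    #
    #   cache = PluginCache()
    #   cache.add_source('defaults')      # added first => lowest priority
    #   cache.add_source('user-config')   # added last  => highest priority
    #   cache.add_configs('trace-cmd', {'buffer_size': 8192}, 'defaults')
    #   cache.add_configs('trace-cmd', {'buffer_size': 65536}, 'user-config')
    #   cache.get_plugin_config('trace-cmd')   # merged: buffer_size == 65536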
    def get_plugin_config(self, plugin_name, generic_name=None):
        config = obj_dict(not_in_dict=['name'])
        config.name = plugin_name

        if plugin_name not in GENERIC_CONFIGS:
            self._set_plugin_defaults(plugin_name, config)
            self._set_from_global_aliases(plugin_name, config)

        if generic_name is None:
            # Perform a simple merge with the order of sources representing
            # priority
            plugin_config = self.plugin_configs[plugin_name]
            cfg_points = self.get_plugin_parameters(plugin_name)
            for source in self.sources:
                if source not in plugin_config:
                    continue
                for name, value in plugin_config[source].iteritems():
                    cfg_points[name].set_value(config, value=value)
        else:
            # A more complicated merge that involves priority of sources and
            # specificity
            self._merge_using_priority_specificity(plugin_name, generic_name,
                                                   config)

        return config
    def get_plugin(self, name, kind=None, *args, **kwargs):
        config = self.get_plugin_config(name)
        kwargs = dict(config.items() + kwargs.items())
        return self.loader.get_plugin(name, kind=kind, *args, **kwargs)

    @memoized
    def get_plugin_parameters(self, name):
        if name in self.targets:
            return self._get_target_params(name)
        params = self.loader.get_plugin_class(name).parameters
        return {param.name: param for param in params}
    def _set_plugin_defaults(self, plugin_name, config):
        cfg_points = self.get_plugin_parameters(plugin_name)
        for cfg_point in cfg_points.itervalues():
            cfg_point.set_value(config, check_mandatory=False)

    def _set_from_global_aliases(self, plugin_name, config):
        for alias, param in self._global_alias_map[plugin_name].iteritems():
            if alias in self.global_alias_values:
                for source in self.sources:
                    if source not in self.global_alias_values[alias]:
                        continue
                    val = self.global_alias_values[alias][source]
                    param.set_value(config, value=val)

    def _get_target_params(self, name):
        td = self.targets[name]
        params = {p.name: p for p in chain(td.target_params, td.platform_params)}
        #params['connection_settings'] = {p.name: p for p in td.conn_params}
        return params
    # pylint: disable=too-many-nested-blocks, too-many-branches
    def _merge_using_priority_specificity(self, specific_name,
                                          generic_name, final_config):
        """
        WA configuration can come from various sources of increasing priority,
        and can be specified in a generic or a specific manner (e.g.
        ``device_config`` and ``nexus10`` respectively). WA has two rules for
        the priority of configuration:

            - Configuration from higher priority sources overrides
              configuration from lower priority sources.
            - More specific configuration overrides less specific
              configuration.

        There is a situation where these two rules come into conflict: when a
        generic configuration is given in a config source of high priority and
        a specific configuration is given in a config source of lower
        priority. In this situation it is not possible to know the end user's
        intention, so WA will error.

        :param generic_name: The name of the generic configuration,
                             e.g. ``device_config``
        :param specific_name: The name of the specific configuration used,
                              e.g. ``nexus10``
        :param final_config: An ``obj_dict`` into which the fully merged and
                             validated configuration will be set, in place.
        """
        generic_config = copy(self.plugin_configs[generic_name])
        specific_config = copy(self.plugin_configs[specific_name])
        cfg_points = self.get_plugin_parameters(specific_name)
        sources = self.sources
        seen_specific_config = defaultdict(list)

        # set_value uses the 'name' attribute of the passed object in its
        # error messages; to ensure those messages make sense, the name will
        # have to be changed several times during this function.
        final_config.name = specific_name
        # pylint: disable=too-many-nested-blocks
        for source in sources:
            try:
                if source in generic_config:
                    final_config.name = generic_name
                    for name, cfg_point in cfg_points.iteritems():
                        if name in generic_config[source]:
                            if name in seen_specific_config:
                                msg = ('"{generic_name}" configuration "{config_name}" has already been '
                                       'specified more specifically for {specific_name} in:\n\t\t{sources}')
                                msg = msg.format(generic_name=generic_name,
                                                 config_name=name,
                                                 specific_name=specific_name,
                                                 sources=", ".join(seen_specific_config[name]))
                                raise ConfigError(msg)
                            value = generic_config[source][name]
                            cfg_point.set_value(final_config, value, check_mandatory=False)

                if source in specific_config:
                    final_config.name = specific_name
                    for name, cfg_point in cfg_points.iteritems():
                        if name in specific_config[source]:
                            seen_specific_config[name].append(str(source))
                            value = specific_config[source][name]
                            cfg_point.set_value(final_config, value, check_mandatory=False)

            except ConfigError as e:
                raise ConfigError('Error in "{}":\n\t{}'.format(source, str(e)))

        # Validate final configuration
        final_config.name = specific_name
        for cfg_point in cfg_points.itervalues():
            cfg_point.validate(final_config)
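To make the conflict case concrete, a hedged sketch (the source labels and the 'device' parameter are invented, and a nexus10 target description is assumed to be available to the loader):

cache = PluginCache()
cache.add_source('config.yaml')   # registered first => lower priority
cache.add_source('agenda')        # registered last  => higher priority
cache.add_configs('nexus10', {'device': 'ABC123'}, 'config.yaml')    # specific, low priority
cache.add_configs('device_config', {'device': 'DEF456'}, 'agenda')   # generic, high priority
cache.get_plugin_config('nexus10', generic_name='device_config')
# ConfigError: "device_config" configuration "device" has already been
# specified more specifically for nexus10 in: config.yaml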
wa/framework/configuration/tree.py (new file, 89 lines)
@@ -0,0 +1,89 @@
# Copyright 2016 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class JobSpecSource(object):

    kind = ""

    def __init__(self, config, parent=None):
        self.config = config
        self.parent = parent

    @property
    def id(self):
        return self.config['id']

    def name(self):
        raise NotImplementedError()


class WorkloadEntry(JobSpecSource):
    kind = "workload"

    @property
    def name(self):
        if self.parent.id == "global":
            return 'workload "{}"'.format(self.id)
        else:
            return 'workload "{}" from section "{}"'.format(self.id, self.parent.id)


class SectionNode(JobSpecSource):

    kind = "section"

    @property
    def name(self):
        if self.id == "global":
            return "globally specified configuration"
        else:
            return 'section "{}"'.format(self.id)

    @property
    def is_leaf(self):
        return not bool(self.children)

    def __init__(self, config, parent=None):
        super(SectionNode, self).__init__(config, parent=parent)
        self.workload_entries = []
        self.children = []

    def add_section(self, section):
        new_node = SectionNode(section, parent=self)
        self.children.append(new_node)
        return new_node

    def add_workload(self, workload_config):
        self.workload_entries.append(WorkloadEntry(workload_config, self))

    def descendants(self):
        for child in self.children:
            for n in child.descendants():
                yield n
            yield child

    def ancestors(self):
        if self.parent is not None:
            yield self.parent
            for ancestor in self.parent.ancestors():
                yield ancestor

    def leaves(self):
        if self.is_leaf:
            yield self
        else:
            for n in self.descendants():
                if n.is_leaf:
                    yield n
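A hedged sketch of how the tree is meant to be used (config dicts only need an 'id' key for this illustration; the section names are invented):

root = SectionNode({'id': 'global'})
quick = root.add_section({'id': 'quick'})
quick.add_workload({'id': 'wk1'})
deep = quick.add_section({'id': 'deep'})

for leaf in root.leaves():
    print(leaf.name)        # section "deep" (the only leaf)
for node in deep.ancestors():
    print(node.name)        # section "quick", then globally specified configuration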