	Reworked configuration
All config now uses configuration points. Config parsing is now done in distinct stages: first, all files are parsed and their contents are sent to their corresponding config objects or to a tree; the tree is then traversed to generate the job specs.
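The intended flow of the new staged parsing, sketched with the classes added in this commit (ConfigParser, AgendaParser, JobsConfiguration); the PluginCache construction and the file paths here are assumed for illustration:

from wlauto.core.configuration import (settings, RunConfiguration,
                                       JobsConfiguration, PluginCache)
from wlauto.core.configuration.parsers import ConfigParser, AgendaParser

run_config = RunConfiguration()
jobs_config = JobsConfiguration()
plugin_cache = PluginCache()  # assumed zero-argument construction

# Stage 1: parse all files and route their contents to the corresponding
# config objects (settings, run_config) or into the jobs tree (jobs_config).
config_parser = ConfigParser(settings, run_config, jobs_config, plugin_cache)
config_parser.load_from_path('/home/user/.workload_automation/config.yaml')  # assumed path
agenda_parser = AgendaParser(config_parser, settings, run_config,
                             jobs_config, plugin_cache)
agenda_parser.load('agenda.yaml')  # also finalizes settings and run_config

# Stage 2: traverse the tree to generate the job specs.
job_specs = list(jobs_config.generate_job_specs())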
		| @@ -1,261 +0,0 @@ | ||||
| #    Copyright 2015 ARM Limited | ||||
| # | ||||
| # Licensed under the Apache License, Version 2.0 (the "License"); | ||||
| # you may not use this file except in compliance with the License. | ||||
| # You may obtain a copy of the License at | ||||
| # | ||||
| #     http://www.apache.org/licenses/LICENSE-2.0 | ||||
| # | ||||
| # Unless required by applicable law or agreed to in writing, software | ||||
| # distributed under the License is distributed on an "AS IS" BASIS, | ||||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||||
| # See the License for the specific language governing permissions and | ||||
| # limitations under the License. | ||||
| # | ||||
|  | ||||
| import os | ||||
| from copy import copy | ||||
| from collections import OrderedDict, defaultdict | ||||
| import yaml | ||||
|  | ||||
| from wlauto.exceptions import ConfigError | ||||
| from wlauto.utils.misc import load_struct_from_yaml, LoadSyntaxError | ||||
| from wlauto.utils.types import counter, reset_counter | ||||
|  | ||||
|  | ||||
| def get_aliased_param(d, aliases, default=None, pop=True): | ||||
|     alias_map = [i for i, a in enumerate(aliases) if a in d] | ||||
|     if len(alias_map) > 1: | ||||
|         message = 'Only one of {} may be specified in a single entry' | ||||
|         raise ConfigError(message.format(aliases)) | ||||
|     elif alias_map: | ||||
|         if pop: | ||||
|             return d.pop(aliases[alias_map[0]]) | ||||
|         else: | ||||
|             return d[aliases[alias_map[0]]] | ||||
|     else: | ||||
|         return default | ||||
|  | ||||
|  | ||||
| class AgendaEntry(object): | ||||
|  | ||||
|     def to_dict(self): | ||||
|         return copy(self.__dict__) | ||||
|  | ||||
|  | ||||
| class AgendaWorkloadEntry(AgendaEntry): | ||||
|     """ | ||||
|     Specifies execution of a workload, including things like the number of | ||||
|     iterations, device runtime_parameters configuration, etc. | ||||
|  | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, **kwargs): | ||||
|         super(AgendaWorkloadEntry, self).__init__() | ||||
|         self.id = kwargs.pop('id') | ||||
|         self.workload_name = get_aliased_param(kwargs, ['workload_name', 'name']) | ||||
|         if not self.workload_name: | ||||
|             raise ConfigError('No workload name specified in entry {}'.format(self.id)) | ||||
|         self.label = kwargs.pop('label', self.workload_name) | ||||
|         self.number_of_iterations = kwargs.pop('iterations', None) | ||||
|         self.boot_parameters = get_aliased_param(kwargs, | ||||
|                                                  ['boot_parameters', 'boot_params'], | ||||
|                                                  default=OrderedDict()) | ||||
|         self.runtime_parameters = get_aliased_param(kwargs, | ||||
|                                                     ['runtime_parameters', 'runtime_params'], | ||||
|                                                     default=OrderedDict()) | ||||
|         self.workload_parameters = get_aliased_param(kwargs, | ||||
|                                                      ['workload_parameters', 'workload_params', 'params'], | ||||
|                                                      default=OrderedDict()) | ||||
|         self.instrumentation = kwargs.pop('instrumentation', []) | ||||
|         self.flash = kwargs.pop('flash', OrderedDict()) | ||||
|         self.classifiers = kwargs.pop('classifiers', OrderedDict()) | ||||
|         if kwargs: | ||||
|             raise ConfigError('Invalid entry(ies) in workload {}: {}'.format(self.id, ', '.join(kwargs.keys()))) | ||||
|  | ||||
|  | ||||
| class AgendaSectionEntry(AgendaEntry): | ||||
|     """ | ||||
|     Specifies a section of an agenda, grouping workloads together with | ||||
|     configuration common to them, such as the number of iterations, device | ||||
|     runtime_parameters configuration, etc. | ||||
|  | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, agenda, **kwargs): | ||||
|         super(AgendaSectionEntry, self).__init__() | ||||
|         self.id = kwargs.pop('id') | ||||
|         self.number_of_iterations = kwargs.pop('iterations', None) | ||||
|         self.boot_parameters = get_aliased_param(kwargs, | ||||
|                                                  ['boot_parameters', 'boot_params'], | ||||
|                                                  default=OrderedDict()) | ||||
|         self.runtime_parameters = get_aliased_param(kwargs, | ||||
|                                                     ['runtime_parameters', 'runtime_params', 'params'], | ||||
|                                                     default=OrderedDict()) | ||||
|         self.workload_parameters = get_aliased_param(kwargs, | ||||
|                                                      ['workload_parameters', 'workload_params'], | ||||
|                                                      default=OrderedDict()) | ||||
|         self.instrumentation = kwargs.pop('instrumentation', []) | ||||
|         self.flash = kwargs.pop('flash', OrderedDict()) | ||||
|         self.classifiers = kwargs.pop('classifiers', OrderedDict()) | ||||
|         self.workloads = [] | ||||
|         for w in kwargs.pop('workloads', []): | ||||
|             self.workloads.append(agenda.get_workload_entry(w)) | ||||
|         if kwargs: | ||||
|             raise ConfigError('Invalid entry(ies) in section {}: {}'.format(self.id, ', '.join(kwargs.keys()))) | ||||
|  | ||||
|     def to_dict(self): | ||||
|         d = copy(self.__dict__) | ||||
|         d['workloads'] = [w.to_dict() for w in self.workloads] | ||||
|         return d | ||||
|  | ||||
|  | ||||
| class AgendaGlobalEntry(AgendaEntry): | ||||
|     """ | ||||
|     Workload configuration global to all workloads. | ||||
|  | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, **kwargs): | ||||
|         super(AgendaGlobalEntry, self).__init__() | ||||
|         self.number_of_iterations = kwargs.pop('iterations', None) | ||||
|         self.boot_parameters = get_aliased_param(kwargs, | ||||
|                                                  ['boot_parameters', 'boot_params'], | ||||
|                                                  default=OrderedDict()) | ||||
|         self.runtime_parameters = get_aliased_param(kwargs, | ||||
|                                                     ['runtime_parameters', 'runtime_params', 'params'], | ||||
|                                                     default=OrderedDict()) | ||||
|         self.workload_parameters = get_aliased_param(kwargs, | ||||
|                                                      ['workload_parameters', 'workload_params'], | ||||
|                                                      default=OrderedDict()) | ||||
|         self.instrumentation = kwargs.pop('instrumentation', []) | ||||
|         self.flash = kwargs.pop('flash', OrderedDict()) | ||||
|         self.classifiers = kwargs.pop('classifiers', OrderedDict()) | ||||
|         if kwargs: | ||||
|             raise ConfigError('Invalid entries in global section: {}'.format(kwargs)) | ||||
|  | ||||
|  | ||||
| class Agenda(object): | ||||
|  | ||||
|     def __init__(self, source=None): | ||||
|         self.filepath = None | ||||
|         self.config = {} | ||||
|         self.global_ = None | ||||
|         self.sections = [] | ||||
|         self.workloads = [] | ||||
|         self._seen_ids = defaultdict(set) | ||||
|         if source: | ||||
|             try: | ||||
|                 reset_counter('section') | ||||
|                 reset_counter('workload') | ||||
|                 self._load(source) | ||||
|             except (ConfigError, LoadSyntaxError, SyntaxError), e: | ||||
|                 raise ConfigError(str(e)) | ||||
|  | ||||
|     def add_workload_entry(self, w): | ||||
|         entry = self.get_workload_entry(w) | ||||
|         self.workloads.append(entry) | ||||
|  | ||||
|     def get_workload_entry(self, w): | ||||
|         if isinstance(w, basestring): | ||||
|             w = {'name': w} | ||||
|         if not isinstance(w, dict): | ||||
|             raise ConfigError('Invalid workload entry: "{}" in {}'.format(w, self.filepath)) | ||||
|         self._assign_id_if_needed(w, 'workload') | ||||
|         return AgendaWorkloadEntry(**w) | ||||
|  | ||||
|     def _load(self, source):  # pylint: disable=too-many-branches | ||||
|         try: | ||||
|             raw = self._load_raw_from_source(source) | ||||
|         except ValueError as e: | ||||
|             name = getattr(source, 'name', '') | ||||
|             raise ConfigError('Error parsing agenda {}: {}'.format(name, e)) | ||||
|         if not isinstance(raw, dict): | ||||
|             message = '{} does not contain a valid agenda structure; top level must be a dict.' | ||||
|             raise ConfigError(message.format(self.filepath)) | ||||
|         for k, v in raw.iteritems(): | ||||
|             if v is None: | ||||
|                 raise ConfigError('Empty "{}" entry in {}'.format(k, self.filepath)) | ||||
|  | ||||
|             if k == 'config': | ||||
|                 if not isinstance(v, dict): | ||||
|                     raise ConfigError('Invalid agenda: "config" entry must be a dict') | ||||
|                 self.config = v | ||||
|             elif k == 'global': | ||||
|                 self.global_ = AgendaGlobalEntry(**v) | ||||
|             elif k == 'sections': | ||||
|                 self._collect_existing_ids(v, 'section') | ||||
|                 for s in v: | ||||
|                     if not isinstance(s, dict): | ||||
|                         raise ConfigError('Invalid section entry: "{}" in {}'.format(s, self.filepath)) | ||||
|                     self._collect_existing_ids(s.get('workloads', []), 'workload') | ||||
|                 for s in v: | ||||
|                     self._assign_id_if_needed(s, 'section') | ||||
|                     self.sections.append(AgendaSectionEntry(self, **s)) | ||||
|             elif k == 'workloads': | ||||
|                 self._collect_existing_ids(v, 'workload') | ||||
|                 for w in v: | ||||
|                     self.workloads.append(self.get_workload_entry(w)) | ||||
|             else: | ||||
|                 raise ConfigError('Unexpected agenda entry "{}" in {}'.format(k, self.filepath)) | ||||
|  | ||||
|     def _load_raw_from_source(self, source): | ||||
|         if hasattr(source, 'read') and hasattr(source, 'name'):  # file-like object | ||||
|             self.filepath = source.name | ||||
|             raw = load_struct_from_yaml(text=source.read()) | ||||
|         elif isinstance(source, basestring): | ||||
|             if os.path.isfile(source): | ||||
|                 self.filepath = source | ||||
|                 raw = load_struct_from_yaml(filepath=self.filepath) | ||||
|             else:  # assume YAML text | ||||
|                 raw = load_struct_from_yaml(text=source) | ||||
|         else: | ||||
|             raise ConfigError('Unknown agenda source: {}'.format(source)) | ||||
|         return raw | ||||
|  | ||||
|     def _collect_existing_ids(self, ds, pool): | ||||
|         # Collection needs to take place first  so that auto IDs can be | ||||
|         # correctly assigned, e.g. if someone explicitly specified an ID | ||||
|         # of '1' for one of the workloads. | ||||
|         for d in ds: | ||||
|             if isinstance(d, dict) and 'id' in d: | ||||
|                 did = str(d['id']) | ||||
|                 if did in self._seen_ids[pool]: | ||||
|                     raise ConfigError('Duplicate {} ID: {}'.format(pool, did)) | ||||
|                 self._seen_ids[pool].add(did) | ||||
|  | ||||
|     def _assign_id_if_needed(self, d, pool): | ||||
|         # Also enforces string IDs | ||||
|         if d.get('id') is None: | ||||
|             did = str(counter(pool)) | ||||
|             while did in self._seen_ids[pool]: | ||||
|                 did = str(counter(pool)) | ||||
|             d['id'] = did | ||||
|             self._seen_ids[pool].add(did) | ||||
|         else: | ||||
|             d['id'] = str(d['id']) | ||||
|  | ||||
|  | ||||
| # Modifying the yaml parser to use an OrderedDict, rather than a regular Python | ||||
| # dict for mappings. This preserves the order in which the items are | ||||
| # specified. See | ||||
| #   http://stackoverflow.com/a/21048064 | ||||
|  | ||||
| _mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG | ||||
|  | ||||
|  | ||||
| def dict_representer(dumper, data): | ||||
|     return dumper.represent_mapping(_mapping_tag, data.iteritems()) | ||||
|  | ||||
|  | ||||
| def dict_constructor(loader, node): | ||||
|     pairs = loader.construct_pairs(node) | ||||
|     seen_keys = set() | ||||
|     for k, _ in pairs: | ||||
|         if k in seen_keys: | ||||
|             raise ValueError('Duplicate entry: {}'.format(k)) | ||||
|         seen_keys.add(k) | ||||
|     return OrderedDict(pairs) | ||||
|  | ||||
|  | ||||
| yaml.add_representer(OrderedDict, dict_representer) | ||||
| yaml.add_constructor(_mapping_tag, dict_constructor) | ||||
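With the constructor above registered (which happens when this module is imported), YAML mappings load as OrderedDicts in source order and duplicate keys are rejected; a quick illustration, assuming that registration is in effect:

import yaml

data = yaml.load('first: 1\nsecond: 2\nthird: 3\n')
assert list(data.keys()) == ['first', 'second', 'third']

try:
    yaml.load('dup: 1\ndup: 2\n')
except ValueError as e:
    print(e)  # Duplicate entry: dup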
										
											
												File diff suppressed because it is too large
20  wlauto/core/configuration/__init__.py  Normal file
							| @@ -0,0 +1,20 @@ | ||||
| #    Copyright 2013-2016 ARM Limited | ||||
| # | ||||
| # Licensed under the Apache License, Version 2.0 (the "License"); | ||||
| # you may not use this file except in compliance with the License. | ||||
| # You may obtain a copy of the License at | ||||
| # | ||||
| #     http://www.apache.org/licenses/LICENSE-2.0 | ||||
| # | ||||
| # Unless required by applicable law or agreed to in writing, software | ||||
| # distributed under the License is distributed on an "AS IS" BASIS, | ||||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||||
| # See the License for the specific language governing permissions and | ||||
| # limitations under the License. | ||||
| # | ||||
| from wlauto.core.configuration.configuration import (settings, | ||||
|                                                      WAConfiguration, | ||||
|                                                      RunConfiguration, | ||||
|                                                      JobsConfiguration, | ||||
|                                                      ConfigurationPoint) | ||||
| from wlauto.core.configuration.plugin_cache import PluginCache | ||||
							
								
								
									
614  wlauto/core/configuration/configuration.py  Normal file
							| @@ -0,0 +1,614 @@ | ||||
| #    Copyright 2014-2016 ARM Limited | ||||
| # | ||||
| # Licensed under the Apache License, Version 2.0 (the "License"); | ||||
| # you may not use this file except in compliance with the License. | ||||
| # You may obtain a copy of the License at | ||||
| # | ||||
| #     http://www.apache.org/licenses/LICENSE-2.0 | ||||
| # | ||||
| # Unless required by applicable law or agreed to in writing, software | ||||
| # distributed under the License is distributed on an "AS IS" BASIS, | ||||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||||
| # See the License for the specific language governing permissions and | ||||
| # limitations under the License. | ||||
|  | ||||
| import os | ||||
| from copy import copy | ||||
| from collections import OrderedDict | ||||
|  | ||||
| from wlauto.exceptions import ConfigError | ||||
| from wlauto.utils.misc import (get_article, merge_config_values) | ||||
| from wlauto.utils.types import (identifier, integer, boolean, | ||||
|                                 list_of_strings, toggle_set) | ||||
| from wlauto.core.configuration.tree import Node | ||||
|  | ||||
| ########################## | ||||
| ### CONFIG POINT TYPES ### | ||||
| ########################## | ||||
|  | ||||
|  | ||||
| class RebootPolicy(object): | ||||
|     """ | ||||
|     Represents the reboot policy for the execution -- at what points the device | ||||
|     should be rebooted. This, in turn, is controlled by the policy value that is | ||||
|     passed in on construction and would typically be read from the user's settings. | ||||
|     Valid policy values are: | ||||
|  | ||||
|     :never: The device will never be rebooted. | ||||
|     :as_needed: Only reboot the device if it becomes unresponsive, or needs to be flashed, etc. | ||||
|     :initial: The device will be rebooted when the execution first starts, just before | ||||
|               executing the first workload spec. | ||||
|     :each_spec: The device will be rebooted before running a new workload spec. | ||||
|     :each_iteration: The device will be rebooted before each new iteration. | ||||
|  | ||||
|     """ | ||||
|  | ||||
|     valid_policies = ['never', 'as_needed', 'initial', 'each_spec', 'each_iteration'] | ||||
|  | ||||
|     def __init__(self, policy): | ||||
|         policy = policy.strip().lower().replace(' ', '_') | ||||
|         if policy not in self.valid_policies: | ||||
|             message = 'Invalid reboot policy {}; must be one of {}'.format(policy, ', '.join(self.valid_policies)) | ||||
|             raise ConfigError(message) | ||||
|         self.policy = policy | ||||
|  | ||||
|     @property | ||||
|     def can_reboot(self): | ||||
|         return self.policy != 'never' | ||||
|  | ||||
|     @property | ||||
|     def perform_initial_boot(self): | ||||
|         return self.policy not in ['never', 'as_needed'] | ||||
|  | ||||
|     @property | ||||
|     def reboot_on_each_spec(self): | ||||
|         return self.policy in ['each_spec', 'each_iteration'] | ||||
|  | ||||
|     @property | ||||
|     def reboot_on_each_iteration(self): | ||||
|         return self.policy == 'each_iteration' | ||||
|  | ||||
|     def __str__(self): | ||||
|         return self.policy | ||||
|  | ||||
|     __repr__ = __str__ | ||||
|  | ||||
|     def __cmp__(self, other): | ||||
|         if isinstance(other, RebootPolicy): | ||||
|             return cmp(self.policy, other.policy) | ||||
|         else: | ||||
|             return cmp(self.policy, other) | ||||
|  | ||||
|     def to_pod(self): | ||||
|         return self.policy | ||||
|  | ||||
|     @staticmethod | ||||
|     def from_pod(pod): | ||||
|         return RebootPolicy(pod) | ||||
|  | ||||
|  | ||||
| class status_list(list): | ||||
|  | ||||
|     def append(self, item): | ||||
|         list.append(self, str(item).upper()) | ||||
|  | ||||
|  | ||||
| class LoggingConfig(dict): | ||||
|  | ||||
|     defaults = { | ||||
|         'file_format': '%(asctime)s %(levelname)-8s %(name)s: %(message)s', | ||||
|         'verbose_format': '%(asctime)s %(levelname)-8s %(name)s: %(message)s', | ||||
|         'regular_format': '%(levelname)-8s %(message)s', | ||||
|         'color': True, | ||||
|     } | ||||
|  | ||||
|     def __init__(self, config=None): | ||||
|         dict.__init__(self) | ||||
|         if isinstance(config, dict): | ||||
|             config = {identifier(k.lower()): v for k, v in config.iteritems()} | ||||
|             self['regular_format'] = config.pop('regular_format', self.defaults['regular_format']) | ||||
|             self['verbose_format'] = config.pop('verbose_format', self.defaults['verbose_format']) | ||||
|             self['file_format'] = config.pop('file_format', self.defaults['file_format']) | ||||
|             self['color'] = config.pop('colour_enabled', self.defaults['color'])  # legacy | ||||
|             self['color'] = config.pop('color', self['color']) | ||||
|             if config: | ||||
|                 message = 'Unexpected logging configuration parameters: {}' | ||||
|                 raise ValueError(message.format(', '.join(config.keys()))) | ||||
|         elif config is None: | ||||
|             for k, v in self.defaults.iteritems(): | ||||
|                 self[k] = v | ||||
|         else: | ||||
|             raise ValueError(config) | ||||
|  | ||||
|  | ||||
| class ConfigurationPoint(object): | ||||
|     """ | ||||
|     This defines a generic configuration point for workload automation. This is | ||||
|     used to handle global settings, plugin parameters, etc. | ||||
|  | ||||
|     """ | ||||
|  | ||||
|     # Mapping for kind conversion; see docs for convert_types below | ||||
|     kind_map = { | ||||
|         int: integer, | ||||
|         bool: boolean, | ||||
|         dict: OrderedDict, | ||||
|     } | ||||
|  | ||||
|     def __init__(self, name, | ||||
|                  kind=None, | ||||
|                  mandatory=None, | ||||
|                  default=None, | ||||
|                  override=False, | ||||
|                  allowed_values=None, | ||||
|                  description=None, | ||||
|                  constraint=None, | ||||
|                  merge=False, | ||||
|                  aliases=None, | ||||
|                  convert_types=True): | ||||
|         """ | ||||
|         Create a new Parameter object. | ||||
|  | ||||
|         :param name: The name of the parameter. This will become an instance | ||||
|                      member of the plugin object to which the parameter is | ||||
|                      applied, so it must be a valid Python identifier. This | ||||
|                      is the only mandatory parameter. | ||||
|         :param kind: The type of parameter this is. This must be a callable | ||||
|                      that takes an arbitrary object and converts it to the | ||||
|                      expected type, or raises ``ValueError`` if such conversion | ||||
|                      is not possible. Most Python standard types -- ``str``, | ||||
|                      ``int``, ``bool``, etc. -- can be used here. This | ||||
|                      defaults to ``str`` if not specified. | ||||
|         :param mandatory: If set to ``True``, then a non-``None`` value for | ||||
|                           this parameter *must* be provided on plugin | ||||
|                           object construction, otherwise ``ConfigError`` | ||||
|                           will be raised. | ||||
|         :param default: The default value for this parameter. If no value | ||||
|                         is specified on plugin construction, this value | ||||
|                         will be used instead. (Note: if this is specified | ||||
|                         and is not ``None``, then ``mandatory`` parameter | ||||
|                         will be ignored). | ||||
|         :param override: A ``bool`` that specifies whether a parameter of | ||||
|                          the same name further up the hierarchy should | ||||
|                          be overridden. If this is ``False`` (the | ||||
|                          default), an exception will be raised by the | ||||
|                          ``AttributeCollection`` instead. | ||||
|         :param allowed_values: This should be the complete list of allowed | ||||
|                                values for this parameter.  Note: ``None`` | ||||
|                                value will always be allowed, even if it is | ||||
|                                not in this list.  If you want to disallow | ||||
|                                ``None``, set ``mandatory`` to ``True``. | ||||
|         :param constraint: If specified, this must be a callable that takes | ||||
|                            the parameter value as an argument and returns a | ||||
|                            boolean indicating whether the constraint has been | ||||
|                            satisfied. Alternatively, can be a two-tuple with | ||||
|                            said callable as the first element and a string | ||||
|                            describing the constraint as the second. | ||||
|         :param merge: The default behaviour when setting a value on an object | ||||
|                       that already has that attribute is to override it with | ||||
|                       the new value. If this is set to ``True`` then the two | ||||
|                       values will be merged instead. The rules by which the | ||||
|                       values are merged will be determined by the types of | ||||
|                       the existing and new values -- see | ||||
|                       ``merge_config_values`` documentation for details. | ||||
|         :param aliases: Alternative names for the same configuration point. | ||||
|                         These are largely for backwards compatibility. | ||||
|         :param convert_types: If ``True`` (the default), will automatically | ||||
|                               convert ``kind`` values from native Python | ||||
|                               types to WA equivalents. This allows more | ||||
|                               intuitive interpretation of parameter values, | ||||
|                               e.g. the string ``"false"`` being interpreted | ||||
|                               as ``False`` when specified as the value for | ||||
|                               a boolean Parameter. | ||||
|  | ||||
|         """ | ||||
|         self.name = identifier(name) | ||||
|         if kind is not None and not callable(kind): | ||||
|             raise ValueError('Kind must be callable.') | ||||
|         if convert_types and kind in self.kind_map: | ||||
|             kind = self.kind_map[kind] | ||||
|         self.kind = kind | ||||
|         self.mandatory = mandatory | ||||
|         self.default = default | ||||
|         self.override = override | ||||
|         self.allowed_values = allowed_values | ||||
|         self.description = description | ||||
|         if self.kind is None and not self.override: | ||||
|             self.kind = str | ||||
|         if constraint is not None and not callable(constraint) and not isinstance(constraint, tuple): | ||||
|             raise ValueError('Constraint must be callable or a (callable, str) tuple.') | ||||
|         self.constraint = constraint | ||||
|         self.merge = merge | ||||
|         self.aliases = aliases or [] | ||||
|  | ||||
|     def match(self, name): | ||||
|         if name == self.name: | ||||
|             return True | ||||
|         elif name in self.aliases: | ||||
|             return True | ||||
|         return False | ||||
|  | ||||
|     def set_value(self, obj, value=None, check_mandatory=True): | ||||
|         if value is None: | ||||
|             if self.default is not None: | ||||
|                 value = self.default | ||||
|             elif check_mandatory and self.mandatory: | ||||
|                 msg = 'No value specified for mandatory parameter "{}" in {}' | ||||
|                 raise ConfigError(msg.format(self.name, obj.name)) | ||||
|         else: | ||||
|             try: | ||||
|                 value = self.kind(value) | ||||
|             except (ValueError, TypeError): | ||||
|                 typename = self.get_type_name() | ||||
|                 msg = 'Bad value "{}" for {}; must be {} {}' | ||||
|                 article = get_article(typename) | ||||
|                 raise ConfigError(msg.format(value, self.name, article, typename)) | ||||
|         if value is not None: | ||||
|             self.validate_value(obj.name, value) | ||||
|         if self.merge and hasattr(obj, self.name): | ||||
|             value = merge_config_values(getattr(obj, self.name), value) | ||||
|         setattr(obj, self.name, value) | ||||
|  | ||||
|     def get_type_name(self): | ||||
|         typename = str(self.kind) | ||||
|         if '\'' in typename: | ||||
|             typename = typename.split('\'')[1] | ||||
|         elif typename.startswith('<function'): | ||||
|             typename = typename.split()[1] | ||||
|         return typename | ||||
|  | ||||
|     def validate(self, obj): | ||||
|         value = getattr(obj, self.name, None) | ||||
|         if value is not None: | ||||
|             self.validate_value(obj.name, value) | ||||
|         else: | ||||
|             if self.mandatory: | ||||
|                 msg = 'No value specified for mandatory parameter "{}" in {}.' | ||||
|                 raise ConfigError(msg.format(self.name, obj.name)) | ||||
|  | ||||
|     def validate_value(self, name, value): | ||||
|         if self.allowed_values: | ||||
|             self.validate_allowed_values(name, value) | ||||
|         if self.constraint: | ||||
|             self.validate_constraint(name, value) | ||||
|  | ||||
|     def validate_allowed_values(self, name, value): | ||||
|         if 'list' in str(self.kind): | ||||
|             for v in value: | ||||
|                 if v not in self.allowed_values: | ||||
|                     msg = 'Invalid value {} for {} in {}; must be in {}' | ||||
|                     raise ConfigError(msg.format(v, self.name, name, self.allowed_values)) | ||||
|         else: | ||||
|             if value not in self.allowed_values: | ||||
|                 msg = 'Invalid value {} for {} in {}; must be in {}' | ||||
|                 raise ConfigError(msg.format(value, self.name, name, self.allowed_values)) | ||||
|  | ||||
|     def validate_constraint(self, name, value): | ||||
|         msg_vals = {'value': value, 'param': self.name, 'plugin': name} | ||||
|         if isinstance(self.constraint, tuple) and len(self.constraint) == 2: | ||||
|             constraint, msg = self.constraint  # pylint: disable=unpacking-non-sequence | ||||
|         elif callable(self.constraint): | ||||
|             constraint = self.constraint | ||||
|             msg = '"{value}" failed constraint validation for "{param}" in "{plugin}".' | ||||
|         else: | ||||
|             raise ValueError('Invalid constraint for "{}": must be callable or a 2-tuple'.format(self.name)) | ||||
|         if not constraint(value): | ||||
|             raise ConfigError(msg.format(**msg_vals)) | ||||
|  | ||||
|     def __repr__(self): | ||||
|         d = copy(self.__dict__) | ||||
|         del d['description'] | ||||
|         return 'ConfPoint({})'.format(d) | ||||
|  | ||||
|     __str__ = __repr__ | ||||
|  | ||||
|  | ||||
| ##################### | ||||
| ### Configuration ### | ||||
| ##################### | ||||
|  | ||||
|  | ||||
| class Configuration(object): | ||||
|  | ||||
|     __configuration = [] | ||||
|     name = "" | ||||
|     # The below line must be added to all subclasses | ||||
|     configuration = {cp.name: cp for cp in __configuration} | ||||
|  | ||||
|     def __init__(self): | ||||
|         self._finalized = False | ||||
|         for confpoint in self.configuration.itervalues(): | ||||
|             confpoint.set_value(self, check_mandatory=False) | ||||
|  | ||||
|     def set(self, name, value): | ||||
|         if self._finalized: | ||||
|             raise RuntimeError("Cannot set configuration after it has been finalized.") | ||||
|         if name not in self.configuration: | ||||
|             raise ConfigError('Unknown {} configuration "{}"'.format(self.name, name)) | ||||
|         self.configuration[name].set_value(self, value) | ||||
|  | ||||
|     def update_config(self, values): | ||||
|         for k, v in values.iteritems(): | ||||
|             self.set(k, v) | ||||
|  | ||||
|     def finalize(self): | ||||
|         for c in self.configuration.itervalues(): | ||||
|             c.validate(self) | ||||
|         self._finalized = True | ||||
|  | ||||
|  | ||||
| # This is the configuration for the core WA framework | ||||
| class WAConfiguration(Configuration): | ||||
|  | ||||
|     name = "WA Configuration" | ||||
|     __configuration = [ | ||||
|         ConfigurationPoint( | ||||
|             'user_directory', | ||||
|             description=""" | ||||
|             Path to the user directory. This is the location WA will look for | ||||
|             user configuration, additional plugins and plugin dependencies. | ||||
|             """, | ||||
|             kind=str, | ||||
|             default=os.path.join(os.path.expanduser('~'), '.workload_automation'), | ||||
|         ), | ||||
|         ConfigurationPoint( | ||||
|             'plugin_packages', | ||||
|             kind=list_of_strings, | ||||
|             default=[ | ||||
|                 'wlauto.commands', | ||||
|                 'wlauto.workloads', | ||||
|                 'wlauto.instrumentation', | ||||
|                 'wlauto.result_processors', | ||||
|                 'wlauto.managers', | ||||
|                 'wlauto.resource_getters', | ||||
|             ], | ||||
|             description=""" | ||||
|             List of packages that will be scanned for WA plugins. | ||||
|             """, | ||||
|         ), | ||||
|         ConfigurationPoint( | ||||
|             'plugin_paths', | ||||
|             kind=list_of_strings, | ||||
|             default=[ | ||||
|                 'workloads', | ||||
|                 'instruments', | ||||
|                 'targets', | ||||
|                 'processors', | ||||
|  | ||||
|                 # Legacy | ||||
|                 'managers', | ||||
|                 'result_processors', | ||||
|             ], | ||||
|             description=""" | ||||
|             List of paths that will be scanned for WA plugins. | ||||
|             """, | ||||
|             merge=True | ||||
|         ), | ||||
|         ConfigurationPoint( | ||||
|             'plugin_ignore_paths', | ||||
|             kind=list_of_strings, | ||||
|             default=[], | ||||
|             description=""" | ||||
|             List of (sub)paths that will be ignored when scanning | ||||
|             ``plugin_paths`` for WA plugins. | ||||
|             """, | ||||
|         ), | ||||
|         ConfigurationPoint( | ||||
|             'assets_repository', | ||||
|             description=""" | ||||
|             The local mount point for the filer hosting WA assets. | ||||
|             """, | ||||
|         ), | ||||
|         ConfigurationPoint( | ||||
|             'logging', | ||||
|             kind=LoggingConfig, | ||||
|             default=LoggingConfig.defaults, | ||||
|             description=""" | ||||
|             WA logging configuration. This should be a dict with a subset | ||||
|             of the following keys:: | ||||
|  | ||||
|             :regular_format: Logging format used for console output | ||||
|             :verbose_format: Logging format used for verbose console output | ||||
|             :file_format: Logging format used for run.log | ||||
|             :color: If ``True`` (the default), console logging output will | ||||
|                     contain bash color escape codes. Set this to ``False`` if | ||||
|                     console output will be piped somewhere that does not know | ||||
|                     how to handle those. | ||||
|             """, | ||||
|         ), | ||||
|         ConfigurationPoint( | ||||
|             'verbosity', | ||||
|             kind=int, | ||||
|             default=0, | ||||
|             description=""" | ||||
|             Verbosity of console output. | ||||
|             """, | ||||
|         ), | ||||
|         ConfigurationPoint(  # TODO: Needs some format for dates etc. / comes from cfg | ||||
|             'default_output_directory', | ||||
|             default="wa_output", | ||||
|             description=""" | ||||
|             The default output directory that will be created if not | ||||
|             specified when invoking a run. | ||||
|             """, | ||||
|         ), | ||||
|     ] | ||||
|     configuration = {cp.name: cp for cp in __configuration} | ||||
|  | ||||
|     dependencies_directory = None  # TODO: What was this for? | ||||
|  | ||||
|  | ||||
| # This is generic top-level configuration for WA runs. | ||||
| class RunConfiguration(Configuration): | ||||
|  | ||||
|     name = "Run Configuration" | ||||
|     __configuration = [ | ||||
|         ConfigurationPoint('run_name', kind=str,  # TODO: Can only come from an agenda | ||||
|                            description=''' | ||||
|                            A descriptive name for this WA run. | ||||
|                            '''), | ||||
|         ConfigurationPoint('output_directory', kind=str, | ||||
|                            # default=settings.default_output_directory, | ||||
|                            description=''' | ||||
|                            The path where WA will output its results. | ||||
|                            '''), | ||||
|         ConfigurationPoint('project', kind=str, | ||||
|                            description=''' | ||||
|                            The project this WA run belongs to. | ||||
|                            '''), | ||||
|         ConfigurationPoint('project_stage', kind=dict, | ||||
|                            description=''' | ||||
|                            The stage of the project this WA run is from. | ||||
|                            '''), | ||||
|         ConfigurationPoint('execution_order', kind=str, default='by_iteration', | ||||
|                            allowed_values=None,  # TODO: | ||||
|                            description=''' | ||||
|                            The order in which workload specs will be executed. | ||||
|                            '''), | ||||
|         ConfigurationPoint('reboot_policy', kind=str, default='as_needed', | ||||
|                            allowed_values=RebootPolicy.valid_policies, | ||||
|                            description=''' | ||||
|                            How the device will be rebooted during the run. | ||||
|                            '''), | ||||
|         ConfigurationPoint('device', kind=str, | ||||
|                            description=''' | ||||
|                            The type of device this WA run will be executed on. | ||||
|                            '''), | ||||
|         ConfigurationPoint('retry_on_status', kind=status_list, | ||||
|                            default=status_list(['FAILED', 'PARTIAL']), | ||||
|                            allowed_values=None,  # TODO: - can it even be done? | ||||
|                            description=''' | ||||
|                            Which iteration results will lead to WA retrying. | ||||
|                            '''), | ||||
|         ConfigurationPoint('max_retries', kind=int, default=3, | ||||
|                            description=''' | ||||
|                            The number of times WA will attempt to retry a failed | ||||
|                            iteration. | ||||
|                            '''), | ||||
|     ] | ||||
|     configuration = {cp.name: cp for cp in __configuration} | ||||
|  | ||||
|  | ||||
| # This is the configuration for WA jobs | ||||
| class JobSpec(Configuration): | ||||
|     __configuration = [ | ||||
|         ConfigurationPoint('iterations', kind=int, default=1, | ||||
|                            description=''' | ||||
|                            How many times to repeat this workload spec | ||||
|                            '''), | ||||
|         ConfigurationPoint('workload_name', kind=str, mandatory=True, | ||||
|                            aliases=["name"], | ||||
|                            description=''' | ||||
|                            The name of the workload to run. | ||||
|                            '''), | ||||
|         ConfigurationPoint('label', kind=str, | ||||
|                            description=''' | ||||
|                            Similar to IDs, but labels do not have the uniqueness restriction. | ||||
|                            If specified, labels will be used by some result | ||||
|                            processors instead of (or in addition to) the workload | ||||
|                            name. For example, the csv result processor will put | ||||
|                            the label in the "workload" column of the CSV file. | ||||
|                            '''), | ||||
|         ConfigurationPoint('runtime_parameters', kind=dict, merge=True, | ||||
|                            aliases=["runtime_params"], | ||||
|                            description=''' | ||||
|                            Rather than configuring the workload itself, | ||||
|                            `runtime_parameters` allow you to change the configuration | ||||
|                            of the underlying device for this particular workload spec, | ||||
|                            e.g. CPU frequencies, governor, etc. | ||||
|                            '''), | ||||
|         ConfigurationPoint('boot_parameters', kind=dict, merge=True, | ||||
|                            aliases=["boot_params"], | ||||
|                            description=''' | ||||
|                            These work in a similar way to runtime_parameters, but | ||||
|                            they get passed to the device when it reboots. | ||||
|                            '''), | ||||
|         ConfigurationPoint('instrumentation', kind=toggle_set, merge=True, | ||||
|                            aliases=["instruments"], | ||||
|                            description=''' | ||||
|                            The instruments to enable (or disable, using a ~) | ||||
|                            during this workload spec. | ||||
|                            '''), | ||||
|         ConfigurationPoint('flash', kind=dict, merge=True, | ||||
|                            description=''' | ||||
|  | ||||
|                            '''), | ||||
|         ConfigurationPoint('classifiers', kind=dict, merge=True, | ||||
|                            description=''' | ||||
|                            Classifiers allow you to tag metrics from this workload | ||||
|                            spec to help in post-processing them. These are often | ||||
|                            used to help identify what runtime_parameters were used | ||||
|                            for results when post-processing. | ||||
|                            '''), | ||||
|     ] | ||||
|     configuration = {cp.name: cp for cp in __configuration} | ||||
|     # section id | ||||
|     # id merging | ||||
|     id_parts = []  # pointers to entries | ||||
|  | ||||
|  | ||||
| # This is used to construct the WA configuration tree | ||||
| class JobsConfiguration(object): | ||||
|  | ||||
|     name = "Jobs Configuration" | ||||
|  | ||||
|     def __init__(self): | ||||
|         self.sections = [] | ||||
|         self.workloads = [] | ||||
|         self.disabled_instruments = [] | ||||
|         self.root_node = Node(global_section()) | ||||
|         self._global_finalized = False | ||||
|  | ||||
|     def set_global_config(self, name, value): | ||||
|         if self._global_finalized: | ||||
|             raise RuntimeError("Cannot add global config once it has been finalized") | ||||
|         if name not in JobSpec.configuration: | ||||
|             raise ConfigError('Unknown global configuration "{}"'.format(name)) | ||||
|         JobSpec.configuration[name].set_value(self.root_node.config, value, | ||||
|                                               check_mandatory=False) | ||||
|  | ||||
|     def finalise_global_config(self): | ||||
|         for cfg_point in JobSpec.configuration.itervalues(): | ||||
|             cfg_point.validate(self.root_node.config) | ||||
|         self._global_finalized = True | ||||
|  | ||||
|     def add_section(self, section, workloads): | ||||
|         new_node = self.root_node.add_section(section) | ||||
|         for workload in workloads: | ||||
|             new_node.add_workload(workload) | ||||
|  | ||||
|     def add_workload(self, workload): | ||||
|         self.root_node.add_workload(workload) | ||||
|  | ||||
|     def disable_instruments(self, instruments): | ||||
|         self.disabled_instruments = instruments | ||||
|  | ||||
|     def only_run_ids(self): | ||||
|         pass | ||||
|  | ||||
|     def generate_job_specs(self): | ||||
|         for leaf in self.root_node.leaves(): | ||||
|             workloads = leaf.workloads | ||||
|             sections = [leaf.config] | ||||
|  | ||||
|             for ancestor in leaf.ancestors(): | ||||
|                 workloads += ancestor.workloads | ||||
|                 sections.insert(0, ancestor.config) | ||||
|  | ||||
|             for workload in workloads: | ||||
|                 job_spec = JobSpec() | ||||
|                 for section in sections: | ||||
|                     job_spec.id_parts.append(section.pop("id")) | ||||
|                     job_spec.update_config(section) | ||||
|                 job_spec.id_parts.append(workload.pop("id")) | ||||
|                 job_spec.update_config(workload) | ||||
|                 yield job_spec | ||||
|  | ||||
|  | ||||
| class global_section(object): | ||||
|     name = "Global Configuration" | ||||
|  | ||||
|     def to_pod(self): | ||||
|         return self.__dict__.copy() | ||||
|  | ||||
| settings = WAConfiguration() | ||||
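A minimal sketch of how these pieces fit together: a Configuration subclass declares its ConfigurationPoints and repeats the configuration = {...} line (as the comment in Configuration notes), while RebootPolicy normalises whatever string it is given. The ExampleConfiguration class and its points are made up for illustration.

from wlauto.core.configuration.configuration import (Configuration,
                                                     ConfigurationPoint,
                                                     RebootPolicy)


class ExampleConfiguration(Configuration):

    name = "Example Configuration"
    __configuration = [
        ConfigurationPoint('greeting', kind=str, default='hello',
                           allowed_values=['hello', 'hi']),
        ConfigurationPoint('retries', kind=int, mandatory=True),
    ]
    # This line must be repeated in every subclass so that it gets its own
    # name -> ConfigurationPoint mapping.
    configuration = {cp.name: cp for cp in __configuration}


example = ExampleConfiguration()           # defaults are applied here
example.set('retries', '3')                # kind=int converts the string to 3
example.update_config({'greeting': 'hi'})
example.finalize()                         # validates mandatory/allowed values

assert RebootPolicy('Each Spec').reboot_on_each_spec
assert not RebootPolicy('never').can_reboot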
							
								
								
									
278  wlauto/core/configuration/parsers.py  Normal file
							| @@ -0,0 +1,278 @@ | ||||
| #    Copyright 2015 ARM Limited | ||||
| # | ||||
| # Licensed under the Apache License, Version 2.0 (the "License"); | ||||
| # you may not use this file except in compliance with the License. | ||||
| # You may obtain a copy of the License at | ||||
| # | ||||
| #     http://www.apache.org/licenses/LICENSE-2.0 | ||||
| # | ||||
| # Unless required by applicable law or agreed to in writing, software | ||||
| # distributed under the License is distributed on an "AS IS" BASIS, | ||||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||||
| # See the License for the specific language governing permissions and | ||||
| # limitations under the License. | ||||
| # | ||||
|  | ||||
| import os | ||||
|  | ||||
| from wlauto.exceptions import ConfigError | ||||
| from wlauto.utils.serializer import read_pod, SerializerSyntaxError | ||||
| from wlauto.utils.types import toggle_set, counter | ||||
| from wlauto.core.configuration.configuration import JobSpec | ||||
|  | ||||
| ######################## | ||||
| ### Helper functions ### | ||||
| ######################## | ||||
|  | ||||
|  | ||||
| def get_aliased_param(cfg_point, d, default=None, pop=True): | ||||
|     """ | ||||
|     Given a ConfigurationPoint and a dict, this function will search the dict for | ||||
|     the ConfigurationPoint's name/aliases. If more than one is found it will raise | ||||
|     a ConfigError. If one (and only one) is found then it will return the value | ||||
|     for the ConfigurationPoint. If neither the name nor any of the aliases are | ||||
|     present in the dict, it will return the "default" parameter of this function. | ||||
|     """ | ||||
|     aliases = [cfg_point.name] + cfg_point.aliases | ||||
|     alias_map = [a for a in aliases if a in d] | ||||
|     if len(alias_map) > 1: | ||||
|         message = 'Only one of {} may be specified in a single entry' | ||||
|         raise ConfigError(message.format(aliases)) | ||||
|     elif alias_map: | ||||
|         if pop: | ||||
|             return d.pop(alias_map[0]) | ||||
|         else: | ||||
|             return d[alias_map[0]] | ||||
|     else: | ||||
|         return default | ||||
|  | ||||
|  | ||||
| def _load_file(filepath, error_name): | ||||
|     if not os.path.isfile(filepath): | ||||
|         raise ValueError("{} does not exist".format(filepath)) | ||||
|     try: | ||||
|         raw = read_pod(filepath) | ||||
|     except SerializerSyntaxError as e: | ||||
|         raise ConfigError('Error parsing {} {}: {}'.format(error_name, filepath, e)) | ||||
|     if not isinstance(raw, dict): | ||||
|         message = '{} does not contain a valid {} structure; top level must be a dict.' | ||||
|         raise ConfigError(message.format(filepath, error_name)) | ||||
|     return raw | ||||
|  | ||||
|  | ||||
| def get_workload_entry(w): | ||||
|     if isinstance(w, basestring): | ||||
|         w = {'name': w} | ||||
|     elif not isinstance(w, dict): | ||||
|         raise ConfigError('Invalid workload entry: "{}"'.format(w)) | ||||
|     return w | ||||
|  | ||||
|  | ||||
| def merge_result_processors_instruments(raw): | ||||
|     instruments = toggle_set(get_aliased_param(JobSpec.configuration['instrumentation'], | ||||
|                                                raw, default=[])) | ||||
|     result_processors = toggle_set(raw.pop('result_processors', [])) | ||||
|     if instruments and result_processors: | ||||
|         conflicts = instruments.conflicts_with(result_processors) | ||||
|         if conflicts: | ||||
|             msg = '"instrumentation" and "result_processors" have conflicting entries: {}' | ||||
|             entries = ', '.join('"{}"'.format(c.strip("~")) for c in conflicts) | ||||
|             raise ConfigError(msg.format(entries)) | ||||
|     raw['instrumentation'] = instruments.merge_with(result_processors) | ||||
|  | ||||
|  | ||||
| def _construct_valid_entry(raw, seen_ids, counter_name): | ||||
|     entries = {} | ||||
|  | ||||
|     # Generate an automatic ID if the entry doesn't already have one | ||||
|     if "id" not in raw: | ||||
|         while True: | ||||
|             new_id = "{}{}".format(counter_name, counter(name=counter_name)) | ||||
|             if new_id not in seen_ids: | ||||
|                 break | ||||
|         entries["id"] = new_id | ||||
|     else: | ||||
|         entries["id"] = raw.pop("id") | ||||
|  | ||||
|     merge_result_processors_instruments(raw) | ||||
|  | ||||
|     # Validate all entries | ||||
|     for cfg_point in JobSpec.configuration.itervalues(): | ||||
|         value = get_aliased_param(cfg_point, raw) | ||||
|         if value is not None: | ||||
|             value = cfg_point.kind(value) | ||||
|             cfg_point.validate_value(cfg_point.name, value) | ||||
|             entries[cfg_point.name] = value | ||||
|  | ||||
|     # error if there are unknown entries | ||||
|     if raw: | ||||
|         msg = 'Invalid entry(ies) in "{}": "{}"' | ||||
|         raise ConfigError(msg.format(entries['id'], ', '.join(raw.keys()))) | ||||
|  | ||||
|     return entries | ||||
|  | ||||
| ############### | ||||
| ### Parsers ### | ||||
| ############### | ||||
|  | ||||
|  | ||||
| class ConfigParser(object): | ||||
|  | ||||
|     def __init__(self, wa_config, run_config, jobs_config, plugin_cache): | ||||
|         self.wa_config = wa_config | ||||
|         self.run_config = run_config | ||||
|         self.jobs_config = jobs_config | ||||
|         self.plugin_cache = plugin_cache | ||||
|  | ||||
|     def load_from_path(self, filepath): | ||||
|         self.load(_load_file(filepath, "Config"), filepath) | ||||
|  | ||||
|     def load(self, raw, source):  # pylint: disable=too-many-branches | ||||
|         try: | ||||
|             if 'run_name' in raw: | ||||
|                 msg = '"run_name" can only be specified in the config section of an agenda' | ||||
|                 raise ConfigError(msg) | ||||
|             if 'id' in raw: | ||||
|                 raise ConfigError('"id" cannot be set globally') | ||||
|  | ||||
|             merge_result_processors_instruments(raw) | ||||
|  | ||||
|             for cfg_point in self.wa_config.configuration.itervalues(): | ||||
|                 value = get_aliased_param(cfg_point, raw) | ||||
|                 if value is not None: | ||||
|                     self.wa_config.set(cfg_point.name, value) | ||||
|  | ||||
|             for cfg_point in self.run_config.configuration.itervalues(): | ||||
|                 value = get_aliased_param(cfg_point, raw) | ||||
|                 if value is not None: | ||||
|                     self.run_config.set(cfg_point.name, value) | ||||
|  | ||||
|             for cfg_point in JobSpec.configuration.itervalues(): | ||||
|                 value = get_aliased_param(cfg_point, raw) | ||||
|                 if value is not None: | ||||
|                     self.jobs_config.set_global_config(cfg_point.name, value) | ||||
|  | ||||
|             for name, value in raw.iteritems(): | ||||
|                 if self.plugin_cache.is_global_alias(name): | ||||
|                     self.plugin_cache.add_global_alias(name, value, source) | ||||
|  | ||||
|                 if "device_config" in raw: | ||||
|                     if self.plugin_cache.is_device(name): | ||||
|                         msg = "You cannot specify 'device_config' and '{}' in the same config" | ||||
|                         raise ConfigError(msg.format(name)) | ||||
|                     self.plugin_cache.add_device_config(raw.pop('device_config', dict()), source) | ||||
|  | ||||
|                 # Assume that all leftover config is for a plug-in | ||||
|                 # it is up to PluginCache to assert this assumption | ||||
|                 self.plugin_cache.add(name, value, source) | ||||
|  | ||||
|         except ConfigError as e: | ||||
|             raise ConfigError('Error in "{}":\n{}'.format(source, str(e))) | ||||
|  | ||||
|  | ||||
| class AgendaParser(object): | ||||
|  | ||||
|     def __init__(self, config_parser, wa_config, run_config, jobs_config, plugin_cache): | ||||
|         self.config_parser = config_parser | ||||
|         self.wa_config = wa_config | ||||
|         self.run_config = run_config | ||||
|         self.jobs_config = jobs_config | ||||
|         self.plugin_cache = plugin_cache | ||||
|  | ||||
|     def load(self, filepath):  # pylint: disable=too-many-branches, too-many-locals | ||||
|         raw = _load_file(filepath, 'Agenda') | ||||
|         try: | ||||
|             # PHASE 1: Populate and validate configuration. | ||||
|             for name in ['config', 'global']: | ||||
|                 entry = raw.pop(name, {}) | ||||
|                 if not isinstance(entry, dict): | ||||
|                     raise ConfigError('Invalid entry "{}" in {} - must be a dict'.format(name, filepath)) | ||||
|                 if 'run_name' in entry: | ||||
|                     self.run_config.set('run_name', entry.pop('run_name')) | ||||
|                 self.config_parser.load(entry, filepath) | ||||
|  | ||||
|             # PHASE 2: Finalizing config. | ||||
|             # Agenda config is the final config, so we can now finalise WA and run config | ||||
|             self.wa_config.finalize() | ||||
|             self.run_config.finalize() | ||||
|             self.jobs_config.finalise_global_config() | ||||
|             #TODO: Device stuff | ||||
|             # target_manager_class = self.plugin_cache.get_plugin_class(self.run_config.device) | ||||
|  | ||||
|             # PHASE 3: Getting "section" and "workload" entries. | ||||
|             sections = raw.pop("sections", []) | ||||
|             if not isinstance(sections, list): | ||||
|                 raise ConfigError('Invalid entry "sections" in {} - must be a list'.format(filepath)) | ||||
|             global_workloads = raw.pop("workloads", []) | ||||
|             if not isinstance(global_workloads, list): | ||||
|                 raise ConfigError('Invalid entry "workloads" in {} - must be a list'.format(filepath)) | ||||
|  | ||||
|             # PHASE 4: Collecting existing workload and section IDs | ||||
|             seen_section_ids = set() | ||||
|             for section in sections: | ||||
|                 entry_id = section.get("id") | ||||
|                 if entry_id is None: | ||||
|                     continue | ||||
|                 if entry_id in seen_section_ids: | ||||
|                     raise ConfigError('Duplicate section ID "{}".'.format(entry_id)) | ||||
|                 seen_section_ids.add(entry_id) | ||||
|  | ||||
|             seen_workload_ids = set() | ||||
|             for workload in global_workloads: | ||||
|                 entry_id = workload.get("id") | ||||
|                 if entry_id is None: | ||||
|                     continue | ||||
|                 if entry_id in seen_workload_ids: | ||||
|                     raise ConfigError('Duplicate workload ID "{}".'.format(entry_id)) | ||||
|                 seen_workload_ids.add(entry_id) | ||||
|  | ||||
|             # PHASE 5: Assigning IDs and validating entries | ||||
|             # TODO: Error handling for workload errors vs section errors, etc. | ||||
|             for workload in global_workloads: | ||||
|                 self.jobs_config.add_workload(self._process_entry(workload, seen_workload_ids)) | ||||
|  | ||||
|             for section in sections: | ||||
|                 workloads = [] | ||||
|                 for workload in section.pop("workloads", []): | ||||
|                     workloads.append(self._process_entry(workload, seen_workload_ids)) | ||||
|                 if "params" in section: | ||||
|                     section["runtime_params"] = section.pop("params") | ||||
|                 section = _construct_valid_entry(section, seen_section_ids, "s") | ||||
|                 self.jobs_config.add_section(section, workloads) | ||||
|  | ||||
|         except (ConfigError, SerializerSyntaxError) as e: | ||||
|             raise ConfigError("Error in '{}':\n\t{}".format(filepath, str(e))) | ||||
|  | ||||
|     def _process_entry(self, entry, seen_workload_ids): | ||||
|         entry = get_workload_entry(entry) | ||||
|         if "params" in entry: | ||||
|             entry["workload_parameters"] = entry.pop("params") | ||||
|         return _construct_valid_entry(entry, seen_workload_ids, "wk") | ||||
|  | ||||
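As a hedged illustration of the structure load() consumes, here is a small agenda in the parsed form the YAML loader would return (shown as a Python dict). The workload and section names are made up, and the exact keys accepted ultimately depend on get_workload_entry() and _construct_valid_entry(), which are defined elsewhere in this file.

    # Hypothetical parsed agenda (illustrative only).
    agenda = {
        'config': {'run_name': 'example_run'},   # consumed in PHASE 1
        'workloads': [                           # PHASES 4-5
            {'id': '1', 'name': 'dhrystone', 'params': {'threads': 4}},  # 'params' -> workload_parameters
        ],
        'sections': [                            # PHASES 4-5
            {'id': 'slow', 'params': {'frequency': 'min'},               # 'params' -> runtime_params
             'workloads': [{'name': 'memcpy'}]},
        ],
    }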
|  | ||||
| class EnvironmentVarsParser(object): | ||||
|  | ||||
|     def __init__(self, wa_config, environ): | ||||
|         user_directory = environ.pop('WA_USER_DIRECTORY', '') | ||||
|         if user_directory: | ||||
|             wa_config.set('user_directory', user_directory) | ||||
|         plugin_paths = environ.pop('WA_PLUGIN_PATHS', '') | ||||
|         if plugin_paths: | ||||
|             wa_config.set('plugin_paths', plugin_paths.split(os.pathsep)) | ||||
|         # Legacy alias: WA_EXTENSION_PATHS maps onto the same 'plugin_paths' | ||||
|         # setting, and takes precedence if both variables are set. | ||||
|         ext_paths = environ.pop('WA_EXTENSION_PATHS', '') | ||||
|         if ext_paths: | ||||
|             wa_config.set('plugin_paths', ext_paths.split(os.pathsep)) | ||||
|  | ||||
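A minimal usage sketch, assuming a wa_config object exposing set() has already been constructed; the paths are placeholders. Passing a copy of os.environ keeps the parser's pop() calls from mutating the real environment.

    import os

    environ = dict(os.environ)
    environ['WA_USER_DIRECTORY'] = '/home/user/.workload_automation'
    environ['WA_PLUGIN_PATHS'] = os.pathsep.join(['/opt/wa/plugins', '/srv/wa/extra'])
    EnvironmentVarsParser(wa_config, environ)  # wa_config assumed to exist already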
|  | ||||
| # Command-line options are parsed by the "run" command; this class routes the | ||||
| # relevant arguments to the correct configuration points and keeps a record of | ||||
| # how WA was invoked. | ||||
| class CommandLineArgsParser(object): | ||||
|  | ||||
|     def __init__(self, cmd_args, wa_config, run_config, jobs_config): | ||||
|         wa_config.set("verbosity", cmd_args.verbosity) | ||||
|         # TODO: Is this correct? Does there need to be a third output dir param | ||||
|         run_config.set('output_directory', cmd_args.output_directory) | ||||
|         disabled_instruments = toggle_set(["~{}".format(i) for i in cmd_args.instruments_to_disable]) | ||||
|         jobs_config.disable_instruments(disabled_instruments) | ||||
|         jobs_config.only_run_ids(cmd_args.only_run_ids) | ||||
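A hedged sketch of how the "run" command might hand its parsed arguments over; cmd_args only needs the four attributes read above, so a plain argparse.Namespace stands in here, and the config objects are assumed to have been constructed already.

    from argparse import Namespace

    cmd_args = Namespace(verbosity=1,
                         output_directory='wa_output',
                         instruments_to_disable=['trace-cmd'],
                         only_run_ids=[])
    CommandLineArgsParser(cmd_args, wa_config, run_config, jobs_config)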
							
								
								
									
55    wlauto/core/configuration/plugin_cache.py    Normal file
							| @@ -0,0 +1,55 @@ | ||||
| #    Copyright 2016 ARM Limited | ||||
| # | ||||
| # Licensed under the Apache License, Version 2.0 (the "License"); | ||||
| # you may not use this file except in compliance with the License. | ||||
| # You may obtain a copy of the License at | ||||
| # | ||||
| #     http://www.apache.org/licenses/LICENSE-2.0 | ||||
| # | ||||
| # Unless required by applicable law or agreed to in writing, software | ||||
| # distributed under the License is distributed on an "AS IS" BASIS, | ||||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||||
| # See the License for the specific language governing permissions and | ||||
| # limitations under the License. | ||||
|  | ||||
|  | ||||
| from collections import OrderedDict | ||||
|  | ||||
|  | ||||
| class PluginCache(object): | ||||
|  | ||||
|     def __init__(self): | ||||
|         self.plugin_configs = {} | ||||
|         self.device_config = OrderedDict() | ||||
|         self.source_list = [] | ||||
|         self.finalised = False | ||||
|         # TODO: Build dicts of global_alias: [list of destinations] | ||||
|  | ||||
|     def add_source(self, source): | ||||
|         if source in self.source_list: | ||||
|             raise Exception("Source has already been added.") | ||||
|         self.source_list.append(source) | ||||
|  | ||||
|     def add(self, name, config, source): | ||||
|         if source not in self.source_list: | ||||
|             msg = "Source '{}' has not been added to the plugin cache." | ||||
|             raise Exception(msg.format(source)) | ||||
|  | ||||
|         if name not in self.plugin_configs: | ||||
|             self.plugin_configs[name] = OrderedDict() | ||||
|         self.plugin_configs[name][source] = config | ||||
|  | ||||
|     def finalise_config(self): | ||||
|         pass | ||||
|  | ||||
|     def disable_instrument(self, instrument): | ||||
|         pass | ||||
|  | ||||
|     def add_device_config(self, config, source): | ||||
|         pass | ||||
|  | ||||
|     # Referenced by the config parsers; stubbed here alongside the other | ||||
|     # query methods until PluginCache is fully implemented. | ||||
|     def is_device(self, name): | ||||
|         pass | ||||
|  | ||||
|     def is_global_alias(self, name): | ||||
|         pass | ||||
|  | ||||
|     def add_global_alias(self, name, value, source): | ||||
|         pass | ||||
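A minimal sketch of the intended add_source()/add() flow; the plug-in name, source paths, and values are illustrative.

    cache = PluginCache()
    cache.add_source('config.yaml')
    cache.add('trace-cmd', {'buffer_size': 80}, 'config.yaml')

    # A second source is kept separately (in insertion order); re-adding the
    # same plug-in for the same source would replace the earlier value.
    cache.add_source('agenda.yaml')
    cache.add('trace-cmd', {'buffer_size': 120}, 'agenda.yaml')
    assert list(cache.plugin_configs['trace-cmd']) == ['config.yaml', 'agenda.yaml']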
							
								
								
									
53    wlauto/core/configuration/tree.py    Normal file
							| @@ -0,0 +1,53 @@ | ||||
| #    Copyright 2016 ARM Limited | ||||
| # | ||||
| # Licensed under the Apache License, Version 2.0 (the "License"); | ||||
| # you may not use this file except in compliance with the License. | ||||
| # You may obtain a copy of the License at | ||||
| # | ||||
| #     http://www.apache.org/licenses/LICENSE-2.0 | ||||
| # | ||||
| # Unless required by applicable law or agreed to in writing, software | ||||
| # distributed under the License is distributed on an "AS IS" BASIS, | ||||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||||
| # See the License for the specific language governing permissions and | ||||
| # limitations under the License. | ||||
|  | ||||
|  | ||||
| class Node(object): | ||||
|     @property | ||||
|     def is_leaf(self): | ||||
|         return not bool(self.children) | ||||
|  | ||||
|     def __init__(self, value, parent=None): | ||||
|         self.workloads = [] | ||||
|         self.parent = parent | ||||
|         self.children = [] | ||||
|         self.config = value | ||||
|  | ||||
|     def add_section(self, section): | ||||
|         new_node = Node(section, parent=self) | ||||
|         self.children.append(new_node) | ||||
|         return new_node | ||||
|  | ||||
|     def add_workload(self, workload): | ||||
|         self.workloads.append(workload) | ||||
|  | ||||
|     def descendants(self): | ||||
|         for child in self.children: | ||||
|             for n in child.descendants(): | ||||
|                 yield n | ||||
|             yield child | ||||
|  | ||||
|     def ancestors(self): | ||||
|         if self.parent is not None: | ||||
|             yield self.parent | ||||
|             for ancestor in self.parent.ancestors(): | ||||
|                 yield ancestor | ||||
|  | ||||
|     def leaves(self): | ||||
|         if self.is_leaf: | ||||
|             yield self | ||||
|         else: | ||||
|             for n in self.descendants(): | ||||
|                 if n.is_leaf: | ||||
|                     yield n | ||||
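A small usage sketch of the tree: sections nest under a root node, workloads attach to individual nodes, and leaves()/ancestors() walk the resulting structure. The string values here are placeholders.

    root = Node('global config')
    section_a = root.add_section('section A')
    section_b = root.add_section('section B')
    nested = section_a.add_section('section A.1')
    nested.add_workload('dhrystone')

    # descendants() is post-order, so leaves() yields 'section A.1' before 'section B'.
    assert [n.config for n in root.leaves()] == ['section A.1', 'section B']
    assert [n.config for n in nested.ancestors()] == ['section A', 'global config']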