# Copyright 2015-2018 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# pylint: disable=no-self-use

import os
import logging
from functools import reduce  # pylint: disable=redefined-builtin

from devlib.utils.types import identifier

from wa.framework.configuration.core import JobSpec
from wa.framework.exception import ConfigError
from wa.utils import log
from wa.utils.serializer import json, read_pod, SerializerSyntaxError
from wa.utils.types import toggle_set, counter
from wa.utils.misc import merge_config_values, isiterable


logger = logging.getLogger('config')


class ConfigParser(object):

    def load_from_path(self, state, filepath):
        raw, includes = _load_file(filepath, "Config")
        self.load(state, raw, filepath)
        return includes

    def load(self, state, raw, source, wrap_exceptions=True):  # pylint: disable=too-many-branches
        logger.debug('Parsing config from "{}"'.format(source))
        log.indent()
        try:
            state.plugin_cache.add_source(source)
            if 'run_name' in raw:
                msg = '"run_name" can only be specified in the config '\
                      'section of an agenda'
                raise ConfigError(msg)

            if 'id' in raw:
                raise ConfigError('"id" cannot be set globally')

            merge_augmentations(raw)

            # Get WA core configuration
            for cfg_point in state.settings.configuration.values():
                value = pop_aliased_param(cfg_point, raw)
                if value is not None:
                    logger.debug('Setting meta "{}" to "{}"'.format(cfg_point.name, value))
                    state.settings.set(cfg_point.name, value)

            # Get run specific configuration
            for cfg_point in state.run_config.configuration.values():
                value = pop_aliased_param(cfg_point, raw)
                if value is not None:
                    logger.debug('Setting run "{}" to "{}"'.format(cfg_point.name, value))
                    state.run_config.set(cfg_point.name, value)

            # Get global job spec configuration
            for cfg_point in JobSpec.configuration.values():
                value = pop_aliased_param(cfg_point, raw)
                if value is not None:
                    logger.debug('Setting global "{}" to "{}"'.format(cfg_point.name, value))
                    state.jobs_config.set_global_value(cfg_point.name, value)

            for name, values in raw.items():
                # Assume that any leftover config is for a plug-in or a global
                # alias; it is up to PluginCache to validate this assumption.
                logger.debug('Caching "{}" with "{}"'.format(identifier(name), values))
                state.plugin_cache.add_configs(identifier(name), values, source)

        except ConfigError as e:
            if wrap_exceptions:
                raise ConfigError('Error in "{}":\n{}'.format(source, str(e)))
            else:
                raise e
        finally:
            log.dedent()
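
# Illustrative sketch (hypothetical file, not part of the module): a config
# file consumed by ConfigParser is a flat mapping. Recognised keys are routed
# to the meta, run, or global job-spec configuration; anything left over is
# assumed to be plugin configuration and is cached for later validation, e.g.:
#
#     augmentations: [trace-cmd]
#     device: generic_android
#     trace-cmd:
#         buffer_size: 80000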


class AgendaParser(object):

    def load_from_path(self, state, filepath):
        raw, includes = _load_file(filepath, 'Agenda')
        self.load(state, raw, filepath)
        return includes

    def load(self, state, raw, source):
        logger.debug('Parsing agenda from "{}"'.format(source))
        log.indent()
        try:
            if not isinstance(raw, dict):
                raise ConfigError('Invalid agenda, top level entry must be a dict')

            self._populate_and_validate_config(state, raw, source)
            sections = self._pop_sections(raw)
            global_workloads = self._pop_workloads(raw)
            if not global_workloads:
                msg = 'No jobs available. Please ensure you have specified at '\
                      'least one workload to run.'
                raise ConfigError(msg)

            if raw:
                msg = 'Invalid top level agenda entry(ies): "{}"'
                raise ConfigError(msg.format('", "'.join(list(raw.keys()))))

            sect_ids, wkl_ids = self._collect_ids(sections, global_workloads)
            self._process_global_workloads(state, global_workloads, wkl_ids)
            self._process_sections(state, sections, sect_ids, wkl_ids)

            state.agenda = source

        except (ConfigError, SerializerSyntaxError) as e:
            raise ConfigError('Error in "{}":\n\t{}'.format(source, str(e)))
        finally:
            log.dedent()

    def _populate_and_validate_config(self, state, raw, source):
        for name in ['config', 'global']:
            entry = raw.pop(name, None)
            if entry is None:
                continue

            if not isinstance(entry, dict):
                msg = 'Invalid entry "{}" - must be a dict'
                raise ConfigError(msg.format(name))

            if 'run_name' in entry:
                value = entry.pop('run_name')
                logger.debug('Setting run name to "{}"'.format(value))
                state.run_config.set('run_name', value)

            state.load_config(entry, '{}/{}'.format(source, name))

    def _pop_sections(self, raw):
        sections = raw.pop("sections", [])
        if not isinstance(sections, list):
            raise ConfigError('Invalid entry "sections" - must be a list')
        for section in sections:
            if not hasattr(section, 'items'):
                raise ConfigError('Invalid section "{}" - must be a dict'.format(section))
        return sections

    def _pop_workloads(self, raw):
        workloads = raw.pop("workloads", [])
        if not isinstance(workloads, list):
            raise ConfigError('Invalid entry "workloads" - must be a list')
        return workloads

    def _collect_ids(self, sections, global_workloads):
        seen_section_ids = set()
        seen_workload_ids = set()

        for workload in global_workloads:
            workload = _get_workload_entry(workload)
            _collect_valid_id(workload.get("id"), seen_workload_ids, "workload")

        for section in sections:
            _collect_valid_id(section.get("id"), seen_section_ids, "section")
            for workload in section.get("workloads", []):
                workload = _get_workload_entry(workload)
                _collect_valid_id(workload.get("id"), seen_workload_ids,
                                  "workload")

        return seen_section_ids, seen_workload_ids

    def _process_global_workloads(self, state, global_workloads, seen_wkl_ids):
        for workload_entry in global_workloads:
            workload = _process_workload_entry(workload_entry, seen_wkl_ids,
                                               state.jobs_config)
            state.jobs_config.add_workload(workload)

    def _process_sections(self, state, sections, seen_sect_ids, seen_wkl_ids):
        for section in sections:
            workloads = []
            for workload_entry in section.pop("workloads", []):
                workload = _process_workload_entry(workload_entry, seen_wkl_ids,
                                                   state.jobs_config)
                workloads.append(workload)

            if 'params' in section:
                if 'runtime_params' in section:
                    msg = 'both "params" and "runtime_params" specified in a '\
                          'section: "{}"'
                    raise ConfigError(msg.format(json.dumps(section, indent=None)))
                section['runtime_params'] = section.pop('params')

            group = section.pop('group', None)
            section = _construct_valid_entry(section, seen_sect_ids,
                                             "s", state.jobs_config)
            state.jobs_config.add_section(section, workloads, group)
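
# Illustrative sketch (hypothetical agenda, shown for orientation only): the
# parser above expects a top-level dict along these lines:
#
#     config:
#         run_name: example-run
#     workloads:
#         - dhrystone
#         - id: wk2
#           name: memcpy
#     sections:
#         - id: s1
#           runtime_params:
#               frequency: max
#
# "config"/"global" entries are handled by _populate_and_validate_config,
# while "sections" and "workloads" are popped and processed by the
# _process_* methods.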


########################
### Helper functions ###
########################


def pop_aliased_param(cfg_point, d, default=None):
    """
    Given a ConfigurationPoint and a dict, this function will search the dict
    for the ConfigurationPoint's name and any of its aliases. If more than one
    is found, it will raise a ConfigError. If exactly one is found, it will
    return the value for the ConfigurationPoint. If neither the name nor any
    of the aliases are present in the dict, it will return the "default"
    parameter of this function.
    """
    aliases = [cfg_point.name] + cfg_point.aliases
    alias_map = [a for a in aliases if a in d]
    if len(alias_map) > 1:
        raise ConfigError('Duplicate entry: {}'.format(aliases))
    elif alias_map:
        return d.pop(alias_map[0])
    else:
        return default
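
# Illustrative sketch (hypothetical values, not part of the module): for a
# ConfigurationPoint named "augmentations" with an "instruments" alias,
# pop_aliased_param resolves whichever spelling is present and pops it:
#
#     >>> d = {'instruments': ['trace-cmd'], 'iterations': 3}
#     >>> pop_aliased_param(aug_cfg_point, d)   # -> ['trace-cmd']
#     >>> d                                     # matched key has been popped
#     {'iterations': 3}
#
# Had both "augmentations" and "instruments" been present, a ConfigError
# would have been raised instead.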


def _load_file(filepath, error_name):
    if not os.path.isfile(filepath):
        raise ValueError("{} does not exist".format(filepath))
    try:
        raw = read_pod(filepath)
        includes = _process_includes(raw, filepath, error_name)
    except SerializerSyntaxError as e:
        raise ConfigError('Error parsing {} {}: {}'.format(error_name, filepath, e))
    if not isinstance(raw, dict):
        message = '{} does not contain a valid {} structure; top level must be a dict.'
        raise ConfigError(message.format(filepath, error_name))
    return raw, includes


def _process_includes(raw, filepath, error_name):
    if not raw:
        return []

    source_dir = os.path.dirname(filepath)
    included_files = []
    replace_value = None

    if hasattr(raw, 'items'):
        for key, value in raw.items():
            if key == 'include#':
                include_path = os.path.expanduser(os.path.join(source_dir, value))
                included_files.append(include_path)
                replace_value, includes = _load_file(include_path, error_name)
                included_files.extend(includes)
            elif hasattr(value, 'items') or isiterable(value):
                includes = _process_includes(value, filepath, error_name)
                included_files.extend(includes)
    elif isiterable(raw):
        for element in raw:
            if hasattr(element, 'items') or isiterable(element):
                includes = _process_includes(element, filepath, error_name)
                included_files.extend(includes)

    if replace_value is not None:
        del raw['include#']
        for key, value in replace_value.items():
            raw[key] = merge_config_values(value, raw.get(key, None))

    return included_files
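
# Illustrative sketch (assumed file name): a mapping anywhere in the structure
# may pull in another file via the special "include#" key, e.g.
#
#     config:
#         include#: common-config.yaml
#
# The included file is loaded and merged into the including mapping via
# merge_config_values (with the including mapping's own values passed as the
# overriding side), and the resolved paths of all includes are returned so
# that the caller can track them as dependencies of the run.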


def merge_augmentations(raw):
    """
    Since, from a configuration perspective, output processors and instruments
    are handled identically, their configuration entries are interchangeable.
    E.g. it is valid to specify an output processor in an instruments list.
    This is to make things easier for users, as, from their perspective, the
    distinction is somewhat arbitrary.

    For backwards compatibility, both entries are still valid, and this
    function merges them together into a single "augmentations" set, ensuring
    that there are no conflicts between the entries.
    """
    cfg_point = JobSpec.configuration['augmentations']
    names = [cfg_point.name, ] + cfg_point.aliases

    entries = []
    for n in names:
        if n not in raw:
            continue
        value = raw.pop(n)
        try:
            entries.append(toggle_set(value))
        except TypeError as exc:
            msg = 'Invalid value "{}" for "{}": {}'
            raise ConfigError(msg.format(value, n, exc))

    # Make sure none of the specified aliases conflict with each other
    to_check = list(entries)
    while len(to_check) > 1:
        check_entry = to_check.pop()
        for e in to_check:
            conflicts = check_entry.conflicts_with(e)
            if conflicts:
                msg = '"{}" and "{}" have conflicting entries: {}'
                conflict_string = ', '.join('"{}"'.format(c.strip("~"))
                                            for c in conflicts)
                raise ConfigError(msg.format(check_entry, e, conflict_string))

    if entries:
        raw['augmentations'] = reduce(lambda x, y: x.union(y), entries)
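
# Illustrative sketch (hypothetical values): given a config that specifies
#
#     instruments: [trace-cmd]
#     augmentations: [csv]
#
# the two toggle_sets are unioned into a single entry equivalent to
#
#     augmentations: [trace-cmd, csv]
#
# whereas naming "trace-cmd" in one entry and its disabled form "~trace-cmd"
# in the other would raise a ConfigError, as the two entries conflict.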


def _pop_aliased(d, names, entry_id):
    name_count = sum(1 for n in names if n in d)
    if name_count > 1:
        names_list = ', '.join(names)
        msg = 'Invalid workload entry "{}": at most one of ({}) must be specified.'
        raise ConfigError(msg.format(entry_id, names_list))
    for name in names:
        if name in d:
            return d.pop(name)
    return None


def _construct_valid_entry(raw, seen_ids, prefix, jobs_config):
    workload_entry = {}

    # Generate an automatic ID if the entry doesn't already have one
    if 'id' not in raw:
        while True:
            new_id = '{}{}'.format(prefix, counter(name=prefix))
            if new_id not in seen_ids:
                break
        workload_entry['id'] = new_id
        seen_ids.add(new_id)
    else:
        workload_entry['id'] = raw.pop('id')

    # Process instruments and output processors
    merge_augmentations(raw)

    # Validate all remaining JobSpec configuration entries
    for name, cfg_point in JobSpec.configuration.items():
        value = pop_aliased_param(cfg_point, raw)
        if value is not None:
            value = cfg_point.kind(value)
            cfg_point.validate_value(name, value)
            workload_entry[name] = value

    if "augmentations" in workload_entry:
        if '~~' in workload_entry['augmentations']:
            msg = '"~~" can only be specified in top-level config, and not '\
                  'for individual workloads/sections'
            raise ConfigError(msg)
        jobs_config.update_augmentations(workload_entry['augmentations'])

    # Error if any unknown entries remain
    if raw:
        msg = 'Invalid entry(ies) in "{}": "{}"'
        raise ConfigError(msg.format(workload_entry['id'], ', '.join(list(raw.keys()))))

    return workload_entry
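
# Illustrative note: entries without an explicit "id" get one generated from
# the prefix plus a per-prefix counter, so auto-named workloads become "wk1",
# "wk2", ... and sections "s1", "s2", ..., skipping any IDs the user has
# already claimed explicitly.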


def _collect_valid_id(entry_id, seen_ids, entry_type):
    if entry_id is None:
        return
    entry_id = str(entry_id)
    if entry_id in seen_ids:
        raise ConfigError('Duplicate {} ID "{}".'.format(entry_type, entry_id))
    # "-" is reserved for joining section and workload IDs
    if "-" in entry_id:
        msg = 'Invalid {} ID "{}"; IDs cannot contain a "-"'
        raise ConfigError(msg.format(entry_type, entry_id))
    if entry_id == "global":
        msg = 'Invalid {} ID "global"; "global" is a reserved ID'
        raise ConfigError(msg.format(entry_type))
    seen_ids.add(entry_id)


def _get_workload_entry(workload):
    if isinstance(workload, str):
        workload = {'name': workload}
    elif not isinstance(workload, dict):
        raise ConfigError('Invalid workload entry: "{}"'.format(workload))
    return workload


def _process_workload_entry(workload, seen_workload_ids, jobs_config):
    workload = _get_workload_entry(workload)
    workload = _construct_valid_entry(workload, seen_workload_ids,
                                      "wk", jobs_config)
    if "workload_name" not in workload:
        raise ConfigError('No workload name specified in entry {}'.format(workload['id']))
    return workload