Mirror of https://github.com/ARM-software/workload-automation.git (synced 2025-01-31 02:01:16 +00:00)
pluginloader: Replaced extension loader with WA3 plugin loader

In the process, removed modules and boot_strap.py, and renamed Extensions to Plugins. Louie is now monkey-patched rather than carrying a modified version in external.
This commit is contained in: parent a4a428c9ae, commit 1e6763ee9b
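What follows is a minimal sketch of the monkey-patching approach the message describes (an assumption for illustration, not code from this commit): import the installed louie package and replace selected attributes with WA-specific wrappers, instead of shipping a modified copy under wlauto/external. The patched attribute and wrapper behaviour below are hypothetical examples, not WA's actual patch set.

# Illustrative sketch only; 'disconnect' is just an example target.
import louie.dispatcher

_original_disconnect = louie.dispatcher.disconnect

def _patched_disconnect(*args, **kwargs):
    # Hypothetical WA-specific behaviour would go here before delegating
    # to the original implementation.
    return _original_disconnect(*args, **kwargs)

louie.dispatcher.disconnect = _patched_disconnect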
.gitignore (vendored, 2 lines changed)

@@ -12,7 +12,7 @@ dist/
.ropeproject/
wa_output/
doc/source/api/
-doc/source/extensions/
+doc/source/plugins/
MANIFEST
wlauto/external/uiautomator/bin/
wlauto/external/uiautomator/*.properties
setup.py (1 line changed)

@@ -78,6 +78,7 @@ params = dict(
'pyYAML', # YAML-formatted agenda parsing
'requests', # Fetch assets over HTTP
'devlib', # Interacting with devices
+'louie' # Handles signal callbacks
],
extras_require={
'other': ['jinja2', 'pandas>=0.13.1'],
@@ -13,18 +13,18 @@
# limitations under the License.
#

-from wlauto.core.bootstrap import settings # NOQA
+from wlauto.core.config.core import settings # NOQA
from wlauto.core.device_manager import DeviceManager, RuntimeParameter, CoreParameter # NOQA
from wlauto.core.command import Command # NOQA
from wlauto.core.workload import Workload # NOQA
-from wlauto.core.extension import Module, Parameter, Artifact, Alias # NOQA
-from wlauto.core.extension_loader import ExtensionLoader # NOQA
+from wlauto.core.plugin import Parameter, Artifact, Alias # NOQA
+import wlauto.core.pluginloader as PluginLoader # NOQA
from wlauto.core.instrumentation import Instrument # NOQA
from wlauto.core.result import ResultProcessor, IterationResult # NOQA
from wlauto.core.resource import ResourceGetter, Resource, GetterPriority, NO_ONE # NOQA
-from wlauto.core.exttype import get_extension_type # NOQA Note: MUST be imported after other core imports.
+from wlauto.core.exttype import get_plugin_type # NOQA Note: MUST be imported after other core imports.

-from wlauto.common.resources import File, ExtensionAsset, Executable
+from wlauto.common.resources import File, PluginAsset, Executable
from wlauto.common.android.resources import ApkFile, JarFile
from wlauto.common.android.workload import (UiAutomatorWorkload, ApkWorkload, AndroidBenchmark, # NOQA
AndroidUiAutoBenchmark, GameWorkload) # NOQA
@@ -27,7 +27,7 @@ from collections import OrderedDict

import yaml

-from wlauto import ExtensionLoader, Command, settings
+from wlauto import PluginLoader, Command, settings
from wlauto.exceptions import CommandError, ConfigError
from wlauto.utils.cli import init_argument_parser
from wlauto.utils.misc import (capitalize, check_output,
@@ -139,8 +139,8 @@ class CreateWorkloadSubcommand(CreateSubcommand):
class CreatePackageSubcommand(CreateSubcommand):

name = 'package'
-description = '''Create a new empty Python package for WA extensions. On installation,
-this package will "advertise" itself to WA so that Extensions with in it will
+description = '''Create a new empty Python package for WA plugins. On installation,
+this package will "advertise" itself to WA so that Plugins with in it will
be loaded by WA when it runs.'''

def initialize(self):
@@ -156,9 +156,9 @@ class CreatePackageSubcommand(CreateSubcommand):
def execute(self, args): # pylint: disable=R0201
package_dir = args.path or os.path.abspath('.')
template_path = os.path.join(TEMPLATES_DIR, 'setup.template')
-self.create_extensions_package(package_dir, args.name, template_path, args.force)
+self.create_plugins_package(package_dir, args.name, template_path, args.force)

-def create_extensions_package(self, location, name, setup_template_path, overwrite=False):
+def create_plugins_package(self, location, name, setup_template_path, overwrite=False):
package_path = os.path.join(location, name)
if os.path.exists(package_path):
if overwrite:
@@ -178,13 +178,13 @@ class CreateAgendaSubcommand(CreateSubcommand):

name = 'agenda'
description = """
-Create an agenda whith the specified extensions enabled. And parameters set to their
+Create an agenda whith the specified plugins enabled. And parameters set to their
default values.
"""

def initialize(self):
-self.parser.add_argument('extensions', nargs='+',
-help='Extensions to be added')
+self.parser.add_argument('plugins', nargs='+',
+help='Plugins to be added')
self.parser.add_argument('-i', '--iterations', type=int, default=1,
help='Sets the number of iterations for all workloads')
self.parser.add_argument('-r', '--include-runtime-params', action='store_true',
@@ -192,23 +192,23 @@ class CreateAgendaSubcommand(CreateSubcommand):
Adds runtime parameters to the global section of the generated
agenda. Note: these do not have default values, so only name
will be added. Also, runtime parameters are devices-specific, so
-a device must be specified (either in the list of extensions,
+a device must be specified (either in the list of plugins,
or in the existing config).
""")
self.parser.add_argument('-o', '--output', metavar='FILE',
help='Output file. If not specfied, STDOUT will be used instead.')

def execute(self, args): # pylint: disable=no-self-use,too-many-branches,too-many-statements
-loader = ExtensionLoader(packages=settings.extension_packages,
-paths=settings.extension_paths)
+loader = PluginLoader(packages=settings.plugin_packages,
+paths=settings.plugin_paths)
agenda = OrderedDict()
agenda['config'] = OrderedDict(instrumentation=[], result_processors=[])
agenda['global'] = OrderedDict(iterations=args.iterations)
agenda['workloads'] = []
device = None
device_config = None
-for name in args.extensions:
-extcls = loader.get_extension_class(name)
+for name in args.plugins:
+extcls = loader.get_plugin_class(name)
config = loader.get_default_config(name)
del config['modules']

@@ -236,10 +236,10 @@ class CreateAgendaSubcommand(CreateSubcommand):
if args.include_runtime_params:
if not device:
if settings.device:
-device = loader.get_extension_class(settings.device)
+device = loader.get_plugin_class(settings.device)
device_config = loader.get_default_config(settings.device)
else:
-raise ConfigError('-r option requires for a device to be in the list of extensions')
+raise ConfigError('-r option requires for a device to be in the list of plugins')
rps = OrderedDict()
for rp in device.runtime_parameters:
if hasattr(rp, 'get_runtime_parameters'):
@@ -290,7 +290,7 @@ class CreateCommand(Command):

def create_workload(name, kind='basic', where='local', check_name=True, **kwargs):
if check_name:
-extloader = ExtensionLoader(packages=settings.extension_packages, paths=settings.extension_paths)
+extloader = PluginLoader(packages=settings.plugin_packages, paths=settings.plugin_paths)
if name in [wl.name for wl in extloader.list_workloads()]:
raise CommandError('Workload with name "{}" already exists.'.format(name))

@@ -14,26 +14,26 @@
#


-from wlauto import ExtensionLoader, Command, settings
+from wlauto import PluginLoader, Command, settings
from wlauto.utils.formatter import DescriptionListFormatter
from wlauto.utils.doc import get_summary

+from wlauto.core import pluginloader

class ListCommand(Command):

name = 'list'
-description = 'List available WA extensions with a short description of each.'
+description = 'List available WA plugins with a short description of each.'

def initialize(self, context):
-extension_types = ['{}s'.format(ext.name) for ext in settings.extensions]
+plugin_types = ['{}s'.format(name) for name in pluginloader.kinds]
self.parser.add_argument('kind', metavar='KIND',
-help=('Specify the kind of extension to list. Must be '
-'one of: {}'.format(', '.join(extension_types))),
-choices=extension_types)
+help=('Specify the kind of plugin to list. Must be '
+'one of: {}'.format(', '.join(plugin_types))),
+choices=plugin_types)
self.parser.add_argument('-n', '--name', help='Filter results by the name specified')
self.parser.add_argument('-o', '--packaged-only', action='store_true',
help='''
-Only list extensions packaged with WA itself. Do not list extensions
+Only list plugins packaged with WA itself. Do not list plugins
installed locally or from other packages.
''')
self.parser.add_argument('-p', '--platform', help='Only list results that are supported by '
@@ -44,12 +44,7 @@ class ListCommand(Command):
if args.name:
filters['name'] = args.name

-if args.packaged_only:
-ext_loader = ExtensionLoader()
-else:
-ext_loader = ExtensionLoader(packages=settings.extension_packages,
-paths=settings.extension_paths)
-results = ext_loader.list_extensions(args.kind[:-1])
+results = pluginloader.list_plugins(args.kind[:-1])
if filters or args.platform:
filtered_results = []
for result in results:
@@ -72,8 +67,8 @@ class ListCommand(Command):
print output.format_data()


-def check_platform(extension, platform):
-supported_platforms = getattr(extension, 'supported_platforms', [])
+def check_platform(plugin, platform):
+supported_platforms = getattr(plugin, 'supported_platforms', [])
if supported_platforms:
return platform in supported_platforms
return True
@@ -16,7 +16,7 @@
import os
import sys

-from wlauto import ExtensionLoader, Command, settings
+from wlauto import PluginLoader, Command, settings
from wlauto.common.resources import Executable
from wlauto.core.resource import NO_ONE
from wlauto.core.resolver import ResourceResolver
@@ -69,8 +69,8 @@ class RecordCommand(Command):
self.validate_args(args)
self.logger.info("Connecting to device...")

-ext_loader = ExtensionLoader(packages=settings.extension_packages,
-paths=settings.extension_paths)
+ext_loader = PluginLoader(packages=settings.plugin_packages,
+paths=settings.plugin_paths)

# Setup config
self.config = RunConfiguration(ext_loader)
@@ -84,7 +84,6 @@ class RecordCommand(Command):
# Setup device
self.device = ext_loader.get_device(settings.device, **settings.device_config)
self.device.validate()
-self.device.dynamic_modules = []
self.device.connect()
self.device.initialize(context)

@@ -23,7 +23,8 @@ from wlauto import Command, settings
from wlauto.core.agenda import Agenda
from wlauto.core.execution import Executor
from wlauto.utils.log import add_log_file
-
+from wlauto.core.configuration import RunConfiguration
+from wlauto.core import pluginloader

class RunCommand(Command):

@@ -45,7 +46,7 @@ class RunCommand(Command):
option (see below) is used, in which case the contents of the
directory will be overwritten. If this option is not specified,
then {} will be used instead.
-""".format(settings.output_directory))
+""".format(settings.default_output_directory))
self.parser.add_argument('-f', '--force', action='store_true',
help="""
Overwrite output directory if it exists. By default, the script
@@ -69,18 +70,22 @@ class RunCommand(Command):
""")

def execute(self, args): # NOQA
-self.set_up_output_directory(args)
-add_log_file(settings.log_file)
+output_directory = self.set_up_output_directory(args)
+add_log_file(os.path.join(output_directory, "run.log"))
+config = RunConfiguration(pluginloader)

if os.path.isfile(args.agenda):
agenda = Agenda(args.agenda)
settings.agenda = args.agenda
-shutil.copy(args.agenda, settings.meta_directory)
+shutil.copy(args.agenda, config.meta_directory)
else:
self.logger.debug('{} is not a file; assuming workload name.'.format(args.agenda))
agenda = Agenda()
agenda.add_workload_entry(args.agenda)

+for filepath in settings.config_paths:
+config.load_config(filepath)
+
if args.instruments_to_disable:
if 'instrumentation' not in agenda.config:
agenda.config['instrumentation'] = []
@@ -89,27 +94,29 @@ class RunCommand(Command):
agenda.config['instrumentation'].append('~{}'.format(itd))

basename = 'config_'
-for file_number, path in enumerate(settings.get_config_paths(), 1):
+for file_number, path in enumerate(settings.config_paths, 1):
file_ext = os.path.splitext(path)[1]
-shutil.copy(path, os.path.join(settings.meta_directory,
+shutil.copy(path, os.path.join(meta_directory,
basename + str(file_number) + file_ext))

-executor = Executor()
+executor = Executor(config)
executor.execute(agenda, selectors={'ids': args.only_run_ids})

def set_up_output_directory(self, args):
if args.output_directory:
-settings.output_directory = args.output_directory
-self.logger.debug('Using output directory: {}'.format(settings.output_directory))
-if os.path.exists(settings.output_directory):
+output_directory = args.output_directory
+else:
+output_directory = settings.default_output_directory
+self.logger.debug('Using output directory: {}'.format(output_directory))
+if os.path.exists(output_directory):
if args.force:
self.logger.info('Removing existing output directory.')
-shutil.rmtree(settings.output_directory)
+shutil.rmtree(os.path.abspath(output_directory))
else:
-self.logger.error('Output directory {} exists.'.format(settings.output_directory))
+self.logger.error('Output directory {} exists.'.format(output_directory))
self.logger.error('Please specify another location, or use -f option to overwrite.\n')
sys.exit(1)

self.logger.info('Creating output directory.')
-os.makedirs(settings.output_directory)
-os.makedirs(settings.meta_directory)
+os.makedirs(output_directory)
+return output_directory
@@ -18,7 +18,9 @@ import sys
import subprocess
from cStringIO import StringIO

-from wlauto import Command, ExtensionLoader, settings
+from wlauto import Command
+from wlauto.core.config.core import settings
+from wlauto.core import pluginloader
from wlauto.utils.doc import (get_summary, get_description, get_type_name, format_column, format_body,
format_paragraph, indent, strip_inlined_text)
from wlauto.utils.misc import get_pager
@@ -30,21 +32,20 @@ class ShowCommand(Command):
name = 'show'

description = """
-Display documentation for the specified extension (workload, instrument, etc.).
+Display documentation for the specified plugin (workload, instrument, etc.).
"""

def initialize(self, context):
self.parser.add_argument('name', metavar='EXTENSION',
-help='''The name of the extension for which information will
+help='''The name of the plugin for which information will
be shown.''')

def execute(self, args):
# pylint: disable=unpacking-non-sequence
-ext_loader = ExtensionLoader(packages=settings.extension_packages, paths=settings.extension_paths)
-extension = ext_loader.get_extension_class(args.name)
+plugin = pluginloader.get_plugin_class(args.name)
out = StringIO()
term_width, term_height = get_terminal_size()
-format_extension(extension, out, term_width)
+format_plugin(plugin, out, term_width)
text = out.getvalue()
pager = get_pager()
if len(text.split('\n')) > term_height and pager:
@@ -58,44 +59,44 @@ class ShowCommand(Command):
sys.stdout.write(text)


-def format_extension(extension, out, width):
-format_extension_name(extension, out)
+def format_plugin(plugin, out, width):
+format_plugin_name(plugin, out)
out.write('\n')
-format_extension_summary(extension, out, width)
+format_plugin_summary(plugin, out, width)
out.write('\n')
-if hasattr(extension, 'supported_platforms'):
-format_supported_platforms(extension, out, width)
+if hasattr(plugin, 'supported_platforms'):
+format_supported_platforms(plugin, out, width)
out.write('\n')
-if extension.parameters:
-format_extension_parameters(extension, out, width)
+if plugin.parameters:
+format_plugin_parameters(plugin, out, width)
out.write('\n')
-format_extension_description(extension, out, width)
+format_plugin_description(plugin, out, width)


-def format_extension_name(extension, out):
-out.write('\n{}\n'.format(extension.name))
+def format_plugin_name(plugin, out):
+out.write('\n{}\n'.format(plugin.name))


-def format_extension_summary(extension, out, width):
-out.write('{}\n'.format(format_body(strip_inlined_text(get_summary(extension)), width)))
+def format_plugin_summary(plugin, out, width):
+out.write('{}\n'.format(format_body(strip_inlined_text(get_summary(plugin)), width)))


-def format_supported_platforms(extension, out, width):
-text = 'supported on: {}'.format(', '.join(extension.supported_platforms))
+def format_supported_platforms(plugin, out, width):
+text = 'supported on: {}'.format(', '.join(plugin.supported_platforms))
out.write('{}\n'.format(format_body(text, width)))


-def format_extension_description(extension, out, width):
+def format_plugin_description(plugin, out, width):
# skip the initial paragraph of multi-paragraph description, as already
# listed above.
-description = get_description(extension).split('\n\n', 1)[-1]
+description = get_description(plugin).split('\n\n', 1)[-1]
out.write('{}\n'.format(format_body(strip_inlined_text(description), width)))


-def format_extension_parameters(extension, out, width, shift=4):
+def format_plugin_parameters(plugin, out, width, shift=4):
out.write('parameters:\n\n')
param_texts = []
-for param in extension.parameters:
+for param in plugin.parameters:
description = format_paragraph(strip_inlined_text(param.description or ''), width - shift)
param_text = '{}'.format(param.name)
if param.mandatory:
@@ -111,4 +112,3 @@ def format_extension_parameters(extension, out, width, shift=4):
param_texts.append(indent(param_text, shift))

out.write(format_column('\n'.join(param_texts), width))
-
@@ -17,10 +17,10 @@ import os
import sys
import time

-from wlauto.core.extension import Parameter
+from wlauto.core.plugin import Parameter
from wlauto.core.workload import Workload
from wlauto.core.resource import NO_ONE
-from wlauto.common.resources import ExtensionAsset, Executable
+from wlauto.common.resources import PluginAsset, Executable
from wlauto.exceptions import WorkloadError, ResourceError, ConfigError
from wlauto.utils.android import ApkInfo, ANDROID_NORMAL_PERMISSIONS
from wlauto.utils.types import boolean
@@ -480,7 +480,7 @@ class GameWorkload(ApkWorkload, ReventWorkload):
kind, resource_file = resource_file.split(':', 1)
ondevice_cache = self.device.path.join(self.device.resource_cache, self.name, resource_file)
if not self.device.file_exists(ondevice_cache):
-asset_tarball = context.resolver.get(ExtensionAsset(self, resource_file))
+asset_tarball = context.resolver.get(PluginAsset(self, resource_file))
if not asset_tarball:
message = 'Could not find resource {} for workload {}.'
raise WorkloadError(message.format(resource_file, self.name))
BIN wlauto/common/bin/x86_64/busybox (new file; binary file not shown)
@@ -43,12 +43,12 @@ class File(FileResource):
return '<{}\'s {} {}>'.format(self.owner, self.name, self.path or self.url)


-class ExtensionAsset(File):
+class PluginAsset(File):

-name = 'extension_asset'
+name = 'plugin_asset'

def __init__(self, owner, path):
-super(ExtensionAsset, self).__init__(owner, os.path.join(owner.name, path))
+super(PluginAsset, self).__init__(owner, os.path.join(owner.name, path))


class Executable(FileResource):
@@ -1,212 +0,0 @@
|
||||
# Copyright 2013-2015 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import re
|
||||
from collections import namedtuple, OrderedDict
|
||||
|
||||
from wlauto.exceptions import ConfigError
|
||||
from wlauto.utils.misc import merge_dicts, normalize, unique
|
||||
from wlauto.utils.misc import load_struct_from_yaml, load_struct_from_python, LoadSyntaxError
|
||||
from wlauto.utils.types import identifier
|
||||
|
||||
|
||||
_this_dir = os.path.dirname(__file__)
|
||||
_user_home = os.path.expanduser('~')
|
||||
|
||||
# loading our external packages over those from the environment
|
||||
sys.path.insert(0, os.path.join(_this_dir, '..', 'external'))
|
||||
|
||||
|
||||
# Defines extension points for the WA framework. This table is used by the
|
||||
# ExtensionLoader (among other places) to identify extensions it should look
|
||||
# for.
|
||||
# Parameters that need to be specified in a tuple for each extension type:
|
||||
# name: The name of the extension type. This will be used to resolve get_
|
||||
# and list_methods in the extension loader.
|
||||
# class: The base class for the extension type. Extension loader will check
|
||||
# whether classes it discovers are subclassed from this.
|
||||
# default package: This is the package that will be searched for extensions
|
||||
# of that type by default (if not other packages are
|
||||
# specified when creating the extension loader). This
|
||||
# package *must* exist.
|
||||
# default path: This is the subdirectory under the environment_root which
|
||||
# will be searched for extensions of this type by default (if
|
||||
# no other paths are specified when creating the extension
|
||||
# loader). This directory will be automatically created if it
|
||||
# does not exist.
|
||||
|
||||
#pylint: disable=C0326
|
||||
_EXTENSION_TYPE_TABLE = [
|
||||
# name, class, default package, default path
|
||||
('command', 'wlauto.core.command.Command', 'wlauto.commands', 'commands'),
|
||||
('device_manager', 'wlauto.core.device_manager.DeviceManager', 'wlauto.managers', 'managers'),
|
||||
('instrument', 'wlauto.core.instrumentation.Instrument', 'wlauto.instrumentation', 'instruments'),
|
||||
('resource_getter', 'wlauto.core.resource.ResourceGetter', 'wlauto.resource_getters', 'resource_getters'),
|
||||
('result_processor', 'wlauto.core.result.ResultProcessor', 'wlauto.result_processors', 'result_processors'),
|
||||
('workload', 'wlauto.core.workload.Workload', 'wlauto.workloads', 'workloads'),
|
||||
]
|
||||
_Extension = namedtuple('_Extension', 'name, cls, default_package, default_path')
|
||||
_extensions = [_Extension._make(ext) for ext in _EXTENSION_TYPE_TABLE] # pylint: disable=W0212
|
||||
|
||||
|
||||
class ConfigLoader(object):
|
||||
"""
|
||||
This class is responsible for loading and validating config files.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self._loaded = False
|
||||
self._config = {}
|
||||
self.config_count = 0
|
||||
self.loaded_files = []
|
||||
self.environment_root = None
|
||||
self.output_directory = 'wa_output'
|
||||
self.reboot_after_each_iteration = True
|
||||
self.dependencies_directory = None
|
||||
self.agenda = None
|
||||
self.extension_packages = []
|
||||
self.extension_paths = []
|
||||
self.extensions = []
|
||||
self.verbosity = 0
|
||||
self.debug = False
|
||||
self.package_directory = os.path.dirname(_this_dir)
|
||||
self.commands = {}
|
||||
|
||||
@property
|
||||
def meta_directory(self):
|
||||
return os.path.join(self.output_directory, '__meta')
|
||||
|
||||
@property
|
||||
def log_file(self):
|
||||
return os.path.join(self.output_directory, 'run.log')
|
||||
|
||||
def update(self, source):
|
||||
if isinstance(source, dict):
|
||||
self.update_from_dict(source)
|
||||
else:
|
||||
self.config_count += 1
|
||||
self.update_from_file(source)
|
||||
|
||||
def update_from_file(self, source):
|
||||
ext = os.path.splitext(source)[1].lower() # pylint: disable=redefined-outer-name
|
||||
try:
|
||||
if ext in ['.py', '.pyo', '.pyc']:
|
||||
new_config = load_struct_from_python(source)
|
||||
elif ext == '.yaml':
|
||||
new_config = load_struct_from_yaml(source)
|
||||
else:
|
||||
raise ConfigError('Unknown config format: {}'.format(source))
|
||||
except LoadSyntaxError as e:
|
||||
raise ConfigError(e)
|
||||
|
||||
self._config = merge_dicts(self._config, new_config,
|
||||
list_duplicates='first',
|
||||
match_types=False,
|
||||
dict_type=OrderedDict)
|
||||
self.loaded_files.append(source)
|
||||
self._loaded = True
|
||||
|
||||
def update_from_dict(self, source):
|
||||
normalized_source = dict((identifier(k), v) for k, v in source.iteritems())
|
||||
self._config = merge_dicts(self._config, normalized_source, list_duplicates='first',
|
||||
match_types=False, dict_type=OrderedDict)
|
||||
self._loaded = True
|
||||
|
||||
def get_config_paths(self):
|
||||
return [lf.rstrip('c') for lf in self.loaded_files]
|
||||
|
||||
def _check_loaded(self):
|
||||
if not self._loaded:
|
||||
raise ConfigError('Config file not loaded.')
|
||||
|
||||
def __getattr__(self, name):
|
||||
self._check_loaded()
|
||||
return self._config.get(normalize(name))
|
||||
|
||||
|
||||
def init_environment(env_root, dep_dir, extension_paths, overwrite_existing=False): # pylint: disable=R0914
|
||||
"""Initialise a fresh user environment creating the workload automation"""
|
||||
if os.path.exists(env_root):
|
||||
if not overwrite_existing:
|
||||
raise ConfigError('Environment {} already exists.'.format(env_root))
|
||||
shutil.rmtree(env_root)
|
||||
|
||||
os.makedirs(env_root)
|
||||
with open(os.path.join(_this_dir, '..', 'config_example.py')) as rf:
|
||||
text = re.sub(r'""".*?"""', '', rf.read(), 1, re.DOTALL)
|
||||
with open(os.path.join(_env_root, 'config.py'), 'w') as wf:
|
||||
wf.write(text)
|
||||
|
||||
os.makedirs(dep_dir)
|
||||
for path in extension_paths:
|
||||
os.makedirs(path)
|
||||
|
||||
if os.getenv('USER') == 'root':
|
||||
# If running with sudo on POSIX, change the ownership to the real user.
|
||||
real_user = os.getenv('SUDO_USER')
|
||||
if real_user:
|
||||
import pwd # done here as module won't import on win32
|
||||
user_entry = pwd.getpwnam(real_user)
|
||||
uid, gid = user_entry.pw_uid, user_entry.pw_gid
|
||||
os.chown(env_root, uid, gid)
|
||||
# why, oh why isn't there a recusive=True option for os.chown?
|
||||
for root, dirs, files in os.walk(env_root):
|
||||
for d in dirs:
|
||||
os.chown(os.path.join(root, d), uid, gid)
|
||||
for f in files: # pylint: disable=W0621
|
||||
os.chown(os.path.join(root, f), uid, gid)
|
||||
|
||||
|
||||
_env_root = os.getenv('WA_USER_DIRECTORY', os.path.join(_user_home, '.workload_automation'))
|
||||
_dep_dir = os.path.join(_env_root, 'dependencies')
|
||||
_extension_paths = [os.path.join(_env_root, ext.default_path) for ext in _extensions]
|
||||
_env_var_paths = os.getenv('WA_EXTENSION_PATHS', '')
|
||||
if _env_var_paths:
|
||||
_extension_paths.extend(_env_var_paths.split(os.pathsep))
|
||||
|
||||
_env_configs = []
|
||||
for filename in ['config.py', 'config.yaml']:
|
||||
filepath = os.path.join(_env_root, filename)
|
||||
if os.path.isfile(filepath):
|
||||
_env_configs.append(filepath)
|
||||
|
||||
if not os.path.isdir(_env_root):
|
||||
init_environment(_env_root, _dep_dir, _extension_paths)
|
||||
elif not _env_configs:
|
||||
filepath = os.path.join(_env_root, 'config.py')
|
||||
with open(os.path.join(_this_dir, '..', 'config_example.py')) as f:
|
||||
f_text = re.sub(r'""".*?"""', '', f.read(), 1, re.DOTALL)
|
||||
with open(filepath, 'w') as f:
|
||||
f.write(f_text)
|
||||
_env_configs.append(filepath)
|
||||
|
||||
settings = ConfigLoader()
|
||||
settings.environment_root = _env_root
|
||||
settings.dependencies_directory = _dep_dir
|
||||
settings.extension_paths = _extension_paths
|
||||
settings.extensions = _extensions
|
||||
|
||||
_packages_file = os.path.join(_env_root, 'packages')
|
||||
if os.path.isfile(_packages_file):
|
||||
with open(_packages_file) as fh:
|
||||
settings.extension_packages = unique(fh.read().split())
|
||||
|
||||
for config in _env_configs:
|
||||
settings.update(config)
|
@@ -15,12 +15,12 @@

import textwrap

-from wlauto.core.extension import Extension
+from wlauto.core.plugin import Plugin
from wlauto.core.entry_point import init_argument_parser
from wlauto.utils.doc import format_body


-class Command(Extension):
+class Command(Plugin):
"""
Defines a Workload Automation command. This will be executed from the command line as
``wa <command> [args ...]``. This defines the name to be used when invoking wa, the
@@ -28,7 +28,7 @@ class Command(Extension):
to parse the reset of the command line arguments.

"""
-
+kind = "command"
help = None
usage = None
description = None
wlauto/core/config/__init__.py (new file, 2 lines)

@@ -0,0 +1,2 @@
+from wlauto.core.config.core import settings, ConfigurationPoint, PluginConfiguration
+from wlauto.core.config.core import merge_config_values, WA_CONFIGURATION
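A brief usage sketch (an assumption based on the new package layout above, not code taken from the commit): call sites that previously imported settings from wlauto.core.bootstrap can instead import it from the new config package.

# Sketch only: imports assume the wlauto/core/config package added above.
from wlauto.core.config import settings, ConfigurationPoint

# 'settings' is the module-level WAConfiguration instance defined in
# wlauto/core/config/core.py; its attributes are populated from the
# WA_CONFIGURATION defaults (e.g. user_directory, plugin_packages).
print settings.user_directory
print settings.plugin_packages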
wlauto/core/config/core.py (new file, 650 lines)

@@ -0,0 +1,650 @@
|
||||
import os
|
||||
import logging
|
||||
import shutil
|
||||
from glob import glob
|
||||
from copy import copy
|
||||
from itertools import chain
|
||||
|
||||
from wlauto.core import pluginloader
|
||||
from wlauto.exceptions import ConfigError
|
||||
from wlauto.utils.types import integer, boolean, identifier, list_of_strings
|
||||
from wlauto.utils.misc import isiterable, get_article
|
||||
from wlauto.utils.serializer import read_pod, yaml
|
||||
|
||||
|
||||
class ConfigurationPoint(object):
|
||||
"""
|
||||
This defines a gneric configuration point for workload automation. This is
|
||||
used to handle global settings, plugin parameters, etc.
|
||||
|
||||
"""
|
||||
|
||||
# Mapping for kind conversion; see docs for convert_types below
|
||||
kind_map = {
|
||||
int: integer,
|
||||
bool: boolean,
|
||||
}
|
||||
|
||||
def __init__(self, name,
|
||||
kind=None,
|
||||
mandatory=None,
|
||||
default=None,
|
||||
override=False,
|
||||
allowed_values=None,
|
||||
description=None,
|
||||
constraint=None,
|
||||
merge=False,
|
||||
aliases=None,
|
||||
convert_types=True):
|
||||
"""
|
||||
Create a new Parameter object.
|
||||
|
||||
:param name: The name of the parameter. This will become an instance
|
||||
member of the plugin object to which the parameter is
|
||||
applied, so it must be a valid python identifier. This
|
||||
is the only mandatory parameter.
|
||||
:param kind: The type of parameter this is. This must be a callable
|
||||
that takes an arbitrary object and converts it to the
|
||||
expected type, or raised ``ValueError`` if such conversion
|
||||
is not possible. Most Python standard types -- ``str``,
|
||||
``int``, ``bool``, etc. -- can be used here. This
|
||||
defaults to ``str`` if not specified.
|
||||
:param mandatory: If set to ``True``, then a non-``None`` value for
|
||||
this parameter *must* be provided on plugin
|
||||
object construction, otherwise ``ConfigError``
|
||||
will be raised.
|
||||
:param default: The default value for this parameter. If no value
|
||||
is specified on plugin construction, this value
|
||||
will be used instead. (Note: if this is specified
|
||||
and is not ``None``, then ``mandatory`` parameter
|
||||
will be ignored).
|
||||
:param override: A ``bool`` that specifies whether a parameter of
|
||||
the same name further up the hierarchy should
|
||||
be overridden. If this is ``False`` (the
|
||||
default), an exception will be raised by the
|
||||
``AttributeCollection`` instead.
|
||||
:param allowed_values: This should be the complete list of allowed
|
||||
values for this parameter. Note: ``None``
|
||||
value will always be allowed, even if it is
|
||||
not in this list. If you want to disallow
|
||||
``None``, set ``mandatory`` to ``True``.
|
||||
:param constraint: If specified, this must be a callable that takes
|
||||
the parameter value as an argument and return a
|
||||
boolean indicating whether the constraint has been
|
||||
satisfied. Alternatively, can be a two-tuple with
|
||||
said callable as the first element and a string
|
||||
describing the constraint as the second.
|
||||
:param merge: The default behaviour when setting a value on an object
|
||||
that already has that attribute is to overrided with
|
||||
the new value. If this is set to ``True`` then the two
|
||||
values will be merged instead. The rules by which the
|
||||
values are merged will be determined by the types of
|
||||
the existing and new values -- see
|
||||
``merge_config_values`` documentation for details.
|
||||
:param aliases: Alternative names for the same configuration point.
|
||||
These are largely for backwards compatibility.
|
||||
:param convert_types: If ``True`` (the default), will automatically
|
||||
convert ``kind`` values from native Python
|
||||
types to WA equivalents. This allows more
|
||||
ituitive interprestation of parameter values,
|
||||
e.g. the string ``"false"`` being interpreted
|
||||
as ``False`` when specifed as the value for
|
||||
a boolean Parameter.
|
||||
|
||||
"""
|
||||
self.name = identifier(name)
|
||||
if kind is not None and not callable(kind):
|
||||
raise ValueError('Kind must be callable.')
|
||||
if convert_types and kind in self.kind_map:
|
||||
kind = self.kind_map[kind]
|
||||
self.kind = kind
|
||||
self.mandatory = mandatory
|
||||
self.default = default
|
||||
self.override = override
|
||||
self.allowed_values = allowed_values
|
||||
self.description = description
|
||||
if self.kind is None and not self.override:
|
||||
self.kind = str
|
||||
if constraint is not None and not callable(constraint) and not isinstance(constraint, tuple):
|
||||
raise ValueError('Constraint must be callable or a (callable, str) tuple.')
|
||||
self.constraint = constraint
|
||||
self.merge = merge
|
||||
self.aliases = aliases or []
|
||||
|
||||
def match(self, name):
|
||||
if name == self.name:
|
||||
return True
|
||||
elif name in self.aliases:
|
||||
return True
|
||||
return False
|
||||
|
||||
def set_value(self, obj, value=None):
|
||||
if value is None:
|
||||
if self.default is not None:
|
||||
value = self.default
|
||||
elif self.mandatory:
|
||||
msg = 'No values specified for mandatory parameter {} in {}'
|
||||
raise ConfigError(msg.format(self.name, obj.name))
|
||||
else:
|
||||
try:
|
||||
value = self.kind(value)
|
||||
except (ValueError, TypeError):
|
||||
typename = self.get_type_name()
|
||||
msg = 'Bad value "{}" for {}; must be {} {}'
|
||||
article = get_article(typename)
|
||||
raise ConfigError(msg.format(value, self.name, article, typename))
|
||||
if self.merge and hasattr(obj, self.name):
|
||||
value = merge_config_values(getattr(obj, self.name), value)
|
||||
setattr(obj, self.name, value)
|
||||
|
||||
def validate(self, obj):
|
||||
value = getattr(obj, self.name, None)
|
||||
|
||||
if value is not None:
|
||||
if self.allowed_values:
|
||||
self._validate_allowed_values(obj, value)
|
||||
if self.constraint:
|
||||
self._validate_constraint(obj, value)
|
||||
else:
|
||||
if self.mandatory:
|
||||
msg = 'No value specified for mandatory parameter {} in {}.'
|
||||
raise ConfigError(msg.format(self.name, obj.name))
|
||||
|
||||
def get_type_name(self):
|
||||
typename = str(self.kind)
|
||||
if '\'' in typename:
|
||||
typename = typename.split('\'')[1]
|
||||
elif typename.startswith('<function'):
|
||||
typename = typename.split()[1]
|
||||
return typename
|
||||
|
||||
def _validate_allowed_values(self, obj, value):
|
||||
if 'list' in str(self.kind):
|
||||
for v in value:
|
||||
if v not in self.allowed_values:
|
||||
msg = 'Invalid value {} for {} in {}; must be in {}'
|
||||
raise ConfigError(msg.format(v, self.name, obj.name, self.allowed_values))
|
||||
else:
|
||||
if value not in self.allowed_values:
|
||||
msg = 'Invalid value {} for {} in {}; must be in {}'
|
||||
raise ConfigError(msg.format(value, self.name, obj.name, self.allowed_values))
|
||||
|
||||
def _validate_constraint(self, obj, value):
|
||||
msg_vals = {'value': value, 'param': self.name, 'plugin': obj.name}
|
||||
if isinstance(self.constraint, tuple) and len(self.constraint) == 2:
|
||||
constraint, msg = self.constraint # pylint: disable=unpacking-non-sequence
|
||||
elif callable(self.constraint):
|
||||
constraint = self.constraint
|
||||
msg = '"{value}" failed constraint validation for {param} in {plugin}.'
|
||||
else:
|
||||
raise ValueError('Invalid constraint for {}: must be callable or a 2-tuple'.format(self.name))
|
||||
if not constraint(value):
|
||||
raise ConfigError(value, msg.format(**msg_vals))
|
||||
|
||||
def __repr__(self):
|
||||
d = copy(self.__dict__)
|
||||
del d['description']
|
||||
return 'ConfPoint({})'.format(d)
|
||||
|
||||
__str__ = __repr__
|
||||
|
||||
|
||||
class ConfigurationPointCollection(object):
|
||||
|
||||
def __init__(self):
|
||||
self._configs = []
|
||||
self._config_map = {}
|
||||
|
||||
def get(self, name, default=None):
|
||||
return self._config_map.get(name, default)
|
||||
|
||||
def add(self, point):
|
||||
if not isinstance(point, ConfigurationPoint):
|
||||
raise ValueError('Mustbe a ConfigurationPoint, got {}'.format(point.__class__))
|
||||
existing = self.get(point.name)
|
||||
if existing:
|
||||
if point.override:
|
||||
new_point = copy(existing)
|
||||
for a, v in point.__dict__.iteritems():
|
||||
if v is not None:
|
||||
setattr(new_point, a, v)
|
||||
self.remove(existing)
|
||||
point = new_point
|
||||
else:
|
||||
raise ValueError('Duplicate ConfigurationPoint "{}"'.format(point.name))
|
||||
self._add(point)
|
||||
|
||||
def remove(self, point):
|
||||
self._configs.remove(point)
|
||||
del self._config_map[point.name]
|
||||
for alias in point.aliases:
|
||||
del self._config_map[alias]
|
||||
|
||||
append = add
|
||||
|
||||
def _add(self, point):
|
||||
self._configs.append(point)
|
||||
self._config_map[point.name] = point
|
||||
for alias in point.aliases:
|
||||
if alias in self._config_map:
|
||||
message = 'Clashing alias "{}" between "{}" and "{}"'
|
||||
raise ValueError(message.format(alias, point.name,
|
||||
self._config_map[alias].name))
|
||||
|
||||
def __str__(self):
|
||||
str(self._configs)
|
||||
|
||||
__repr__ = __str__
|
||||
|
||||
def __iadd__(self, other):
|
||||
for p in other:
|
||||
self.add(p)
|
||||
return self
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._configs)
|
||||
|
||||
def __contains__(self, p):
|
||||
if isinstance(p, basestring):
|
||||
return p in self._config_map
|
||||
return p.name in self._config_map
|
||||
|
||||
def __getitem__(self, i):
|
||||
if isinstance(i, int):
|
||||
return self._configs[i]
|
||||
return self._config_map[i]
|
||||
|
||||
def __len__(self):
|
||||
return len(self._configs)
|
||||
|
||||
|
||||
class LoggingConfig(dict):
|
||||
|
||||
defaults = {
|
||||
'file_format': '%(asctime)s %(levelname)-8s %(name)s: %(message)s',
|
||||
'verbose_format': '%(asctime)s %(levelname)-8s %(name)s: %(message)s',
|
||||
'regular_format': '%(levelname)-8s %(message)s',
|
||||
'color': True,
|
||||
}
|
||||
|
||||
def __init__(self, config=None):
|
||||
dict.__init__(self)
|
||||
if isinstance(config, dict):
|
||||
config = {identifier(k.lower()): v for k, v in config.iteritems()}
|
||||
self['regular_format'] = config.pop('regular_format', self.defaults['regular_format'])
|
||||
self['verbose_format'] = config.pop('verbose_format', self.defaults['verbose_format'])
|
||||
self['file_format'] = config.pop('file_format', self.defaults['file_format'])
|
||||
self['color'] = config.pop('colour_enabled', self.defaults['color']) # legacy
|
||||
self['color'] = config.pop('color', self.defaults['color'])
|
||||
if config:
|
||||
message = 'Unexpected logging configuation parameters: {}'
|
||||
raise ValueError(message.format(bad_vals=', '.join(config.keys())))
|
||||
elif config is None:
|
||||
for k, v in self.defaults.iteritems():
|
||||
self[k] = v
|
||||
else:
|
||||
raise ValueError(config)
|
||||
|
||||
|
||||
__WA_CONFIGURATION = [
|
||||
ConfigurationPoint(
|
||||
'user_directory',
|
||||
description="""
|
||||
Path to the user directory. This is the location WA will look for
|
||||
user configuration, additional plugins and plugin dependencies.
|
||||
""",
|
||||
kind=str,
|
||||
default=os.path.join(os.path.expanduser('~'), '.workload_automation'),
|
||||
),
|
||||
ConfigurationPoint(
|
||||
'plugin_packages',
|
||||
kind=list_of_strings,
|
||||
default=[
|
||||
'wlauto.commands',
|
||||
'wlauto.workloads',
|
||||
'wlauto.instrumentation',
|
||||
'wlauto.result_processors',
|
||||
'wlauto.managers',
|
||||
'wlauto.resource_getters',
|
||||
],
|
||||
description="""
|
||||
List of packages that will be scanned for WA plugins.
|
||||
""",
|
||||
),
|
||||
ConfigurationPoint(
|
||||
'plugin_paths',
|
||||
kind=list_of_strings,
|
||||
default=[
|
||||
'workloads',
|
||||
'instruments',
|
||||
'targets',
|
||||
'processors',
|
||||
|
||||
# Legacy
|
||||
'managers',
|
||||
'result_processors',
|
||||
],
|
||||
description="""
|
||||
List of paths that will be scanned for WA plugins.
|
||||
""",
|
||||
),
|
||||
ConfigurationPoint(
|
||||
'plugin_ignore_paths',
|
||||
kind=list_of_strings,
|
||||
default=[],
|
||||
description="""
|
||||
List of (sub)paths that will be ignored when scanning
|
||||
``plugin_paths`` for WA plugins.
|
||||
""",
|
||||
),
|
||||
ConfigurationPoint(
|
||||
'filer_mount_point',
|
||||
description="""
|
||||
The local mount point for the filer hosting WA assets.
|
||||
""",
|
||||
),
|
||||
ConfigurationPoint(
|
||||
'logging',
|
||||
kind=LoggingConfig,
|
||||
default=LoggingConfig.defaults,
|
||||
description="""
|
||||
WA logging configuration. This should be a dict with a subset
|
||||
of the following keys::
|
||||
|
||||
:normal_format: Logging format used for console output
|
||||
:verbose_format: Logging format used for verbose console output
|
||||
:file_format: Logging format used for run.log
|
||||
:color: If ``True`` (the default), console logging output will
|
||||
contain bash color escape codes. Set this to ``False`` if
|
||||
console output will be piped somewhere that does not know
|
||||
how to handle those.
|
||||
""",
|
||||
),
|
||||
ConfigurationPoint(
|
||||
'verbosity',
|
||||
kind=int,
|
||||
default=0,
|
||||
description="""
|
||||
Verbosity of console output.
|
||||
""",
|
||||
),
|
||||
ConfigurationPoint(
|
||||
'default_output_directory',
|
||||
default="wa_output",
|
||||
description="""
|
||||
The default output directory that will be created if not
|
||||
specified when invoking a run.
|
||||
""",
|
||||
),
|
||||
]
|
||||
|
||||
WA_CONFIGURATION = {cp.name: cp for cp in __WA_CONFIGURATION}
|
||||
|
||||
ENVIRONMENT_VARIABLES = {
|
||||
'WA_USER_DIRECTORY': WA_CONFIGURATION['user_directory'],
|
||||
'WA_PLUGIN_PATHS': WA_CONFIGURATION['plugin_paths'],
|
||||
'WA_EXTENSION_PATHS': WA_CONFIGURATION['plugin_paths'], # plugin_paths (legacy)
|
||||
}
|
||||
|
||||
|
||||
class WAConfiguration(object):
|
||||
"""
|
||||
This is configuration for Workload Automation framework as a whole. This
|
||||
does not track configuration for WA runs. Rather, this tracks "meta"
|
||||
configuration, such as various locations WA looks for things, logging
|
||||
configuration etc.
|
||||
|
||||
"""
|
||||
|
||||
basename = 'config'
|
||||
|
||||
@property
|
||||
def dependencies_directory(self):
|
||||
return os.path.join(self.user_directory, 'dependencies')
|
||||
|
||||
def __init__(self):
|
||||
self.user_directory = ''
|
||||
self.plugin_packages = []
|
||||
self.plugin_paths = []
|
||||
self.plugin_ignore_paths = []
|
||||
self.config_paths = []
|
||||
self.logging = {}
|
||||
self._logger = logging.getLogger('settings')
|
||||
for confpoint in WA_CONFIGURATION.itervalues():
|
||||
confpoint.set_value(self)
|
||||
|
||||
def load_environment(self):
|
||||
for name, confpoint in ENVIRONMENT_VARIABLES.iteritems():
|
||||
value = os.getenv(name)
|
||||
if value:
|
||||
confpoint.set_value(self, value)
|
||||
self._expand_paths()
|
||||
|
||||
def load_config_file(self, path):
|
||||
self.load(read_pod(path))
|
||||
if path not in self.config_paths:
|
||||
self.config_paths.append(config_paths)
|
||||
|
||||
def load_user_config(self):
|
||||
globpath = os.path.join(self.user_directory, '{}.*'.format(self.basename))
|
||||
for path in glob(globpath):
|
||||
ext = os.path.splitext(path)[1].lower()
|
||||
if ext in ['.pyc', '.pyo']:
|
||||
continue
|
||||
self.load_config_file(path)
|
||||
|
||||
def load(self, config):
|
||||
for name, value in config.iteritems():
|
||||
if name in WA_CONFIGURATION:
|
||||
confpoint = WA_CONFIGURATION[name]
|
||||
confpoint.set_value(self, value)
|
||||
self._expand_paths()
|
||||
|
||||
def set(self, name, value):
|
||||
if name not in WA_CONFIGURATION:
|
||||
raise ConfigError('Unknown WA configuration "{}"'.format(name))
|
||||
WA_CONFIGURATION[name].set_value(self, value)
|
||||
|
||||
def initialize_user_directory(self, overwrite=False):
|
||||
"""
|
||||
Initialize a fresh user environment creating the workload automation.
|
||||
|
||||
"""
|
||||
if os.path.exists(self.user_directory):
|
||||
if not overwrite:
|
||||
raise ConfigError('Environment {} already exists.'.format(self.user_directory))
|
||||
shutil.rmtree(self.user_directory)
|
||||
|
||||
self._expand_paths()
|
||||
os.makedirs(self.dependencies_directory)
|
||||
for path in self.plugin_paths:
|
||||
os.makedirs(path)
|
||||
|
||||
with open(os.path.join(self.user_directory, 'config.yaml'), 'w') as _:
|
||||
yaml.dump(self.to_pod())
|
||||
|
||||
if os.getenv('USER') == 'root':
|
||||
# If running with sudo on POSIX, change the ownership to the real user.
|
||||
real_user = os.getenv('SUDO_USER')
|
||||
if real_user:
|
||||
import pwd # done here as module won't import on win32
|
||||
user_entry = pwd.getpwnam(real_user)
|
||||
uid, gid = user_entry.pw_uid, user_entry.pw_gid
|
||||
os.chown(self.user_directory, uid, gid)
|
||||
# why, oh why isn't there a recusive=True option for os.chown?
|
||||
for root, dirs, files in os.walk(self.user_directory):
|
||||
for d in dirs:
|
||||
os.chown(os.path.join(root, d), uid, gid)
|
||||
for f in files:
|
||||
os.chown(os.path.join(root, f), uid, gid)
|
||||
|
||||
@staticmethod
|
||||
def from_pod(pod):
|
||||
instance = WAConfiguration()
|
||||
instance.load(pod)
|
||||
return instance
|
||||
|
||||
def to_pod(self):
|
||||
return dict(
|
||||
user_directory=self.user_directory,
|
||||
plugin_packages=self.plugin_packages,
|
||||
plugin_paths=self.plugin_paths,
|
||||
plugin_ignore_paths=self.plugin_ignore_paths,
|
||||
logging=self.logging,
|
||||
)
|
||||
|
||||
def _expand_paths(self):
|
||||
self.dependencies_directory = os.path.join(self.user_directory,
|
||||
self.dependencies_directory)
|
||||
expanded = []
|
||||
for path in self.plugin_paths:
|
||||
path = os.path.expanduser(path)
|
||||
path = os.path.expandvars(path)
|
||||
expanded.append(os.path.join(self.user_directory, path))
|
||||
self.plugin_paths = expanded
|
||||
expanded = []
|
||||
for path in self.plugin_ignore_paths:
|
||||
path = os.path.expanduser(path)
|
||||
path = os.path.expandvars(path)
|
||||
expanded.append(os.path.join(self.user_directory, path))
|
||||
self.plugin_ignore_paths = expanded
|
||||
|
||||
|
||||
class PluginConfiguration(object):
|
||||
""" Maintains a mapping of plugin_name --> plugin_config. """
|
||||
|
||||
def __init__(self, loader=pluginloader):
|
||||
self.loader = loader
|
||||
self.config = {}
|
||||
|
||||
def update(self, name, config):
|
||||
if not hasattr(config, 'get'):
|
||||
raise ValueError('config must be a dict-like object got: {}'.format(config))
|
||||
name, alias_config = self.loader.resolve_alias(name)
|
||||
existing_config = self.config.get(name)
|
||||
if existing_config is None:
|
||||
existing_config = alias_config
|
||||
|
||||
new_config = config or {}
|
||||
self.config[name] = merge_config_values(existing_config, new_config)
|
||||
|
||||
|
||||
def merge_config_values(base, other):
|
||||
"""
|
||||
This is used to merge two objects, typically when setting the value of a
|
||||
``ConfigurationPoint``. First, both objects are categorized into
|
||||
|
||||
c: A scalar value. Basically, most objects. These values
|
||||
are treated as atomic, and not mergeable.
|
||||
s: A sequence. Anything iterable that is not a dict or
|
||||
a string (strings are considered scalars).
|
||||
m: A key-value mapping. ``dict`` and it's derivatives.
|
||||
n: ``None``.
|
||||
o: A mergeable object; this is an object that implements both
|
||||
``merge_with`` and ``merge_into`` methods.
|
||||
|
||||
The merge rules based on the two categories are then as follows:
|
||||
|
||||
(c1, c2) --> c2
|
||||
(s1, s2) --> s1 . s2
|
||||
(m1, m2) --> m1 . m2
|
||||
(c, s) --> [c] . s
|
||||
(s, c) --> s . [c]
|
||||
(s, m) --> s . [m]
|
||||
(m, s) --> [m] . s
|
||||
(m, c) --> ERROR
|
||||
(c, m) --> ERROR
|
||||
(o, X) --> o.merge_with(X)
|
||||
(X, o) --> o.merge_into(X)
|
||||
(X, n) --> X
|
||||
(n, X) --> X
|
||||
|
||||
where:
|
||||
|
||||
'.' means concatenation (for maps, contcationation of (k, v) streams
|
||||
then converted back into a map). If the types of the two objects
|
||||
differ, the type of ``other`` is used for the result.
|
||||
'X' means "any category"
|
||||
'[]' used to indicate a literal sequence (not necessarily a ``list``).
|
||||
when this is concatenated with an actual sequence, that sequencies
|
||||
type is used.
|
||||
|
||||
notes:
|
||||
|
||||
- When a mapping is combined with a sequence, that mapping is
|
||||
treated as a scalar value.
|
||||
- When combining two mergeable objects, they're combined using
|
||||
``o1.merge_with(o2)`` (_not_ using o2.merge_into(o1)).
|
||||
- Combining anything with ``None`` yields that value, irrespective
|
||||
of the order. So a ``None`` value is eqivalent to the corresponding
|
||||
item being omitted.
|
||||
- When both values are scalars, merging is equivalent to overwriting.
|
||||
- There is no recursion (e.g. if map values are lists, they will not
|
||||
be merged; ``other`` will overwrite ``base`` values). If complicated
|
||||
merging semantics (such as recursion) are required, they should be
|
||||
implemented within custom mergeable types (i.e. those that implement
|
||||
``merge_with`` and ``merge_into``).
|
||||
|
||||
While this can be used as a generic "combine any two arbitry objects"
|
||||
function, the semantics have been selected specifically for merging
|
||||
configuration point values.
|
||||
|
||||
"""
|
||||
cat_base = categorize(base)
|
||||
cat_other = categorize(other)
|
||||
|
||||
if cat_base == 'n':
|
||||
return other
|
||||
elif cat_other == 'n':
|
||||
return base
|
||||
|
||||
if cat_base == 'o':
|
||||
return base.merge_with(other)
|
||||
elif cat_other == 'o':
|
||||
return other.merge_into(base)
|
||||
|
||||
if cat_base == 'm':
|
||||
if cat_other == 's':
|
||||
return merge_sequencies([base], other)
|
||||
elif cat_other == 'm':
|
||||
return merge_maps(base, other)
|
||||
else:
|
||||
message = 'merge error ({}, {}): "{}" and "{}"'
|
||||
raise ValueError(message.format(cat_base, cat_other, base, other))
|
||||
elif cat_base == 's':
|
||||
if cat_other == 's':
|
||||
return merge_sequencies(base, other)
|
||||
else:
|
||||
return merge_sequencies(base, [other])
|
||||
else: # cat_base == 'c'
|
||||
if cat_other == 's':
|
||||
return merge_sequencies([base], other)
|
||||
elif cat_other == 'm':
|
||||
message = 'merge error ({}, {}): "{}" and "{}"'
|
||||
raise ValueError(message.format(cat_base, cat_other, base, other))
|
||||
else:
|
||||
return other
|
||||
|
||||
|
||||
def merge_sequencies(s1, s2):
|
||||
return type(s2)(chain(s1, s2))
|
||||
|
||||
|
||||
def merge_maps(m1, m2):
|
||||
return type(m2)(chain(m1.iteritems(), m2.iteritems()))
|
||||
|
||||
|
||||
def categorize(v):
|
||||
if hasattr(v, 'merge_with') and hasattr(v, 'merge_into'):
|
||||
return 'o'
|
||||
elif hasattr(v, 'iteritems'):
|
||||
return 'm'
|
||||
elif isiterable(v):
|
||||
return 's'
|
||||
elif v is None:
|
||||
return 'n'
|
||||
else:
|
||||
return 'c'
|
||||
|
||||
|
||||
settings = WAConfiguration()
|
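A small usage sketch of the merge rules documented in merge_config_values() above (illustrative only; the import path assumes the new wlauto.core.config.core module introduced by this commit):

# Each comment names the rule from the merge_config_values() docstring
# that the example exercises.
from wlauto.core.config.core import merge_config_values

assert merge_config_values(None, 42) == 42                           # (n, X) --> X
assert merge_config_values('old', 'new') == 'new'                    # (c1, c2) --> c2
assert merge_config_values([1, 2], [3]) == [1, 2, 3]                 # (s1, s2) --> s1 . s2
assert merge_config_values('x', [1, 2]) == ['x', 1, 2]               # (c, s) --> [c] . s
assert merge_config_values({'a': 1}, {'b': 2}) == {'a': 1, 'b': 2}   # (m1, m2) --> m1 . m2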
@ -22,6 +22,8 @@ from collections import OrderedDict
|
||||
from wlauto.exceptions import ConfigError
|
||||
from wlauto.utils.misc import merge_dicts, merge_lists, load_struct_from_file
|
||||
from wlauto.utils.types import regex_type, identifier
|
||||
from wlauto.core.config.core import settings
|
||||
from wlauto.core import pluginloader
|
||||
|
||||
|
||||
class SharedConfiguration(object):
|
||||
@ -313,6 +315,11 @@ class RunConfigurationItem(object):
|
||||
|
||||
return value
|
||||
|
||||
def __str__(self):
|
||||
return "RCI(name: {}, category: {}, method: {})".format(self.name, self.category, self.method)
|
||||
|
||||
__repr__ = __str__
|
||||
|
||||
|
||||
def _combine_ids(*args):
|
||||
return '_'.join(args)
|
||||
@ -334,8 +341,8 @@ class RunConfiguration(object):
|
||||
the implementation gets rather complicated. This is going to be a quick overview of
|
||||
the underlying mechanics.
|
||||
|
||||
.. note:: You don't need to know this to use WA, or to write extensions for it. From
|
||||
the point of view of extension writers, configuration from various sources
|
||||
.. note:: You don't need to know this to use WA, or to write plugins for it. From
|
||||
the point of view of plugin writers, configuration from various sources
|
||||
"magically" appears as attributes of their classes. This explanation peels
|
||||
back the curtain and is intended for those who, for one reason or another,
|
||||
need to understand how the magic works.
|
||||
@ -353,7 +360,7 @@ class RunConfiguration(object):
|
||||
config(uration) item
|
||||
|
||||
A single configuration entry or "setting", e.g. the device interface to use. These
|
||||
can be for the run as a whole, or for a specific extension.
|
||||
can be for the run as a whole, or for a specific plugin.
|
||||
|
||||
(workload) spec
|
||||
|
||||
@ -366,7 +373,7 @@ class RunConfiguration(object):
|
||||
There are three types of WA configuration:
|
||||
|
||||
1. "Meta" configuration that determines how the rest of the configuration is
|
||||
processed (e.g. where extensions get loaded from). Since this does not pertain
|
||||
processed (e.g. where plugins get loaded from). Since this does not pertain
|
||||
to *run* configuration, it will not be covered further.
|
||||
2. Global run configuration, e.g. which workloads, result processors and instruments
|
||||
will be enabled for a run.
|
||||
@ -379,16 +386,16 @@ class RunConfiguration(object):
|
||||
Run configuration may appear in a config file (usually ``~/.workload_automation/config.py``),
|
||||
or in the ``config`` section of an agenda. Configuration is specified as a nested structure
|
||||
of dictionaries (associative arrays, or maps) and lists in the syntax following the format
|
||||
implied by the file extension (currently, YAML and Python are supported). If the same
|
||||
implied by the file extension (currently, YAML and Python are supported). If the same
|
||||
configuration item appears in more than one source, they are merged with conflicting entries
|
||||
taking the value from the last source that specified them.
|
||||
|
||||
In addition to a fixed set of global configuration items, configuration for any WA
|
||||
Extension (instrument, result processor, etc) may also be specified, namespaced under
|
||||
the extension's name (i.e. the extensions name is a key in the global config with value
|
||||
being a dict of parameters and their values). Some Extension parameters also specify a
|
||||
Plugin (instrument, result processor, etc) may also be specified, namespaced under
|
||||
the plugin's name (i.e. the plugin's name is a key in the global config with value
|
||||
being a dict of parameters and their values). Some Plugin parameters also specify a
|
||||
"global alias" that may appear at the top-level of the config rather than under the
|
||||
Extension's name. It is *not* an error to specify configuration for an Extension that has
|
||||
Plugin's name. It is *not* an error to specify configuration for a Plugin that has
|
||||
not been enabled for a particular run; such configuration will be ignored.
|
||||
|
||||
|
||||
@ -408,11 +415,11 @@ class RunConfiguration(object):
|
||||
|
||||
**Global parameter aliases**
|
||||
|
||||
As mentioned above, an Extension's parameter may define a global alias, which will be
|
||||
As mentioned above, a Plugin's parameter may define a global alias, which will be
|
||||
specified and picked up from the top-level config, rather than config for that specific
|
||||
extension. It is an error to specify the value for a parameter both through a global
|
||||
alias and through extension config dict in the same configuration file. It is, however,
|
||||
possible to use a global alias in one file, and specify extension configuration for the
|
||||
plugin. It is an error to specify the value for a parameter both through a global
|
||||
alias and through plugin config dict in the same configuration file. It is, however,
|
||||
possible to use a global alias in one file, and specify plugin configuration for the
|
||||
same parameter in another file, in which case, the usual merging rules would apply.
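For example, in a hypothetical ``config.py`` fragment (the plugin and parameter names below are purely illustrative, not taken from this commit), the same parameter can be set in either of two ways, but not both in the same file:

# Namespaced under the plugin's name:
my_instrument = {'sample_rate': 100}

# Or at the top level, via that parameter's global alias:
my_instrument_sample_rate = 100

# Using both forms in one file is a ConfigError; using one form here and the
# other in the agenda is fine, and the usual merging rules then apply.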
|
||||
|
||||
**Loading and validation of configuration**
|
||||
@ -425,50 +432,50 @@ class RunConfiguration(object):
|
||||
This is done by the loading mechanism (e.g. YAML parser), rather than WA itself. WA
|
||||
propagates any errors encountered as ``ConfigError``\ s.
|
||||
- Once a config file is loaded into a Python structure, it is scanned to
|
||||
extract settings. Static configuration is validated and added to the config. Extension
|
||||
extract settings. Static configuration is validated and added to the config. Plugin
|
||||
configuration is collected into a collection of "raw" config, and merged as appropriate, but
|
||||
is not processed further at this stage.
|
||||
- Once all configuration sources have been processed, the configuration as a whole
|
||||
is validated (to make sure there are no missing settings, etc).
|
||||
- Extensions are loaded through the run config object, which instantiates
|
||||
- Plugins are loaded through the run config object, which instantiates
|
||||
them with appropriate parameters based on the "raw" config collected earlier. When an
|
||||
Extension is instantiated in such a way, its config is "officially" added to run configuration
|
||||
Plugin is instantiated in such a way, its config is "officially" added to run configuration
|
||||
tracked by the run config object. Raw config is discarded at the end of the run, so
|
||||
that any config that wasn't loaded in this way is not recorded (as it was not actually used).
|
||||
- Extension parameters a validated individually (for type, value ranges, etc) as they are
|
||||
loaded in the Extension's __init__.
|
||||
- An extension's ``validate()`` method is invoked before it is used (exactly when this
|
||||
happens depends on the extension's type) to perform any final validation *that does not
|
||||
- Plugin parameters are validated individually (for type, value ranges, etc) as they are
|
||||
loaded in the Plugin's __init__.
|
||||
- A plugin's ``validate()`` method (sketched after this list) is invoked before it is used (exactly when this
|
||||
happens depends on the plugin's type) to perform any final validation *that does not
|
||||
rely on the target being present* (i.e. this would happen before WA connects to the target).
|
||||
This can be used perform inter-parameter validation for an extension (e.g. when valid range for
|
||||
This can be used to perform inter-parameter validation for a plugin (e.g. when the valid range for
|
||||
one parameter depends on another), and more general WA state assumptions (e.g. a result
|
||||
processor can check that an instrument it depends on has been installed).
|
||||
- Finally, it is the responsibility of individual extensions to validate any assumptions
|
||||
- Finally, it is the responsibility of individual plugins to validate any assumptions
|
||||
they make about the target device (usually as part of their ``setup()``).
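The following sketch (not taken from the WA source; the plugin and its parameters are invented) shows the kind of ``validate()`` referred to above, performing inter-parameter validation before WA connects to the target:

from wlauto import Instrument, Parameter
from wlauto.exceptions import ConfigError

class SamplerInstrument(Instrument):      # hypothetical plugin
    name = 'sampler_demo'
    parameters = [
        Parameter('period', kind=int, default=10,
                  description='Sampling period in milliseconds.'),
        Parameter('duration', kind=int, default=1000,
                  description='Total sampling duration in milliseconds.'),
    ]

    def validate(self):
        # Runs before the target is contacted; individual parameters have
        # already been type-checked by this point.
        if self.period > self.duration:
            raise ConfigError('period must not exceed duration')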
|
||||
|
||||
**Handling of Extension aliases.**
|
||||
**Handling of Plugin aliases.**
|
||||
|
||||
WA extensions can have zero or more aliases (not to be confused with global aliases for extension
|
||||
*parameters*). An extension allows associating an alternative name for the extension with a set
|
||||
of parameter values. In other words aliases associate common configurations for an extension with
|
||||
WA plugins can have zero or more aliases (not to be confused with global aliases for plugin
|
||||
*parameters*). A plugin allows associating an alternative name for the plugin with a set
|
||||
of parameter values. In other words, aliases associate common configurations for a plugin with
|
||||
a name, providing a shorthand for it. For example, "t-rex_offscreen" is an alias for "glbenchmark"
|
||||
workload that specifies that "use_case" should be "t-rex" and "variant" should be "offscreen".
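Roughly how such an alias is declared on a workload class (a sketch only; the actual glbenchmark workload is not part of this diff, and the parameter defaults below are invented):

from wlauto import Workload, Parameter, Alias

class GlBenchmark(Workload):              # illustrative stand-in
    name = 'glbenchmark'
    parameters = [
        Parameter('use_case', default='egypt'),
        Parameter('variant', default='onscreen'),
    ]
    aliases = [
        Alias('t-rex_offscreen', use_case='t-rex', variant='offscreen'),
    ]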
|
||||
|
||||
**special loading rules**
|
||||
|
||||
Note that as a consequence of being able to specify configuration for *any* Extension namespaced
|
||||
under the Extension's name in the top-level config, two distinct mechanisms exist form configuring
|
||||
Note that as a consequence of being able to specify configuration for *any* Plugin namespaced
|
||||
under the Plugin's name in the top-level config, two distinct mechanisms exist for configuring
|
||||
devices and workloads. This is valid; however, due to their nature, they are handled in a special way.
|
||||
This may be counter-intuitive, so configuring devices and workloads by creating entries for their
|
||||
names in the config is discouraged in favour of using the "normal" mechanisms of configuring them
|
||||
(``device_config`` for devices and workload specs in the agenda for workloads).
|
||||
|
||||
In both cases (devices and workloads), "normal" config will always override named extension config
|
||||
In both cases (devices and workloads), "normal" config will always override named plugin config
|
||||
*irrespective of which file it was specified in*. So an ``adb_name`` specified in ``device_config``
|
||||
inside ``~/.workload_automation/config.py`` will override ``adb_name`` specified for ``juno`` in the
|
||||
agenda (even when device is set to "juno").
|
||||
|
||||
Again, this ignores normal loading rules, so the use of named extension configuration for devices
|
||||
Again, this ignores normal loading rules, so the use of named plugin configuration for devices
|
||||
and workloads is discouraged. There may be some situations where this behaviour is useful, however
|
||||
(e.g. maintaining configuration for different devices in one config file).
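For instance (hypothetical fragments with made-up serial numbers), both of the following sources set ``adb_name`` for a ``juno`` device; the ``device_config`` value wins regardless of which file each appears in:

# ~/.workload_automation/config.py
device = 'juno'
device_config = {'adb_name': '0123456789ABCDEF'}     # this value is used

# agenda config section, shown here as the equivalent Python structure:
config = {'juno': {'adb_name': 'FEDCBA9876543210'}}  # overridden by device_config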
|
||||
|
||||
@ -480,6 +487,8 @@ class RunConfiguration(object):
|
||||
# This is generic top-level configuration.
|
||||
general_config = [
|
||||
RunConfigurationItem('run_name', 'scalar', 'replace'),
|
||||
RunConfigurationItem('output_directory', 'scalar', 'replace'),
|
||||
RunConfigurationItem('meta_directory', 'scalar', 'replace'),
|
||||
RunConfigurationItem('project', 'scalar', 'replace'),
|
||||
RunConfigurationItem('project_stage', 'dict', 'replace'),
|
||||
RunConfigurationItem('execution_order', 'scalar', 'replace'),
|
||||
@ -507,7 +516,7 @@ class RunConfiguration(object):
|
||||
|
||||
# List of names that may be present in configuration (and it is valid for
|
||||
# them to be there) but are not handled by RunConfiguration.
|
||||
ignore_names = ['logging', 'remote_assets_mount_point']
|
||||
ignore_names = WA_CONFIGURATION.keys()
|
||||
|
||||
def get_reboot_policy(self):
|
||||
if not self._reboot_policy:
|
||||
@ -522,6 +531,18 @@ class RunConfiguration(object):
|
||||
|
||||
reboot_policy = property(get_reboot_policy, set_reboot_policy)
|
||||
|
||||
@property
|
||||
def meta_directory(self):
|
||||
path = os.path.join(self.output_directory, "__meta")
|
||||
if not os.path.exists(path):
|
||||
os.makedirs(os.path.abspath(path))
|
||||
return path
|
||||
|
||||
@property
|
||||
def log_file(self):
|
||||
path = os.path.join(self.output_directory, "run.log")
|
||||
return os.path.abspath(path)
|
||||
|
||||
@property
|
||||
def all_instrumentation(self):
|
||||
result = set()
|
||||
@ -529,7 +550,7 @@ class RunConfiguration(object):
|
||||
result = result.union(set(spec.instrumentation))
|
||||
return result
|
||||
|
||||
def __init__(self, ext_loader):
|
||||
def __init__(self, ext_loader=pluginloader):
|
||||
self.ext_loader = ext_loader
|
||||
self.device = None
|
||||
self.device_config = None
|
||||
@ -537,40 +558,42 @@ class RunConfiguration(object):
|
||||
self.project = None
|
||||
self.project_stage = None
|
||||
self.run_name = None
|
||||
self.output_directory = settings.default_output_directory
|
||||
self.instrumentation = {}
|
||||
self.result_processors = {}
|
||||
self.workload_specs = []
|
||||
self.flashing_config = {}
|
||||
self.other_config = {} # keeps track of used config for extensions other than of the four main kinds.
|
||||
self.other_config = {} # keeps track of used config for plugins other than of the four main kinds.
|
||||
self.retry_on_status = status_list(['FAILED', 'PARTIAL'])
|
||||
self.max_retries = 3
|
||||
self._used_config_items = []
|
||||
self._global_instrumentation = []
|
||||
self._reboot_policy = None
|
||||
self._agenda = None
|
||||
self.agenda = None
|
||||
self._finalized = False
|
||||
self._general_config_map = {i.name: i for i in self.general_config}
|
||||
self._workload_config_map = {i.name: i for i in self.workload_config}
|
||||
# Config files may contains static configuration for extensions that
|
||||
# Config files may contain static configuration for plugins that
|
||||
# would not be part of this run (e.g. DB connection settings
|
||||
# for a result processor that has not been enabled). Such settings
|
||||
# should not be part of configuration for this run (as they will not
|
||||
# be affecting it), but we still need to keep track of it in case a later
|
||||
# config (e.g. from the agenda) enables the extension.
|
||||
# For this reason, all extension config is first loaded into the
|
||||
# following dict and when an extension is identified as need for the
|
||||
# config (e.g. from the agenda) enables the plugin.
|
||||
# For this reason, all plugin config is first loaded into the
|
||||
# following dict and when a plugin is identified as needed for the
|
||||
# run, its config is picked up from this "raw" dict and it becomes part
|
||||
# of the run configuration.
|
||||
self._raw_config = {'instrumentation': [], 'result_processors': []}
|
||||
|
||||
def get_extension(self, ext_name, *args):
|
||||
def get_plugin(self, name=None, kind=None, *args, **kwargs):
|
||||
self._check_finalized()
|
||||
self._load_default_config_if_necessary(ext_name)
|
||||
ext_config = self._raw_config[ext_name]
|
||||
ext_cls = self.ext_loader.get_extension_class(ext_name)
|
||||
self._load_default_config_if_necessary(name)
|
||||
ext_config = self._raw_config[name]
|
||||
ext_cls = self.ext_loader.get_plugin_class(name)
|
||||
if ext_cls.kind not in ['workload', 'device', 'instrument', 'result_processor']:
|
||||
self.other_config[ext_name] = ext_config
|
||||
return self.ext_loader.get_extension(ext_name, *args, **ext_config)
|
||||
self.other_config[name] = ext_config
|
||||
ext_config.update(kwargs)
|
||||
return self.ext_loader.get_plugin(name=name, *args, **ext_config)
|
||||
|
||||
def to_dict(self):
|
||||
d = copy(self.__dict__)
|
||||
@ -584,8 +607,8 @@ class RunConfiguration(object):
|
||||
def load_config(self, source):
|
||||
"""Load configuration from the specified source. The source must be
|
||||
either a path to a valid config file or a dict-like object. Currently,
|
||||
config files can be either python modules (.py extension) or YAML documents
|
||||
(.yaml extension)."""
|
||||
config files can be either python modules (.py extension) or YAML documents
|
||||
(.yaml extension)."""
|
||||
if self._finalized:
|
||||
raise ValueError('Attempting to load a config file after run configuration has been finalized.')
|
||||
try:
|
||||
@ -597,15 +620,15 @@ class RunConfiguration(object):
|
||||
|
||||
def set_agenda(self, agenda, selectors=None):
|
||||
"""Set the agenda for this run; Unlike with config files, there can only be one agenda."""
|
||||
if self._agenda:
|
||||
if self.agenda:
|
||||
# note: this also guards against loading an agenda after finalized() has been called,
|
||||
# as that would have required an agenda to be set.
|
||||
message = 'Attempting to set a second agenda {};\n\talready have agenda {} set'
|
||||
raise ValueError(message.format(agenda.filepath, self._agenda.filepath))
|
||||
raise ValueError(message.format(agenda.filepath, self.agenda.filepath))
|
||||
try:
|
||||
self._merge_config(agenda.config or {})
|
||||
self._load_specs_from_agenda(agenda, selectors)
|
||||
self._agenda = agenda
|
||||
self.agenda = agenda
|
||||
except ConfigError as e:
|
||||
message = 'Error in {}:\n\t{}'
|
||||
raise ConfigError(message.format(agenda.filepath, e.message))
|
||||
@ -616,7 +639,7 @@ class RunConfiguration(object):
|
||||
for the run and making sure that all the mandatory config has been specified."""
|
||||
if self._finalized:
|
||||
return
|
||||
if not self._agenda:
|
||||
if not self.agenda:
|
||||
raise ValueError('Attempting to finalize run configuration before an agenda is loaded.')
|
||||
self._finalize_config_list('instrumentation')
|
||||
self._finalize_config_list('result_processors')
|
||||
@ -653,8 +676,8 @@ class RunConfiguration(object):
|
||||
self._resolve_global_alias(k, v)
|
||||
elif k in self._general_config_map:
|
||||
self._set_run_config_item(k, v)
|
||||
elif self.ext_loader.has_extension(k):
|
||||
self._set_extension_config(k, v)
|
||||
elif self.ext_loader.has_plugin(k):
|
||||
self._set_plugin_config(k, v)
|
||||
elif k == 'device_config':
|
||||
self._set_raw_dict(k, v)
|
||||
elif k in ['instrumentation', 'result_processors']:
|
||||
@ -683,7 +706,7 @@ class RunConfiguration(object):
|
||||
combined_value = item.combine(getattr(self, name, None), value)
|
||||
setattr(self, name, combined_value)
|
||||
|
||||
def _set_extension_config(self, name, value):
|
||||
def _set_plugin_config(self, name, value):
|
||||
default_config = self.ext_loader.get_default_config(name)
|
||||
self._set_raw_dict(name, value, default_config)
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
import string
|
||||
from collections import OrderedDict
|
||||
|
||||
from wlauto.core.extension import Extension, Parameter
|
||||
from wlauto.core.plugin import Plugin, Parameter
|
||||
from wlauto.exceptions import ConfigError
|
||||
from wlauto.utils.types import list_of_integers, list_of, caseless_string
|
||||
|
||||
@ -135,8 +135,9 @@ class TargetInfo(object):
|
||||
return pod
|
||||
|
||||
|
||||
class DeviceManager(Extension):
|
||||
class DeviceManager(Plugin):
|
||||
|
||||
kind = "manager"
|
||||
name = None
|
||||
target_type = None
|
||||
platform_type = Platform
|
||||
|
@ -21,14 +21,15 @@ import os
|
||||
import subprocess
|
||||
import warnings
|
||||
|
||||
from wlauto.core.bootstrap import settings
|
||||
from wlauto.core.extension_loader import ExtensionLoader
|
||||
from wlauto.exceptions import WAError, ConfigError
|
||||
from wlauto.core.config.core import settings
|
||||
from wlauto.core import pluginloader
|
||||
from wlauto.exceptions import WAError
|
||||
from wlauto.utils.misc import get_traceback
|
||||
from wlauto.utils.log import init_logging
|
||||
from wlauto.utils.cli import init_argument_parser
|
||||
from wlauto.utils.doc import format_body
|
||||
|
||||
from devlib import DevlibError
|
||||
|
||||
warnings.filterwarnings(action='ignore', category=UserWarning, module='zope')
|
||||
|
||||
@ -37,9 +38,10 @@ logger = logging.getLogger('command_line')
|
||||
|
||||
|
||||
def load_commands(subparsers):
|
||||
ext_loader = ExtensionLoader(paths=settings.extension_paths)
|
||||
for command in ext_loader.list_commands():
|
||||
settings.commands[command.name] = ext_loader.get_command(command.name, subparsers=subparsers)
|
||||
commands = {}
|
||||
for command in pluginloader.list_commands():
|
||||
commands[command.name] = pluginloader.get_command(command.name, subparsers=subparsers)
|
||||
return commands
|
||||
|
||||
|
||||
def main():
|
||||
@ -52,23 +54,24 @@ def main():
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
)
|
||||
init_argument_parser(parser)
|
||||
load_commands(parser.add_subparsers(dest='command')) # each command will add its own subparser
|
||||
commands = load_commands(parser.add_subparsers(dest='command')) # each command will add its own subparser
|
||||
args = parser.parse_args()
|
||||
settings.verbosity = args.verbose
|
||||
settings.debug = args.debug
|
||||
settings.set("verbosity", args.verbose)
|
||||
settings.load_user_config()
|
||||
#settings.debug = args.debug
|
||||
if args.config:
|
||||
if not os.path.exists(args.config):
|
||||
raise ConfigError("Config file {} not found".format(args.config))
|
||||
settings.update(args.config)
|
||||
settings.load_config_file(args.config)
|
||||
init_logging(settings.verbosity)
|
||||
|
||||
command = settings.commands[args.command]
|
||||
command = commands[args.command]
|
||||
sys.exit(command.execute(args))
|
||||
|
||||
except KeyboardInterrupt:
|
||||
logging.info('Got CTRL-C. Aborting.')
|
||||
sys.exit(3)
|
||||
except WAError as e:
|
||||
except (WAError, DevlibError) as e:
|
||||
logging.critical(e)
|
||||
sys.exit(1)
|
||||
except subprocess.CalledProcessError as e:
|
||||
|
@ -49,10 +49,10 @@ from itertools import izip_longest
|
||||
|
||||
import wlauto.core.signal as signal
|
||||
from wlauto.core import instrumentation
|
||||
from wlauto.core.bootstrap import settings
|
||||
from wlauto.core.extension import Artifact
|
||||
from wlauto.core.config.core import settings
|
||||
from wlauto.core.plugin import Artifact
|
||||
from wlauto.core.configuration import RunConfiguration
|
||||
from wlauto.core.extension_loader import ExtensionLoader
|
||||
from wlauto.core import pluginloader
|
||||
from wlauto.core.resolver import ResourceResolver
|
||||
from wlauto.core.result import ResultManager, IterationResult, RunResult
|
||||
from wlauto.exceptions import (WAError, ConfigError, TimeoutError, InstrumentError,
|
||||
@ -85,7 +85,7 @@ class RunInfo(object):
|
||||
self.duration = None
|
||||
self.project = config.project
|
||||
self.project_stage = config.project_stage
|
||||
self.run_name = config.run_name or "{}_{}".format(os.path.split(settings.output_directory)[1],
|
||||
self.run_name = config.run_name or "{}_{}".format(os.path.split(config.output_directory)[1],
|
||||
datetime.utcnow().strftime("%Y-%m-%d_%H-%M-%S"))
|
||||
self.notes = None
|
||||
self.device_properties = {}
|
||||
@ -153,21 +153,21 @@ class ExecutionContext(object):
|
||||
self.last_error = None
|
||||
self.run_info = None
|
||||
self.run_result = None
|
||||
self.run_output_directory = settings.output_directory
|
||||
self.host_working_directory = settings.meta_directory
|
||||
self.run_output_directory = self.config.output_directory
|
||||
self.host_working_directory = self.config.meta_directory
|
||||
self.iteration_artifacts = None
|
||||
self.run_artifacts = copy(self.default_run_artifacts)
|
||||
self.job_iteration_counts = defaultdict(int)
|
||||
self.aborted = False
|
||||
self.runner = None
|
||||
if settings.agenda:
|
||||
if config.agenda:
|
||||
self.run_artifacts.append(Artifact('agenda',
|
||||
os.path.join(self.host_working_directory,
|
||||
os.path.basename(settings.agenda)),
|
||||
os.path.basename(config.agenda.filepath)),
|
||||
'meta',
|
||||
mandatory=True,
|
||||
description='Agenda for this run.'))
|
||||
for i, filepath in enumerate(settings.loaded_files, 1):
|
||||
for i, filepath in enumerate(settings.config_paths, 1):
|
||||
name = 'config_{}'.format(i)
|
||||
path = os.path.join(self.host_working_directory,
|
||||
name + os.path.splitext(filepath)[1])
|
||||
@ -253,12 +253,12 @@ class Executor(object):
|
||||
"""
|
||||
# pylint: disable=R0915
|
||||
|
||||
def __init__(self):
|
||||
def __init__(self, config):
|
||||
self.logger = logging.getLogger('Executor')
|
||||
self.error_logged = False
|
||||
self.warning_logged = False
|
||||
self.config = None
|
||||
self.ext_loader = None
|
||||
self.config = config
|
||||
pluginloader = None
|
||||
self.device_manager = None
|
||||
self.device = None
|
||||
self.context = None
|
||||
@ -287,23 +287,18 @@ class Executor(object):
|
||||
signal.connect(self._warning_signalled_callback, signal.WARNING_LOGGED)
|
||||
|
||||
self.logger.info('Initializing')
|
||||
self.ext_loader = ExtensionLoader(packages=settings.extension_packages,
|
||||
paths=settings.extension_paths)
|
||||
|
||||
self.logger.debug('Loading run configuration.')
|
||||
self.config = RunConfiguration(self.ext_loader)
|
||||
for filepath in settings.get_config_paths():
|
||||
self.config.load_config(filepath)
|
||||
self.config.set_agenda(agenda, selectors)
|
||||
self.config.finalize()
|
||||
config_outfile = os.path.join(settings.meta_directory, 'run_config.json')
|
||||
config_outfile = os.path.join(self.config.meta_directory, 'run_config.json')
|
||||
with open(config_outfile, 'w') as wfh:
|
||||
self.config.serialize(wfh)
|
||||
|
||||
self.logger.debug('Initialising device configuration.')
|
||||
if not self.config.device:
|
||||
raise ConfigError('Make sure a device is specified in the config.')
|
||||
self.device_manager = self.ext_loader.get_device_manager(self.config.device, **self.config.device_config)
|
||||
self.device_manager = pluginloader.get_manager(self.config.device, **self.config.device_config)
|
||||
self.device_manager.validate()
|
||||
self.device = self.device_manager.target
|
||||
|
||||
@ -316,20 +311,20 @@ class Executor(object):
|
||||
|
||||
self.logger.debug('Installing instrumentation')
|
||||
for name, params in self.config.instrumentation.iteritems():
|
||||
instrument = self.ext_loader.get_instrument(name, self.device, **params)
|
||||
instrument = pluginloader.get_instrument(name, self.device, **params)
|
||||
instrumentation.install(instrument)
|
||||
instrumentation.validate()
|
||||
|
||||
self.logger.debug('Installing result processors')
|
||||
result_manager = ResultManager()
|
||||
for name, params in self.config.result_processors.iteritems():
|
||||
processor = self.ext_loader.get_result_processor(name, **params)
|
||||
processor = pluginloader.get_result_processor(name, **params)
|
||||
result_manager.install(processor)
|
||||
result_manager.validate()
|
||||
|
||||
self.logger.debug('Loading workload specs')
|
||||
for workload_spec in self.config.workload_specs:
|
||||
workload_spec.load(self.device, self.ext_loader)
|
||||
workload_spec.load(self.device, pluginloader)
|
||||
workload_spec.workload.init_resources(self.context)
|
||||
workload_spec.workload.validate()
|
||||
|
||||
|
@ -1,403 +0,0 @@
|
||||
# Copyright 2013-2015 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
|
||||
import os
|
||||
import sys
|
||||
import inspect
|
||||
import imp
|
||||
import string
|
||||
import logging
|
||||
from functools import partial
|
||||
from collections import OrderedDict
|
||||
|
||||
from wlauto.core.bootstrap import settings
|
||||
from wlauto.core.extension import Extension
|
||||
from wlauto.exceptions import NotFoundError, LoaderError
|
||||
from wlauto.utils.misc import walk_modules, load_class, merge_lists, merge_dicts, get_article
|
||||
from wlauto.utils.types import identifier
|
||||
|
||||
|
||||
MODNAME_TRANS = string.maketrans(':/\\.', '____')
|
||||
|
||||
|
||||
class ExtensionLoaderItem(object):
|
||||
|
||||
def __init__(self, ext_tuple):
|
||||
self.name = ext_tuple.name
|
||||
self.default_package = ext_tuple.default_package
|
||||
self.default_path = ext_tuple.default_path
|
||||
self.cls = load_class(ext_tuple.cls)
|
||||
|
||||
|
||||
class GlobalParameterAlias(object):
|
||||
"""
|
||||
Represents a "global alias" for an extension parameter. A global alias
|
||||
is specified at the top-level of config rather namespaced under an extension
|
||||
name.
|
||||
|
||||
Multiple extensions may have parameters with the same global_alias if they are
|
||||
part of the same inheritance hierarchy and one parameter is an override of the
|
||||
other. This class keeps track of all such cases in its extensions dict.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
self.extensions = {}
|
||||
|
||||
def iteritems(self):
|
||||
for ext in self.extensions.itervalues():
|
||||
yield (self.get_param(ext), ext)
|
||||
|
||||
def get_param(self, ext):
|
||||
for param in ext.parameters:
|
||||
if param.global_alias == self.name:
|
||||
return param
|
||||
message = 'Extension {} does not have a parameter with global alias {}'
|
||||
raise ValueError(message.format(ext.name, self.name))
|
||||
|
||||
def update(self, other_ext):
|
||||
self._validate_ext(other_ext)
|
||||
self.extensions[other_ext.name] = other_ext
|
||||
|
||||
def _validate_ext(self, other_ext):
|
||||
other_param = self.get_param(other_ext)
|
||||
for param, ext in self.iteritems():
|
||||
if ((not (issubclass(ext, other_ext) or issubclass(other_ext, ext))) and
|
||||
other_param.kind != param.kind):
|
||||
message = 'Duplicate global alias {} declared in {} and {} extensions with different types'
|
||||
raise LoaderError(message.format(self.name, ext.name, other_ext.name))
|
||||
if param.kind != other_param.kind:
|
||||
message = 'Two params {} in {} and {} in {} both declare global alias {}, and are of different kinds'
|
||||
raise LoaderError(message.format(param.name, ext.name,
|
||||
other_param.name, other_ext.name, self.name))
|
||||
|
||||
def __str__(self):
|
||||
text = 'GlobalAlias({} => {})'
|
||||
extlist = ', '.join(['{}.{}'.format(e.name, p.name) for p, e in self.iteritems()])
|
||||
return text.format(self.name, extlist)
|
||||
|
||||
|
||||
class ExtensionLoader(object):
|
||||
"""
|
||||
Discovers, enumerates and loads available devices, configs, etc.
|
||||
The loader will attempt to discover things on construction by looking
|
||||
in predetermined set of locations defined by default_paths. Optionally,
|
||||
additional locations may specified through paths parameter that must
|
||||
be a list of additional Python module paths (i.e. dot-delimited).
|
||||
|
||||
"""
|
||||
|
||||
_instance = None
|
||||
|
||||
# Singleton
|
||||
def __new__(cls, *args, **kwargs):
|
||||
if not cls._instance:
|
||||
cls._instance = super(ExtensionLoader, cls).__new__(cls, *args, **kwargs)
|
||||
else:
|
||||
for k, v in kwargs.iteritems():
|
||||
if not hasattr(cls._instance, k):
|
||||
raise ValueError('Invalid parameter for ExtensionLoader: {}'.format(k))
|
||||
setattr(cls._instance, k, v)
|
||||
return cls._instance
|
||||
|
||||
def set_load_defaults(self, value):
|
||||
self._load_defaults = value
|
||||
if value:
|
||||
self.packages = merge_lists(self.default_packages, self.packages, duplicates='last')
|
||||
|
||||
def get_load_defaults(self):
|
||||
return self._load_defaults
|
||||
|
||||
load_defaults = property(get_load_defaults, set_load_defaults)
|
||||
|
||||
def __init__(self, packages=None, paths=None, ignore_paths=None, keep_going=False, load_defaults=True):
|
||||
"""
|
||||
params::
|
||||
|
||||
:packages: List of packages to load extensions from.
|
||||
:paths: List of paths to be searched for Python modules containing
|
||||
WA extensions.
|
||||
:ignore_paths: List of paths to ignore when search for WA extensions (these would
|
||||
typically be subdirectories of one or more locations listed in
|
||||
``paths`` parameter.
|
||||
:keep_going: Specifies whether to keep going if an error occurs while loading
|
||||
extensions.
|
||||
:load_defaults: Specifies whether extension should be loaded from default locations
|
||||
(WA package, and user's WA directory) as well as the packages/paths
|
||||
specified explicitly in ``packages`` and ``paths`` parameters.
|
||||
|
||||
"""
|
||||
self._load_defaults = None
|
||||
self.logger = logging.getLogger('ExtensionLoader')
|
||||
self.keep_going = keep_going
|
||||
self.extension_kinds = {ext_tuple.name: ExtensionLoaderItem(ext_tuple)
|
||||
for ext_tuple in settings.extensions}
|
||||
self.default_packages = [ext.default_package for ext in self.extension_kinds.values()]
|
||||
|
||||
self.packages = packages or []
|
||||
self.load_defaults = load_defaults
|
||||
self.paths = paths or []
|
||||
self.ignore_paths = ignore_paths or []
|
||||
self.extensions = {}
|
||||
self.aliases = {}
|
||||
self.global_param_aliases = {}
|
||||
# create an empty dict for each extension type to store discovered
|
||||
# extensions.
|
||||
for ext in self.extension_kinds.values():
|
||||
setattr(self, '_' + ext.name, {})
|
||||
self._load_from_packages(self.packages)
|
||||
self._load_from_paths(self.paths, self.ignore_paths)
|
||||
|
||||
def update(self, packages=None, paths=None, ignore_paths=None):
|
||||
""" Load extensions from the specified paths/packages
|
||||
without clearing or reloading existing extension. """
|
||||
if packages:
|
||||
self.packages.extend(packages)
|
||||
self._load_from_packages(packages)
|
||||
if paths:
|
||||
self.paths.extend(paths)
|
||||
self.ignore_paths.extend(ignore_paths or [])
|
||||
self._load_from_paths(paths, ignore_paths or [])
|
||||
|
||||
def clear(self):
|
||||
""" Clear all discovered items. """
|
||||
self.extensions.clear()
|
||||
for ext in self.extension_kinds.values():
|
||||
self._get_store(ext).clear()
|
||||
|
||||
def reload(self):
|
||||
""" Clear all discovered items and re-run the discovery. """
|
||||
self.clear()
|
||||
self._load_from_packages(self.packages)
|
||||
self._load_from_paths(self.paths, self.ignore_paths)
|
||||
|
||||
def get_extension_class(self, name, kind=None):
|
||||
"""
|
||||
Return the class for the specified extension if found or raises ``ValueError``.
|
||||
|
||||
"""
|
||||
name, _ = self.resolve_alias(name)
|
||||
if kind is None:
|
||||
return self.extensions[name]
|
||||
ext = self.extension_kinds.get(kind)
|
||||
if ext is None:
|
||||
raise ValueError('Unknown extension type: {}'.format(kind))
|
||||
store = self._get_store(ext)
|
||||
if name not in store:
|
||||
raise NotFoundError('Extensions {} is not {} {}.'.format(name, get_article(kind), kind))
|
||||
return store[name]
|
||||
|
||||
def get_extension(self, name, *args, **kwargs):
|
||||
"""
|
||||
Return extension of the specified kind with the specified name. Any additional
|
||||
parameters will be passed to the extension's __init__.
|
||||
|
||||
"""
|
||||
name, base_kwargs = self.resolve_alias(name)
|
||||
kind = kwargs.pop('kind', None)
|
||||
kwargs = merge_dicts(base_kwargs, kwargs, list_duplicates='last', dict_type=OrderedDict)
|
||||
cls = self.get_extension_class(name, kind)
|
||||
extension = _instantiate(cls, args, kwargs)
|
||||
extension.load_modules(self)
|
||||
return extension
|
||||
|
||||
def get_default_config(self, ext_name):
|
||||
"""
|
||||
Returns the default configuration for the specified extension name. The name may be an alias,
|
||||
in which case, the returned config will be augmented with appropriate alias overrides.
|
||||
|
||||
"""
|
||||
real_name, alias_config = self.resolve_alias(ext_name)
|
||||
base_default_config = self.get_extension_class(real_name).get_default_config()
|
||||
return merge_dicts(base_default_config, alias_config, list_duplicates='last', dict_type=OrderedDict)
|
||||
|
||||
def list_extensions(self, kind=None):
|
||||
"""
|
||||
List discovered extension classes. Optionally, only list extensions of a
|
||||
particular type.
|
||||
|
||||
"""
|
||||
if kind is None:
|
||||
return self.extensions.values()
|
||||
if kind not in self.extension_kinds:
|
||||
raise ValueError('Unknown extension type: {}'.format(kind))
|
||||
return self._get_store(self.extension_kinds[kind]).values()
|
||||
|
||||
def has_extension(self, name, kind=None):
|
||||
"""
|
||||
Returns ``True`` if an extensions with the specified ``name`` has been
|
||||
discovered by the loader. If ``kind`` was specified, only returns ``True``
|
||||
if the extension has been found, *and* it is of the specified kind.
|
||||
|
||||
"""
|
||||
try:
|
||||
self.get_extension_class(name, kind)
|
||||
return True
|
||||
except NotFoundError:
|
||||
return False
|
||||
|
||||
def resolve_alias(self, alias_name):
|
||||
"""
|
||||
Try to resolve the specified name as an extension alias. Returns a
|
||||
two-tuple, the first value of which is actual extension name, and the
|
||||
second is a dict of parameter values for this alias. If the name passed
|
||||
is already an extension name, then the result is ``(alias_name, {})``.
|
||||
|
||||
"""
|
||||
alias_name = identifier(alias_name.lower())
|
||||
if alias_name in self.extensions:
|
||||
return (alias_name, {})
|
||||
if alias_name in self.aliases:
|
||||
alias = self.aliases[alias_name]
|
||||
return (alias.extension_name, alias.params)
|
||||
raise NotFoundError('Could not find extension or alias "{}"'.format(alias_name))
|
||||
|
||||
# Internal methods.
|
||||
|
||||
def __getattr__(self, name):
|
||||
"""
|
||||
This resolves methods for specific extensions types based on corresponding
|
||||
generic extension methods. So it's possible to say things like ::
|
||||
|
||||
loader.get_device('foo')
|
||||
|
||||
instead of ::
|
||||
|
||||
loader.get_extension('foo', kind='device')
|
||||
|
||||
"""
|
||||
if name.startswith('get_'):
|
||||
name = name.replace('get_', '', 1)
|
||||
if name in self.extension_kinds:
|
||||
return partial(self.get_extension, kind=name)
|
||||
if name.startswith('list_'):
|
||||
name = name.replace('list_', '', 1).rstrip('s')
|
||||
if name in self.extension_kinds:
|
||||
return partial(self.list_extensions, kind=name)
|
||||
if name.startswith('has_'):
|
||||
name = name.replace('has_', '', 1)
|
||||
if name in self.extension_kinds:
|
||||
return partial(self.has_extension, kind=name)
|
||||
raise AttributeError(name)
|
||||
|
||||
def _get_store(self, ext):
|
||||
name = getattr(ext, 'name', ext)
|
||||
return getattr(self, '_' + name)
|
||||
|
||||
def _load_from_packages(self, packages):
|
||||
try:
|
||||
for package in packages:
|
||||
for module in walk_modules(package):
|
||||
self._load_module(module)
|
||||
except ImportError as e:
|
||||
message = 'Problem loading extensions from package {}: {}'
|
||||
raise LoaderError(message.format(package, e.message))
|
||||
|
||||
def _load_from_paths(self, paths, ignore_paths):
|
||||
self.logger.debug('Loading from paths.')
|
||||
for path in paths:
|
||||
self.logger.debug('Checking path %s', path)
|
||||
for root, _, files in os.walk(path, followlinks=True):
|
||||
should_skip = False
|
||||
for igpath in ignore_paths:
|
||||
if root.startswith(igpath):
|
||||
should_skip = True
|
||||
break
|
||||
if should_skip:
|
||||
continue
|
||||
for fname in files:
|
||||
if os.path.splitext(fname)[1].lower() != '.py':
|
||||
continue
|
||||
filepath = os.path.join(root, fname)
|
||||
try:
|
||||
modname = os.path.splitext(filepath[1:])[0].translate(MODNAME_TRANS)
|
||||
module = imp.load_source(modname, filepath)
|
||||
self._load_module(module)
|
||||
except (SystemExit, ImportError), e:
|
||||
if self.keep_going:
|
||||
self.logger.warn('Failed to load {}'.format(filepath))
|
||||
self.logger.warn('Got: {}'.format(e))
|
||||
else:
|
||||
raise LoaderError('Failed to load {}'.format(filepath), sys.exc_info())
|
||||
except Exception as e:
|
||||
message = 'Problem loading extensions from {}: {}'
|
||||
raise LoaderError(message.format(filepath, e))
|
||||
|
||||
def _load_module(self, module): # NOQA pylint: disable=too-many-branches
|
||||
self.logger.debug('Checking module %s', module.__name__)
|
||||
for obj in vars(module).itervalues():
|
||||
if inspect.isclass(obj):
|
||||
if not issubclass(obj, Extension) or not hasattr(obj, 'name') or not obj.name:
|
||||
continue
|
||||
try:
|
||||
for ext in self.extension_kinds.values():
|
||||
if issubclass(obj, ext.cls):
|
||||
self._add_found_extension(obj, ext)
|
||||
break
|
||||
else: # did not find a matching Extension type
|
||||
message = 'Unknown extension type for {} (type: {})'
|
||||
raise LoaderError(message.format(obj.name, obj.__class__.__name__))
|
||||
except LoaderError as e:
|
||||
if self.keep_going:
|
||||
self.logger.warning(e)
|
||||
else:
|
||||
raise e
|
||||
|
||||
def _add_found_extension(self, obj, ext):
|
||||
"""
|
||||
:obj: Found extension class
|
||||
:ext: matching extension item.
|
||||
"""
|
||||
self.logger.debug('\tAdding %s %s', ext.name, obj.name)
|
||||
key = identifier(obj.name.lower())
|
||||
obj.kind = ext.name
|
||||
if key in self.extensions or key in self.aliases:
|
||||
raise LoaderError('{} {} already exists.'.format(ext.name, obj.name))
|
||||
# Extensions are tracked both, in a common extensions
|
||||
# dict, and in per-extension kind dict (as retrieving
|
||||
# extensions by kind is a common use case.
|
||||
self.extensions[key] = obj
|
||||
store = self._get_store(ext)
|
||||
store[key] = obj
|
||||
for alias in obj.aliases:
|
||||
alias_id = identifier(alias.name)
|
||||
if alias_id in self.extensions or alias_id in self.aliases:
|
||||
raise LoaderError('{} {} already exists.'.format(ext.name, obj.name))
|
||||
self.aliases[alias_id] = alias
|
||||
|
||||
# Update global aliases list. If a global alias is already in the list,
|
||||
# then make sure this extension is in the same parent/child hierarchy
|
||||
# as the one already found.
|
||||
for param in obj.parameters:
|
||||
if param.global_alias:
|
||||
if param.global_alias not in self.global_param_aliases:
|
||||
ga = GlobalParameterAlias(param.global_alias)
|
||||
ga.update(obj)
|
||||
self.global_param_aliases[ga.name] = ga
|
||||
else: # global alias already exists.
|
||||
self.global_param_aliases[param.global_alias].update(obj)
|
||||
|
||||
|
||||
# Utility functions.
|
||||
|
||||
def _instantiate(cls, args=None, kwargs=None):
|
||||
args = [] if args is None else args
|
||||
kwargs = {} if kwargs is None else kwargs
|
||||
try:
|
||||
return cls(*args, **kwargs)
|
||||
except Exception:
|
||||
raise LoaderError('Could not load {}'.format(cls), sys.exc_info())
|
@ -15,21 +15,18 @@
|
||||
|
||||
|
||||
# Separate module to avoid circular dependencies
|
||||
from wlauto.core.bootstrap import settings
|
||||
from wlauto.core.extension import Extension
|
||||
from wlauto.core.config.core import settings
|
||||
from wlauto.core.plugin import Plugin
|
||||
from wlauto.utils.misc import load_class
|
||||
from wlauto.core import pluginloader
|
||||
|
||||
|
||||
_extension_bases = {ext.name: load_class(ext.cls) for ext in settings.extensions}
|
||||
|
||||
|
||||
def get_extension_type(ext):
|
||||
"""Given an instance of ``wlauto.core.Extension``, return a string representing
|
||||
the type of the extension (e.g. ``'workload'`` for a Workload subclass instance)."""
|
||||
if not isinstance(ext, Extension):
|
||||
raise ValueError('{} is not an instance of Extension'.format(ext))
|
||||
for name, cls in _extension_bases.iteritems():
|
||||
def get_plugin_type(ext):
|
||||
"""Given an instance of ``wlauto.core.Plugin``, return a string representing
|
||||
the type of the plugin (e.g. ``'workload'`` for a Workload subclass instance)."""
|
||||
if not isinstance(ext, Plugin):
|
||||
raise ValueError('{} is not an instance of Plugin'.format(ext))
|
||||
for name, cls in pluginloader.kind_map.iteritems():
|
||||
if isinstance(ext, cls):
|
||||
return name
|
||||
raise ValueError('Unknown extension type: {}'.format(ext.__class__.__name__))
|
||||
|
||||
raise ValueError('Unknown plugin type: {}'.format(ext.__class__.__name__))
|
||||
|
@ -103,7 +103,7 @@ import inspect
|
||||
from collections import OrderedDict
|
||||
|
||||
import wlauto.core.signal as signal
|
||||
from wlauto.core.extension import Extension
|
||||
from wlauto.core.plugin import Plugin
|
||||
from wlauto.exceptions import WAError, DeviceNotRespondingError, TimeoutError
|
||||
from wlauto.utils.misc import get_traceback, isiterable
|
||||
from wlauto.utils.types import identifier
|
||||
@ -374,10 +374,11 @@ def get_disabled():
|
||||
return [i for i in installed if not i.is_enabled]
|
||||
|
||||
|
||||
class Instrument(Extension):
|
||||
class Instrument(Plugin):
|
||||
"""
|
||||
Base class for instrumentation implementations.
|
||||
"""
|
||||
kind = "instrument"
|
||||
|
||||
def __init__(self, device, **kwargs):
|
||||
super(Instrument, self).__init__(**kwargs)
|
||||
@ -396,4 +397,3 @@ class Instrument(Extension):
|
||||
|
||||
def __repr__(self):
|
||||
return 'Instrument({})'.format(self.name)
|
||||
|
||||
|
@ -16,20 +16,27 @@
|
||||
|
||||
# pylint: disable=E1101
|
||||
import os
|
||||
import logging
|
||||
import sys
|
||||
import inspect
|
||||
import imp
|
||||
import string
|
||||
import logging
|
||||
from collections import OrderedDict, defaultdict
|
||||
from itertools import chain
|
||||
from copy import copy
|
||||
from collections import OrderedDict
|
||||
|
||||
from wlauto.core.bootstrap import settings
|
||||
from wlauto.exceptions import ValidationError, ConfigError
|
||||
from wlauto.utils.misc import isiterable, ensure_directory_exists as _d, get_article
|
||||
from wlauto.exceptions import NotFoundError, LoaderError, ValidationError, ConfigError
|
||||
from wlauto.utils.misc import isiterable, ensure_directory_exists as _d, walk_modules, load_class, merge_dicts, get_article
|
||||
from wlauto.core.config.core import settings
|
||||
from wlauto.utils.types import identifier, integer, boolean
|
||||
from wlauto.core.config.core import ConfigurationPoint, ConfigurationPointCollection
|
||||
|
||||
MODNAME_TRANS = string.maketrans(':/\\.', '____')
|
||||
|
||||
|
||||
class AttributeCollection(object):
|
||||
"""
|
||||
Accumulator for extension attribute objects (such as Parameters or Artifacts). This will
|
||||
Accumulator for plugin attribute objects (such as Parameters or Artifacts). This will
|
||||
replace any class member list accumulating such attributes through the magic of
|
||||
metaprogramming\ [*]_.
|
||||
|
||||
@ -41,10 +48,9 @@ class AttributeCollection(object):
|
||||
def values(self):
|
||||
return self._attrs.values()
|
||||
|
||||
def __init__(self, attrcls, owner):
|
||||
def __init__(self, attrcls):
|
||||
self._attrcls = attrcls
|
||||
self._attrs = OrderedDict()
|
||||
self.owner = owner
|
||||
|
||||
def add(self, p):
|
||||
p = self._to_attrcls(p)
|
||||
@ -55,7 +61,7 @@ class AttributeCollection(object):
|
||||
if v is not None:
|
||||
setattr(newp, a, v)
|
||||
if not hasattr(newp, "_overridden"):
|
||||
newp._overridden = self.owner # pylint: disable=protected-access
|
||||
newp._overridden = p._owner
|
||||
self._attrs[p.name] = newp
|
||||
else:
|
||||
# Duplicate attribute condition is checked elsewhere.
|
||||
@ -71,6 +77,7 @@ class AttributeCollection(object):
|
||||
__repr__ = __str__
|
||||
|
||||
def _to_attrcls(self, p):
|
||||
old_owner = getattr(p, "_owner", None)
|
||||
if isinstance(p, basestring):
|
||||
p = self._attrcls(p)
|
||||
elif isinstance(p, tuple) or isinstance(p, list):
|
||||
@ -82,15 +89,11 @@ class AttributeCollection(object):
|
||||
if (p.name in self._attrs and not p.override and
|
||||
p.name != 'modules'): # TODO: HACK due to "diamond dependecy" in workloads...
|
||||
raise ValueError('Attribute {} has already been defined.'.format(p.name))
|
||||
p._owner = old_owner
|
||||
return p
|
||||
|
||||
def __iadd__(self, other):
|
||||
other = [self._to_attrcls(p) for p in other]
|
||||
names = []
|
||||
for p in other:
|
||||
if p.name in names:
|
||||
raise ValueError("Duplicate '{}' {}".format(p.name, p.__class__.__name__.split('.')[-1]))
|
||||
names.append(p.name)
|
||||
self.add(p)
|
||||
return self
|
||||
|
||||
@ -110,7 +113,7 @@ class AttributeCollection(object):
|
||||
class AliasCollection(AttributeCollection):
|
||||
|
||||
def __init__(self):
|
||||
super(AliasCollection, self).__init__(Alias, None)
|
||||
super(AliasCollection, self).__init__(Alias)
|
||||
|
||||
def _to_attrcls(self, p):
|
||||
if isinstance(p, tuple) or isinstance(p, list):
|
||||
@ -125,158 +128,57 @@ class AliasCollection(AttributeCollection):
|
||||
|
||||
class ListCollection(list):
|
||||
|
||||
def __init__(self, attrcls, owner): # pylint: disable=unused-argument
|
||||
def __init__(self, attrcls): # pylint: disable=unused-argument
|
||||
super(ListCollection, self).__init__()
|
||||
self.owner = owner
|
||||
|
||||
|
||||
class Param(object):
|
||||
class Parameter(ConfigurationPoint):
|
||||
|
||||
is_runtime = False
|
||||
|
||||
def __init__(self, name,
|
||||
kind=None,
|
||||
mandatory=None,
|
||||
default=None,
|
||||
override=False,
|
||||
allowed_values=None,
|
||||
description=None,
|
||||
constraint=None,
|
||||
convert_types=True,
|
||||
global_alias=None,
|
||||
reconfigurable=True):
|
||||
"""
|
||||
This is a generic parameter for an extension. Extensions instantiate this to declare which parameters
|
||||
are supported.
|
||||
:param global_alias: This is an alternative alias for this parameter,
|
||||
unlike the name, this alias will not be
|
||||
namespaced under the owning extension's name
|
||||
(hence the global part). This is introduced
|
||||
primarily for backward compatibility -- so that
|
||||
old extension settings names still work. This
|
||||
should not be used for new parameters.
|
||||
|
||||
:param reconfigurable: This indicated whether this parameter may be
|
||||
reconfigured during the run (e.g. between different
|
||||
iterations). This determines where in run configruation
|
||||
this parameter may appear.
|
||||
|
||||
For other parameters, see docstring for
|
||||
``wa.framework.config.core.ConfigurationPoint``
|
||||
|
||||
"""
|
||||
|
||||
# Mapping for kind conversion; see docs for convert_types below
|
||||
kind_map = {
|
||||
int: integer,
|
||||
bool: boolean,
|
||||
}
|
||||
|
||||
def __init__(self, name, kind=None, mandatory=None, default=None, override=False,
|
||||
allowed_values=None, description=None, constraint=None, global_alias=None, convert_types=True):
|
||||
"""
|
||||
Create a new Parameter object.
|
||||
|
||||
:param name: The name of the parameter. This will become an instance member of the
|
||||
extension object to which the parameter is applied, so it must be a valid
|
||||
python identifier. This is the only mandatory parameter.
|
||||
:param kind: The type of parameter this is. This must be a callable that takes an arbitrary
|
||||
object and converts it to the expected type, or raised ``ValueError`` if such
|
||||
conversion is not possible. Most Python standard types -- ``str``, ``int``, ``bool``, etc. --
|
||||
can be used here. This defaults to ``str`` if not specified.
|
||||
:param mandatory: If set to ``True``, then a non-``None`` value for this parameter *must* be
|
||||
provided on extension object construction, otherwise ``ConfigError`` will be
|
||||
raised.
|
||||
:param default: The default value for this parameter. If no value is specified on extension
|
||||
construction, this value will be used instead. (Note: if this is specified and
|
||||
is not ``None``, then ``mandatory`` parameter will be ignored).
|
||||
:param override: A ``bool`` that specifies whether a parameter of the same name further up the
|
||||
hierarchy should be overridden. If this is ``False`` (the default), an exception
|
||||
will be raised by the ``AttributeCollection`` instead.
|
||||
:param allowed_values: This should be the complete list of allowed values for this parameter.
|
||||
Note: ``None`` value will always be allowed, even if it is not in this list.
|
||||
If you want to disallow ``None``, set ``mandatory`` to ``True``.
|
||||
:param constraint: If specified, this must be a callable that takes the parameter value
|
||||
as an argument and return a boolean indicating whether the constraint
|
||||
has been satisfied. Alternatively, can be a two-tuple with said callable as
|
||||
the first element and a string describing the constraint as the second.
|
||||
:param global_alias: This is an alternative alias for this parameter, unlike the name, this
|
||||
alias will not be namespaced under the owning extension's name (hence the
|
||||
global part). This is introduced primarily for backward compatibility -- so
|
||||
that old extension settings names still work. This should not be used for
|
||||
new parameters.
|
||||
|
||||
:param convert_types: If ``True`` (the default), will automatically convert ``kind`` values from
|
||||
native Python types to WA equivalents. This allows more ituitive interprestation
|
||||
of parameter values, e.g. the string ``"false"`` being interpreted as ``False``
|
||||
when specifed as the value for a boolean Parameter.
|
||||
|
||||
"""
|
||||
self.name = identifier(name)
|
||||
if kind is not None and not callable(kind):
|
||||
raise ValueError('Kind must be callable.')
|
||||
if convert_types and kind in self.kind_map:
|
||||
kind = self.kind_map[kind]
|
||||
self.kind = kind
|
||||
self.mandatory = mandatory
|
||||
self.default = default
|
||||
self.override = override
|
||||
self.allowed_values = allowed_values
|
||||
self.description = description
|
||||
if self.kind is None and not self.override:
|
||||
self.kind = str
|
||||
if constraint is not None and not callable(constraint) and not isinstance(constraint, tuple):
|
||||
raise ValueError('Constraint must be callable or a (callable, str) tuple.')
|
||||
self.constraint = constraint
|
||||
super(Parameter, self).__init__(name, kind, mandatory,
|
||||
default, override, allowed_values,
|
||||
description, constraint,
|
||||
convert_types)
|
||||
self.global_alias = global_alias
|
||||
|
||||
def set_value(self, obj, value=None):
|
||||
if value is None:
|
||||
if self.default is not None:
|
||||
value = self.default
|
||||
elif self.mandatory:
|
||||
msg = 'No values specified for mandatory parameter {} in {}'
|
||||
raise ConfigError(msg.format(self.name, obj.name))
|
||||
else:
|
||||
try:
|
||||
value = self.kind(value)
|
||||
except (ValueError, TypeError):
|
||||
typename = self.get_type_name()
|
||||
msg = 'Bad value "{}" for {}; must be {} {}'
|
||||
article = get_article(typename)
|
||||
raise ConfigError(msg.format(value, self.name, article, typename))
|
||||
current_value = getattr(obj, self.name, None)
|
||||
if current_value is None:
|
||||
setattr(obj, self.name, value)
|
||||
elif not isiterable(current_value):
|
||||
setattr(obj, self.name, value)
|
||||
else:
|
||||
new_value = current_value + [value]
|
||||
setattr(obj, self.name, new_value)
|
||||
|
||||
def validate(self, obj):
|
||||
value = getattr(obj, self.name, None)
|
||||
if value is not None:
|
||||
if self.allowed_values:
|
||||
self._validate_allowed_values(obj, value)
|
||||
if self.constraint:
|
||||
self._validate_constraint(obj, value)
|
||||
else:
|
||||
if self.mandatory:
|
||||
msg = 'No value specified for mandatory parameter {} in {}.'
|
||||
raise ConfigError(msg.format(self.name, obj.name))
|
||||
|
||||
def get_type_name(self):
|
||||
typename = str(self.kind)
|
||||
if '\'' in typename:
|
||||
typename = typename.split('\'')[1]
|
||||
elif typename.startswith('<function'):
|
||||
typename = typename.split()[1]
|
||||
return typename
|
||||
|
||||
def _validate_allowed_values(self, obj, value):
|
||||
if 'list' in str(self.kind):
|
||||
for v in value:
|
||||
if v not in self.allowed_values:
|
||||
msg = 'Invalid value {} for {} in {}; must be in {}'
|
||||
raise ConfigError(msg.format(v, self.name, obj.name, self.allowed_values))
|
||||
else:
|
||||
if value not in self.allowed_values:
|
||||
msg = 'Invalid value {} for {} in {}; must be in {}'
|
||||
raise ConfigError(msg.format(value, self.name, obj.name, self.allowed_values))
|
||||
|
||||
def _validate_constraint(self, obj, value):
|
||||
msg_vals = {'value': value, 'param': self.name, 'extension': obj.name}
|
||||
if isinstance(self.constraint, tuple) and len(self.constraint) == 2:
|
||||
constraint, msg = self.constraint # pylint: disable=unpacking-non-sequence
|
||||
elif callable(self.constraint):
|
||||
constraint = self.constraint
|
||||
msg = '"{value}" failed constraint validation for {param} in {extension}.'
|
||||
else:
|
||||
raise ValueError('Invalid constraint for {}: must be callable or a 2-tuple'.format(self.name))
|
||||
if not constraint(value):
|
||||
raise ConfigError(value, msg.format(**msg_vals))
|
||||
self.reconfigurable = reconfigurable
|
||||
|
||||
def __repr__(self):
|
||||
d = copy(self.__dict__)
|
||||
del d['description']
|
||||
return 'Param({})'.format(d)
|
||||
|
||||
__str__ = __repr__
|
||||
|
||||
|
||||
Parameter = Param
|
||||
Param = Parameter
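As a quick, hypothetical illustration of how Parameters are declared on a plugin and how overriding interacts with the propagation machinery further down this diff (all names below are invented):

from wlauto import Instrument, Parameter

class StrictDemo(Instrument):             # hypothetical
    name = 'strict_demo'
    parameters = [
        # bool is mapped to WA's boolean kind via convert_types, so the string
        # "false" in a config file is interpreted as False.
        Parameter('strict', kind=bool, default=True,
                  global_alias='demo_strict',   # may be set at the top level of config
                  description='Fail the run on any error.'),
    ]

class RelaxedDemo(StrictDemo):            # hypothetical subclass
    name = 'relaxed_demo'
    parameters = [
        # Inherited parameters are propagated by PluginMeta; redefining one
        # without override=True raises ValueError in AttributeCollection.
        Parameter('strict', override=True, default=False),
    ]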
|
||||
|
||||
|
||||
class Artifact(object):
|
||||
@ -360,7 +262,7 @@ class Artifact(object):
|
||||
|
||||
class Alias(object):
|
||||
"""
|
||||
This represents a configuration alias for an extension, mapping an alternative name to
|
||||
This represents a configuration alias for a plugin, mapping an alternative name to
|
||||
a set of parameter values, effectively providing an alternative set of default values.
|
||||
|
||||
"""
|
||||
@ -368,7 +270,7 @@ class Alias(object):
|
||||
def __init__(self, name, **kwargs):
|
||||
self.name = name
|
||||
self.params = kwargs
|
||||
self.extension_name = None # gets set by the MetaClass
|
||||
self.plugin_name = None # gets set by the MetaClass
|
||||
|
||||
def validate(self, ext):
|
||||
ext_params = set(p.name for p in ext.parameters)
|
||||
@ -380,9 +282,9 @@ class Alias(object):
|
||||
raise ConfigError(msg.format(param, self.name, ext.name))
|
||||
|
||||
|
||||
class ExtensionMeta(type):
|
||||
class PluginMeta(type):
|
||||
"""
|
||||
This basically adds some magic to extensions to make implementing new extensions, such as
|
||||
This basically adds some magic to plugins to make implementing new plugins, such as
|
||||
workloads less complicated.
|
||||
|
||||
It ensures that certain class attributes (specified by the ``to_propagate``
|
||||
@ -421,13 +323,16 @@ class ExtensionMeta(type):
|
||||
"""
|
||||
for prop_attr, attr_cls, attr_collector_cls in mcs.to_propagate:
|
||||
should_propagate = False
|
||||
propagated = attr_collector_cls(attr_cls, clsname)
|
||||
propagated = attr_collector_cls(attr_cls)
|
||||
for base in bases:
|
||||
if hasattr(base, prop_attr):
|
||||
propagated += getattr(base, prop_attr) or []
|
||||
should_propagate = True
|
||||
if prop_attr in attrs:
|
||||
pattrs = attrs[prop_attr] or []
|
||||
for pa in pattrs:
|
||||
if not isinstance(pa, basestring):
|
||||
pa._owner = clsname
|
||||
propagated += pattrs
|
||||
should_propagate = True
|
||||
if should_propagate:
|
||||
@ -436,7 +341,7 @@ class ExtensionMeta(type):
|
||||
overridden = bool(getattr(p, "_overridden", None))
|
||||
if override != overridden:
|
||||
msg = "Overriding non existing parameter '{}' inside '{}'"
|
||||
raise ValueError(msg.format(p.name, clsname))
|
||||
raise ValueError(msg.format(p.name, p._owner))
|
||||
attrs[prop_attr] = propagated
|
||||
|
||||
@classmethod
|
||||
@ -447,7 +352,7 @@ class ExtensionMeta(type):
|
||||
if isinstance(alias, basestring):
|
||||
alias = Alias(alias)
|
||||
alias.validate(cls)
|
||||
alias.extension_name = cls.name
|
||||
alias.plugin_name = cls.name
|
||||
cls.aliases.add(alias)
|
||||
|
||||
@classmethod
|
||||
@ -492,25 +397,25 @@ class ExtensionMeta(type):
|
||||
setattr(cls, vmname, generate_method_wrapper(vmname))
|
||||
|
||||
|
||||
class Extension(object):
|
||||
class Plugin(object):
|
||||
"""
|
||||
Base class for all WA extensions. An extension is basically a plug-in.
|
||||
It extends the functionality of WA in some way. Extensions are discovered
|
||||
and loaded dynamically by the extension loader upon invocation of WA scripts.
|
||||
Adding an extension is a matter of placing a class that implements an appropriate
|
||||
Base class for all WA plugins. A plugin is basically a plug-in.
|
||||
It extends the functionality of WA in some way. Plugins are discovered
|
||||
and loaded dynamically by the plugin loader upon invocation of WA scripts.
|
||||
Adding a plugin is a matter of placing a class that implements an appropriate
|
||||
interface somewhere it would be discovered by the loader. That "somewhere" is
|
||||
typically one of the extension subdirectories under ``~/.workload_automation/``.
|
||||
typically one of the plugin subdirectories under ``~/.workload_automation/``.
|
||||
|
||||
"""
|
||||
__metaclass__ = ExtensionMeta
|
||||
__metaclass__ = PluginMeta
|
||||
|
||||
kind = None
|
||||
name = None
|
||||
parameters = [
|
||||
Parameter('modules', kind=list,
|
||||
description="""
|
||||
Lists the modules to be loaded by this extension. A module is a plug-in that
|
||||
further extends functionality of an extension.
|
||||
Lists the modules to be loaded by this plugin. A module is a plug-in that
|
||||
further extends the functionality of a plugin.
|
||||
"""),
|
||||
]
|
||||
artifacts = []
|
||||
@ -530,7 +435,6 @@ class Extension(object):
|
||||
return self.__class__.__name__
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
self.__check_from_loader()
|
||||
self.logger = logging.getLogger(self._classname)
|
||||
self._modules = []
|
||||
self.capabilities = getattr(self.__class__, 'capabilities', [])
|
||||
@ -543,7 +447,7 @@ class Extension(object):
|
||||
|
||||
def get_config(self):
|
||||
"""
|
||||
Returns current configuration (i.e. parameter values) of this extension.
|
||||
Returns current configuration (i.e. parameter values) of this plugin.
|
||||
|
||||
"""
|
||||
config = {}
|
||||
@ -553,13 +457,13 @@ class Extension(object):
|
||||
|
||||
def validate(self):
|
||||
"""
|
||||
Perform basic validation to ensure that this extension is capable of running.
|
||||
This is intended as an early check to ensure the extension has not been mis-configured,
|
||||
Perform basic validation to ensure that this plugin is capable of running.
|
||||
This is intended as an early check to ensure the plugin has not been mis-configured,
|
||||
rather than a comprehensive check (that may, e.g., require access to the execution
|
||||
context).
|
||||
|
||||
This method may also be used to enforce (i.e. set as well as check) inter-parameter
|
||||
constraints for the extension (e.g. if valid values for parameter A depend on the value
|
||||
constraints for the plugin (e.g. if valid values for parameter A depend on the value
|
||||
of parameter B -- something that is not possible to enforce using ``Parameter``\ 's
``constraint`` attribute).
|
||||
|
||||
@ -604,7 +508,7 @@ class Extension(object):
|
||||
|
||||
get_module(name, owner, **kwargs)
|
||||
|
||||
and returns an instance of :class:`wlauto.core.extension.Module`. If the module with the
|
||||
and returns an instance of :class:`wlauto.core.plugin.Module`. If the module with the
|
||||
specified name is not found, the loader must raise an appropriate exception.
|
||||
|
||||
"""
|
||||
@ -618,7 +522,7 @@ class Extension(object):
|
||||
self._install_module(module)
|
||||
|
||||
def has(self, capability):
|
||||
"""Check if this extension has the specified capability. The alternative method ``can`` is
|
||||
"""Check if this plugin has the specified capability. The alternative method ``can`` is
|
||||
identical to this. Which to use is up to the caller depending on what makes semantic sense
|
||||
in the context of the capability, e.g. ``can('hard_reset')`` vs ``has('active_cooling')``."""
|
||||
return capability in self.capabilities
|
||||
@ -652,54 +556,343 @@ class Extension(object):
|
||||
self.capabilities.append(capability)
|
||||
self._modules.append(module)
|
||||
|
||||
def __check_from_loader(self):
|
||||
|
||||
class PluginLoaderItem(object):
|
||||
|
||||
def __init__(self, ext_tuple):
|
||||
self.name = ext_tuple.name
|
||||
self.default_package = ext_tuple.default_package
|
||||
self.default_path = ext_tuple.default_path
|
||||
self.cls = load_class(ext_tuple.cls)
|
||||
|
||||
|
||||
class GlobalParameterAlias(object):
|
||||
"""
|
||||
There are a few things that need to happen in order to get a valid extension instance.
|
||||
Not all of them are currently done through standard Python initialisation mechanisms
|
||||
(specifically, the loading of modules and alias resolution). In order to avoid potential
|
||||
problems with not fully loaded extensions, make sure that an extension is *only* instantiated
|
||||
by the loader.
|
||||
Represents a "global alias" for an plugin parameter. A global alias
|
||||
is specified at the top-level of config rather namespaced under an plugin
|
||||
name.
|
||||
|
||||
"""
|
||||
stack = inspect.stack()
|
||||
stack.pop(0) # current frame
|
||||
frame = stack.pop(0)
|
||||
# skip through the init call chain
|
||||
while stack and frame[3] == '__init__':
|
||||
frame = stack.pop(0)
|
||||
if frame[3] != '_instantiate':
|
||||
message = 'Attempting to instantiate {} directly (must be done through an ExtensionLoader)'
|
||||
raise RuntimeError(message.format(self.__class__.__name__))
|
||||
|
||||
|
||||
class Module(Extension):
|
||||
"""
|
||||
This is a "plugin" for an extension this is intended to capture functionality that may be optional
|
||||
for an extension, and so may or may not be present in a particular setup; or, conversely, functionality
|
||||
that may be reusable between multiple devices, even if they are not with the same inheritance hierarchy.
|
||||
|
||||
In other words, a Module is roughly equivalent to a kernel module and its primary purpose is to
|
||||
implement WA "drivers" for various peripherals that may or may not be present in a particular setup.
|
||||
|
||||
.. note:: A mudule is itself an Extension and can therefore have its own modules.
|
||||
Multiple plugins may have parameters with the same global_alias if they are
|
||||
part of the same inheritance hierarchy and one parameter is an override of the
|
||||
other. This class keeps track of all such cases in its plugins dict.
|
||||
|
||||
"""
|
||||
|
||||
capabilities = []
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
self.plugins = {}
|
||||
|
||||
@property
|
||||
def root_owner(self):
|
||||
owner = self.owner
|
||||
while isinstance(owner, Module) and owner is not self:
|
||||
owner = owner.owner
|
||||
return owner
|
||||
def iteritems(self):
|
||||
for ext in self.plugins.itervalues():
|
||||
yield (self.get_param(ext), ext)
|
||||
|
||||
def __init__(self, owner, **kwargs):
|
||||
super(Module, self).__init__(**kwargs)
|
||||
self.owner = owner
|
||||
while isinstance(owner, Module):
|
||||
if owner.name == self.name:
|
||||
raise ValueError('Circular module import for {}'.format(self.name))
|
||||
def get_param(self, ext):
|
||||
for param in ext.parameters:
|
||||
if param.global_alias == self.name:
|
||||
return param
|
||||
message = 'Plugin {} does not have a parameter with global alias {}'
|
||||
raise ValueError(message.format(ext.name, self.name))
|
||||
|
||||
def initialize(self, context):
|
||||
def update(self, other_ext):
|
||||
self._validate_ext(other_ext)
|
||||
self.plugins[other_ext.name] = other_ext
|
||||
|
||||
def _validate_ext(self, other_ext):
|
||||
other_param = self.get_param(other_ext)
|
||||
for param, ext in self.iteritems():
|
||||
if ((not (issubclass(ext, other_ext) or issubclass(other_ext, ext))) and
|
||||
other_param.kind != param.kind):
|
||||
message = 'Duplicate global alias {} declared in {} and {} plugins with different types'
|
||||
raise LoaderError(message.format(self.name, ext.name, other_ext.name))
|
||||
if param.kind != other_param.kind:
|
||||
message = 'Two params {} in {} and {} in {} both declare global alias {}, and are of different kinds'
|
||||
raise LoaderError(message.format(param.name, ext.name,
|
||||
other_param.name, other_ext.name, self.name))
|
||||
|
||||
def __str__(self):
|
||||
text = 'GlobalAlias({} => {})'
|
||||
extlist = ', '.join(['{}.{}'.format(e.name, p.name) for p, e in self.iteritems()])
|
||||
return text.format(self.name, extlist)
|
||||
|
||||
|
||||
class PluginLoader(object):
|
||||
"""
|
||||
Discovers, enumerates and loads available devices, configs, etc.
|
||||
The loader will attempt to discover things on construction by looking
|
||||
in a predetermined set of locations defined by default_paths. Optionally,
additional locations may be specified through the ``paths`` parameter, which must
be a list of additional Python module paths (i.e. dot-delimited).
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, packages=None, paths=None, ignore_paths=None, keep_going=False):
|
||||
"""
|
||||
params::
|
||||
|
||||
:packages: List of packages to load plugins from.
|
||||
:paths: List of paths to be searched for Python modules containing
|
||||
WA plugins.
|
||||
:ignore_paths: List of paths to ignore when searching for WA plugins (these would
typically be subdirectories of one or more locations listed in
the ``paths`` parameter).
|
||||
:keep_going: Specifies whether to keep going if an error occurs while loading
|
||||
plugins.
|
||||
"""
|
||||
self.logger = logging.getLogger('pluginloader')
|
||||
self.keep_going = keep_going
|
||||
self.packages = packages or []
|
||||
self.paths = paths or []
|
||||
self.ignore_paths = ignore_paths or []
|
||||
self.plugins = {}
|
||||
self.kind_map = defaultdict(dict)
|
||||
self.aliases = {}
|
||||
self.global_param_aliases = {}
|
||||
self._discover_from_packages(self.packages)
|
||||
self._discover_from_paths(self.paths, self.ignore_paths)
|
||||
|
||||
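As a usage sketch (the package and path below are illustrative), the loader can be constructed and queried directly:

from wlauto.core.plugin import PluginLoader

loader = PluginLoader(packages=['wlauto.workloads'],
                      paths=['/home/user/.workload_automation/plugins'],
                      keep_going=True)

# List everything that was discovered, then just the workloads.
print(len(loader.list_plugins()))
print([cls.name for cls in loader.list_plugins(kind='workload')])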
def update(self, packages=None, paths=None, ignore_paths=None):
|
||||
""" Load plugins from the specified paths/packages
|
||||
without clearing or reloading existing plugins. """
|
||||
if packages:
|
||||
self.packages.extend(packages)
|
||||
self._discover_from_packages(packages)
|
||||
if paths:
|
||||
self.paths.extend(paths)
|
||||
self.ignore_paths.extend(ignore_paths or [])
|
||||
self._discover_from_paths(paths, ignore_paths or [])
|
||||
|
||||
def clear(self):
|
||||
""" Clear all discovered items. """
|
||||
self.plugins = {}
|
||||
self.kind_map.clear()
|
||||
|
||||
def reload(self):
|
||||
""" Clear all discovered items and re-run the discovery. """
|
||||
self.clear()
|
||||
self._discover_from_packages(self.packages)
|
||||
self._discover_from_paths(self.paths, self.ignore_paths)
|
||||
|
||||
def get_plugin_class(self, name, kind=None):
|
||||
"""
|
||||
Return the class for the specified plugin if found or raises ``ValueError``.
|
||||
|
||||
"""
|
||||
name, _ = self.resolve_alias(name)
|
||||
if kind is None:
|
||||
try:
|
||||
return self.plugins[name]
|
||||
except KeyError:
|
||||
raise NotFoundError('plugin {} not found.'.format(name))
|
||||
if kind not in self.kind_map:
|
||||
raise ValueError('Unknown plugin type: {}'.format(kind))
|
||||
store = self.kind_map[kind]
|
||||
if name not in store:
|
||||
raise NotFoundError('plugin {} is not {} {}.'.format(name, get_article(kind), kind))
|
||||
return store[name]
|
||||
|
||||
def get_plugin(self, name=None, kind=None, *args, **kwargs):
|
||||
"""
|
||||
Return plugin of the specified kind with the specified name. Any
|
||||
additional parameters will be passed to the plugin's __init__.
|
||||
|
||||
"""
|
||||
name, base_kwargs = self.resolve_alias(name)
|
||||
kwargs = OrderedDict(chain(base_kwargs.iteritems(), kwargs.iteritems()))
|
||||
cls = self.get_plugin_class(name, kind)
|
||||
plugin = cls(*args, **kwargs)
|
||||
return plugin
|
||||
|
||||
def get_default_config(self, name):
|
||||
"""
|
||||
Returns the default configuration for the specified plugin name. The
|
||||
name may be an alias, in which case, the returned config will be
|
||||
augmented with appropriate alias overrides.
|
||||
|
||||
"""
|
||||
real_name, alias_config = self.resolve_alias(name)
|
||||
base_default_config = self.get_plugin_class(real_name).get_default_config()
|
||||
return merge_dicts(base_default_config, alias_config, list_duplicates='last', dict_type=OrderedDict)
|
||||
|
||||
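For instance, with the hypothetical alias sketched earlier, the returned dict would be the plugin's parameter defaults overlaid with the alias overrides:

config = loader.get_default_config('example_workload_thorough')
# Roughly: {'modules': None, 'iterations': 100, 'mode': 'thorough'}  (illustrative)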
def list_plugins(self, kind=None):
|
||||
"""
|
||||
List discovered plugin classes. Optionally, only list plugins of a
|
||||
particular type.
|
||||
|
||||
"""
|
||||
if kind is None:
|
||||
return self.plugins.values()
|
||||
if kind not in self.kind_map:
|
||||
raise ValueError('Unknown plugin type: {}'.format(kind))
|
||||
return self.kind_map[kind].values()
|
||||
|
||||
def has_plugin(self, name, kind=None):
|
||||
"""
|
||||
Returns ``True`` if a plugin with the specified ``name`` has been
|
||||
discovered by the loader. If ``kind`` was specified, only returns ``True``
|
||||
if the plugin has been found, *and* it is of the specified kind.
|
||||
|
||||
"""
|
||||
try:
|
||||
self.get_plugin_class(name, kind)
|
||||
return True
|
||||
except NotFoundError:
|
||||
return False
|
||||
|
||||
def resolve_alias(self, alias_name):
|
||||
"""
|
||||
Try to resolve the specified name as a plugin alias. Returns a
two-tuple, the first value of which is the actual plugin name, and the
second is a dict of parameter values for this alias. If the name passed
is already a plugin name, then the result is ``(alias_name, {})``.
|
||||
|
||||
"""
|
||||
alias_name = identifier(alias_name.lower())
|
||||
if alias_name in self.plugins:
|
||||
return (alias_name, {})
|
||||
if alias_name in self.aliases:
|
||||
alias = self.aliases[alias_name]
|
||||
return (alias.plugin_name, alias.params)
|
||||
raise NotFoundError('Could not find plugin or alias "{}"'.format(alias_name))
|
||||
|
||||
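Continuing the hypothetical example from earlier, alias resolution behaves roughly like this:

print(loader.resolve_alias('example_workload'))
# ('example_workload', {})
print(loader.resolve_alias('example_workload_thorough'))
# ('example_workload', {'mode': 'thorough', 'iterations': 100})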
# Internal methods.
|
||||
|
||||
def __getattr__(self, name):
|
||||
"""
|
||||
This resolves methods for specific plugins types based on corresponding
|
||||
generic plugin methods. So it's possible to say things like ::
|
||||
|
||||
loader.get_device('foo')
|
||||
|
||||
instead of ::
|
||||
|
||||
loader.get_plugin('foo', kind='device')
|
||||
|
||||
"""
|
||||
if name.startswith('get_'):
|
||||
name = name.replace('get_', '', 1)
|
||||
if name in self.kind_map:
|
||||
def __wrapper(pname, *args, **kwargs):
|
||||
return self.get_plugin(pname, name, *args, **kwargs)
|
||||
return __wrapper
|
||||
if name.startswith('list_'):
|
||||
name = name.replace('list_', '', 1).rstrip('s')
|
||||
if name in self.kind_map:
|
||||
def __wrapper(*args, **kwargs): # pylint: disable=E0102
|
||||
return self.list_plugins(name, *args, **kwargs)
|
||||
return __wrapper
|
||||
if name.startswith('has_'):
|
||||
name = name.replace('has_', '', 1)
|
||||
if name in self.kind_map:
|
||||
def __wrapper(pname, *args, **kwargs): # pylint: disable=E0102
|
||||
return self.has_plugin(pname, name, *args, **kwargs)
|
||||
return __wrapper
|
||||
raise AttributeError(name)
|
||||
|
||||
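In other words, once a kind has been discovered, kind-specific helpers resolve dynamically (plugin names below are illustrative):

loader.list_workloads()                   # same as loader.list_plugins('workload')
loader.has_workload('example_workload')   # same as loader.has_plugin('example_workload', 'workload')
# get_workload(name, ...) similarly forwards to get_plugin(name, 'workload', ...)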
def _discover_from_packages(self, packages):
|
||||
self.logger.debug('Discovering plugins in packages')
|
||||
try:
|
||||
for package in packages:
|
||||
for module in walk_modules(package):
|
||||
self._discover_in_module(module)
|
||||
except ImportError as e:
|
||||
source = getattr(e, 'path', package)
|
||||
message = 'Problem loading plugins from {}: {}'
|
||||
raise LoaderError(message.format(source, e.message))
|
||||
|
||||
def _discover_from_paths(self, paths, ignore_paths):
|
||||
paths = paths or []
|
||||
ignore_paths = ignore_paths or []
|
||||
|
||||
self.logger.debug('Discovering plugins in paths')
|
||||
for path in paths:
|
||||
self.logger.debug('Checking path %s', path)
|
||||
if os.path.isfile(path):
|
||||
self._discover_from_file(path)
|
||||
for root, _, files in os.walk(path, followlinks=True):
|
||||
should_skip = False
|
||||
for igpath in ignore_paths:
|
||||
if root.startswith(igpath):
|
||||
should_skip = True
|
||||
break
|
||||
if should_skip:
|
||||
continue
|
||||
for fname in files:
|
||||
if os.path.splitext(fname)[1].lower() != '.py':
|
||||
continue
|
||||
filepath = os.path.join(root, fname)
|
||||
self._discover_from_file(filepath)
|
||||
|
||||
def _discover_from_file(self, filepath):
|
||||
try:
|
||||
modname = os.path.splitext(filepath[1:])[0].translate(MODNAME_TRANS)
|
||||
module = imp.load_source(modname, filepath)
|
||||
self._discover_in_module(module)
|
||||
except (SystemExit, ImportError), e:
|
||||
if self.keep_going:
|
||||
self.logger.warning('Failed to load {}'.format(filepath))
|
||||
self.logger.warning('Got: {}'.format(e))
|
||||
else:
|
||||
raise LoaderError('Failed to load {}'.format(filepath), sys.exc_info())
|
||||
except Exception as e:
|
||||
message = 'Problem loading plugins from {}: {}'
|
||||
raise LoaderError(message.format(filepath, e))
|
||||
|
||||
def _discover_in_module(self, module): # NOQA pylint: disable=too-many-branches
|
||||
self.logger.debug('Checking module %s', module.__name__)
|
||||
#log.indent()
|
||||
try:
|
||||
for obj in vars(module).itervalues():
|
||||
if inspect.isclass(obj):
|
||||
if not issubclass(obj, Plugin):
|
||||
continue
|
||||
if not obj.kind:
|
||||
message = 'Skipping plugin {} as it does not define a kind'
|
||||
self.logger.debug(message.format(obj.__name__))
|
||||
continue
|
||||
if not obj.name:
|
||||
message = 'Skipping {} {} as it does not define a name'
|
||||
self.logger.debug(message.format(obj.kind, obj.__name__))
|
||||
continue
|
||||
try:
|
||||
self._add_found_plugin(obj)
|
||||
except LoaderError as e:
|
||||
if self.keep_going:
|
||||
self.logger.warning(e)
|
||||
else:
|
||||
raise e
|
||||
finally:
|
||||
# log.dedent()
|
||||
pass
|
||||
|
||||
def _add_found_plugin(self, obj):
|
||||
"""
|
||||
:obj: Found plugin class
|
||||
:ext: matching plugin item.
|
||||
"""
|
||||
self.logger.debug('Adding %s %s', obj.kind, obj.name)
|
||||
key = identifier(obj.name.lower())
|
||||
if key in self.plugins or key in self.aliases:
|
||||
raise LoaderError('{} "{}" already exists.'.format(obj.kind, obj.name))
|
||||
# Plugins are tracked both in a common plugins dict, and in a
# per-kind dict (as retrieving plugins by kind is a common use case).
|
||||
self.plugins[key] = obj
|
||||
self.kind_map[obj.kind][key] = obj
|
||||
|
||||
for alias in obj.aliases:
|
||||
alias_id = identifier(alias.name.lower())
|
||||
if alias_id in self.plugins or alias_id in self.aliases:
|
||||
raise LoaderError('{} "{}" already exists.'.format(obj.kind, obj.name))
|
||||
self.aliases[alias_id] = alias
|
||||
|
||||
# Update global aliases list. If a global alias is already in the list,
|
||||
# then make sure this plugin is in the same parent/child hierarchy
|
||||
# as the one already found.
|
||||
for param in obj.parameters:
|
||||
if param.global_alias:
|
||||
if param.global_alias not in self.global_param_aliases:
|
||||
ga = GlobalParameterAlias(param.global_alias)
|
||||
ga.update(obj)
|
||||
self.global_param_aliases[ga.name] = ga
|
||||
else: # global alias already exists.
|
||||
self.global_param_aliases[param.global_alias].update(obj)
|
90
wlauto/core/pluginloader.py
Normal file
@ -0,0 +1,90 @@
|
||||
# Copyright 2013-2015 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
import sys
|
||||
|
||||
|
||||
class __LoaderWrapper(object):
|
||||
|
||||
@property
|
||||
def kinds(self):
|
||||
if not self._loader:
|
||||
self.reset()
|
||||
return self._loader.kind_map.keys()
|
||||
|
||||
@property
|
||||
def kind_map(self):
|
||||
if not self._loader:
|
||||
self.reset()
|
||||
return self._loader.kind_map
|
||||
|
||||
def __init__(self):
|
||||
self._loader = None
|
||||
|
||||
def reset(self):
|
||||
# These imports cannot be done at top level, because of
|
||||
# sys.modules manipulation below
|
||||
from wlauto.core.plugin import PluginLoader
|
||||
from wlauto.core.config.core import settings
|
||||
self._loader = PluginLoader(settings.plugin_packages,
|
||||
settings.plugin_paths,
|
||||
settings.plugin_ignore_paths)
|
||||
|
||||
def update(self, packages=None, paths=None, ignore_paths=None):
|
||||
if not self._loader:
|
||||
self.reset()
|
||||
self._loader.update(packages, paths, ignore_paths)
|
||||
|
||||
def reload(self):
|
||||
if not self._loader:
|
||||
self.reset()
|
||||
self._loader.reload()
|
||||
|
||||
def list_plugins(self, kind=None):
|
||||
if not self._loader:
|
||||
self.reset()
|
||||
return self._loader.list_plugins(kind)
|
||||
|
||||
def has_plugin(self, name, kind=None):
|
||||
if not self._loader:
|
||||
self.reset()
|
||||
return self._loader.has_plugin(name, kind)
|
||||
|
||||
def get_plugin_class(self, name, kind=None):
|
||||
if not self._loader:
|
||||
self.reset()
|
||||
return self._loader.get_plugin_class(name, kind)
|
||||
|
||||
def get_plugin(self, name=None, kind=None, *args, **kwargs):
|
||||
if not self._loader:
|
||||
self.reset()
|
||||
return self._loader.get_plugin(name=name, kind=kind, *args, **kwargs)
|
||||
|
||||
def get_default_config(self, name):
|
||||
if not self._loader:
|
||||
self.reset()
|
||||
return self._loader.get_default_config(name)
|
||||
|
||||
def resolve_alias(self, name):
|
||||
if not self._loader:
|
||||
self.reset()
|
||||
return self._loader.resolve_alias(name)
|
||||
|
||||
def __getattr__(self, name):
|
||||
if not self._loader:
|
||||
self.reset()
|
||||
return getattr(self._loader, name)
|
||||
|
||||
|
||||
sys.modules[__name__] = __LoaderWrapper()
|
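Because the module object in ``sys.modules`` is replaced with a ``__LoaderWrapper`` instance, callers simply import the module and use it as if it were the loader itself; the real ``PluginLoader`` is built lazily on first use. A small usage sketch:

from wlauto.core import pluginloader

# The first call triggers reset(), which constructs the underlying
# PluginLoader from settings.plugin_packages / plugin_paths.
workloads = pluginloader.list_plugins(kind='workload')

# kinds/kind_map are exposed as properties on the wrapper; anything else
# falls through to the wrapped loader via __getattr__.
print(pluginloader.kinds)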
@ -24,10 +24,10 @@ from collections import defaultdict
|
||||
|
||||
# Note: this is the modified louie library in wlauto/external.
|
||||
# prioritylist does not exist in vanilla louie.
|
||||
from louie.prioritylist import PriorityList # pylint: disable=E0611,F0401
|
||||
from wlauto.utils.types import prioritylist # pylint: disable=E0611,F0401
|
||||
|
||||
from wlauto.exceptions import ResourceError
|
||||
|
||||
from wlauto.core import pluginloader
|
||||
|
||||
class ResourceResolver(object):
|
||||
"""
|
||||
@ -38,7 +38,7 @@ class ResourceResolver(object):
|
||||
|
||||
def __init__(self, config):
|
||||
self.logger = logging.getLogger(self.__class__.__name__)
|
||||
self.getters = defaultdict(PriorityList)
|
||||
self.getters = defaultdict(prioritylist)
|
||||
self.config = config
|
||||
|
||||
def load(self):
|
||||
@ -47,8 +47,9 @@ class ResourceResolver(object):
|
||||
be either a python package/module or a path.
|
||||
|
||||
"""
|
||||
|
||||
for rescls in self.config.ext_loader.list_resource_getters():
|
||||
getter = self.config.get_extension(rescls.name, self)
|
||||
getter = self.config.get_plugin(name=rescls.name, kind="resource_getter", resolver=self)
|
||||
getter.register()
|
||||
|
||||
def get(self, resource, strict=True, *args, **kwargs):
|
||||
@ -95,7 +96,7 @@ class ResourceResolver(object):
|
||||
means should register with lower (negative) priorities.
|
||||
|
||||
"""
|
||||
self.logger.debug('Registering {}'.format(getter.name))
|
||||
self.logger.debug('Registering {} for {} resources'.format(getter.name, kind))
|
||||
self.getters[kind].add(getter, priority)
|
||||
|
||||
def unregister(self, getter, kind):
|
||||
|
@ -13,8 +13,8 @@
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
from wlauto.core.bootstrap import settings
|
||||
from wlauto.core.extension import Extension
|
||||
from wlauto.core.config.core import settings
|
||||
from wlauto.core.plugin import Plugin
|
||||
|
||||
|
||||
class GetterPriority(object):
|
||||
@ -77,7 +77,7 @@ class Resource(object):
|
||||
return '<{}\'s {}>'.format(self.owner, self.name)
|
||||
|
||||
|
||||
class ResourceGetter(Extension):
|
||||
class ResourceGetter(Plugin):
|
||||
"""
|
||||
Base class for implementing resolvers. Defines resolver interface. Resolvers are
|
||||
responsible for discovering resources (such as particular kinds of files) they know
|
||||
@ -97,11 +97,12 @@ class ResourceGetter(Extension):
|
||||
|
||||
"""
|
||||
|
||||
kind = "resource_getter"
|
||||
name = None
|
||||
resource_type = None
|
||||
priority = GetterPriority.environment
|
||||
|
||||
def __init__(self, resolver, **kwargs):
|
||||
def __init__(self, resolver=None, **kwargs):
|
||||
super(ResourceGetter, self).__init__(**kwargs)
|
||||
self.resolver = resolver
|
||||
|
||||
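A minimal getter following this interface might look like the sketch below (the name and lookup logic are invented for illustration; real getters live in WA's resource getter plugins):

import os

from wlauto import ResourceGetter, GetterPriority

class HomeDirGetter(ResourceGetter):   # hypothetical getter

    name = 'home_dir_getter'
    resource_type = 'file'
    priority = GetterPriority.environment

    def get(self, resource, **kwargs):
        # Return a path if the requested file happens to sit in $HOME,
        # otherwise return None so lower-priority getters get a chance.
        candidate = os.path.join(os.path.expanduser('~'), getattr(resource, 'path', ''))
        return candidate if os.path.isfile(candidate) else None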
|
@ -41,7 +41,7 @@ from copy import copy
|
||||
from contextlib import contextmanager
|
||||
from datetime import datetime
|
||||
|
||||
from wlauto.core.extension import Extension
|
||||
from wlauto.core.plugin import Plugin
|
||||
from wlauto.exceptions import WAError
|
||||
from wlauto.utils.types import numeric
|
||||
from wlauto.utils.misc import enum_metaclass, merge_dicts
|
||||
@ -131,7 +131,7 @@ class ResultManager(object):
|
||||
self._bad.append(processor)
|
||||
|
||||
|
||||
class ResultProcessor(Extension):
|
||||
class ResultProcessor(Plugin):
|
||||
"""
|
||||
Base class for result processors. Defines an interface that should be implemented
|
||||
by the subclasses. A result processor can be used to do any kind of post-processing
|
||||
@ -139,7 +139,7 @@ class ResultProcessor(Extension):
|
||||
performing calculations, generating plots, etc.
|
||||
|
||||
"""
|
||||
|
||||
kind = "result_processor"
|
||||
def initialize(self, context):
|
||||
pass
|
||||
|
||||
@ -327,4 +327,3 @@ class Metric(object):
|
||||
return '<{}>'.format(result)
|
||||
|
||||
__repr__ = __str__
|
||||
|
||||
|
@ -19,7 +19,15 @@ This module wraps louie signalling mechanism. It relies on modified version of l
|
||||
that has prioritization added to handler invocation.
|
||||
|
||||
"""
|
||||
from louie import dispatcher # pylint: disable=F0401
|
||||
import logging
|
||||
from contextlib import contextmanager
|
||||
|
||||
from louie import dispatcher
|
||||
|
||||
from wlauto.utils.types import prioritylist
|
||||
|
||||
|
||||
logger = logging.getLogger('dispatcher')
|
||||
|
||||
|
||||
class Signal(object):
|
||||
@ -30,7 +38,7 @@ class Signal(object):
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, name, invert_priority=False):
|
||||
def __init__(self, name, description='no description', invert_priority=False):
|
||||
"""
|
||||
Instantiates a Signal.
|
||||
|
||||
@ -44,6 +52,7 @@ class Signal(object):
|
||||
priorities will be called right after the event has occurred.
|
||||
"""
|
||||
self.name = name
|
||||
self.description = description
|
||||
self.invert_priority = invert_priority
|
||||
|
||||
def __str__(self):
|
||||
@ -116,6 +125,32 @@ ERROR_LOGGED = Signal('error_logged')
|
||||
WARNING_LOGGED = Signal('warning_logged')
|
||||
|
||||
|
||||
class CallbackPriority(object):
|
||||
|
||||
EXTREMELY_HIGH = 30
|
||||
VERY_HIGH = 20
|
||||
HIGH = 10
|
||||
NORMAL = 0
|
||||
LOW = -10
|
||||
VERY_LOW = -20
|
||||
EXTREMELY_LOW = -30
|
||||
|
||||
def __init__(self):
|
||||
raise ValueError('Cannot instantiate')
|
||||
|
||||
|
||||
class _prioritylist_wrapper(prioritylist):
|
||||
"""
|
||||
This adds a NOP append() method so that when louie invokes it to add the
|
||||
handler to receivers, nothing will happen; the handler is actually added inside
|
||||
the connect() below according to priority, before louie's connect() gets invoked.
|
||||
|
||||
"""
|
||||
|
||||
def append(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
|
||||
def connect(handler, signal, sender=dispatcher.Any, priority=0):
|
||||
"""
|
||||
Connects a callback to a signal, so that the callback will be automatically invoked
|
||||
@ -124,10 +159,10 @@ def connect(handler, signal, sender=dispatcher.Any, priority=0):
|
||||
Parameters:
|
||||
|
||||
:handler: This can be any callable that that takes the right arguments for
|
||||
the signal. For most siginals this means a single argument that
|
||||
will be an ``ExecutionContext`` instance. But please see documentaion
|
||||
the signal. For most signals this means a single argument that
|
||||
will be an ``ExecutionContext`` instance. But please see documentation
|
||||
for individual signals in the :ref:`signals reference <instrumentation_method_map>`.
|
||||
:signal: The signal to which the hanlder will be subscribed. Please see
|
||||
:signal: The signal to which the handler will be subscribed. Please see
|
||||
:ref:`signals reference <instrumentation_method_map>` for the list of standard WA
|
||||
signals.
|
||||
|
||||
@ -137,7 +172,7 @@ def connect(handler, signal, sender=dispatcher.Any, priority=0):
|
||||
|
||||
:sender: The handler will be invoked only for the signals emitted by this sender. By
|
||||
default, this is set to :class:`louie.dispatcher.Any`, so the handler will
|
||||
be invoked for signals from any sentder.
|
||||
be invoked for signals from any sender.
|
||||
:priority: An integer (positive or negative) the specifies the priority of the handler.
|
||||
Handlers with higher priority will be called before handlers with lower
|
||||
priority. The call order of handlers with the same priority is not specified.
|
||||
@ -148,10 +183,19 @@ def connect(handler, signal, sender=dispatcher.Any, priority=0):
|
||||
for details.
|
||||
|
||||
"""
|
||||
if signal.invert_priority:
|
||||
dispatcher.connect(handler, signal, sender, priority=-priority) # pylint: disable=E1123
|
||||
if getattr(signal, 'invert_priority', False):
|
||||
priority = -priority
|
||||
senderkey = id(sender)
|
||||
if senderkey in dispatcher.connections:
|
||||
signals = dispatcher.connections[senderkey]
|
||||
else:
|
||||
dispatcher.connect(handler, signal, sender, priority=priority) # pylint: disable=E1123
|
||||
dispatcher.connections[senderkey] = signals = {}
|
||||
if signal in signals:
|
||||
receivers = signals[signal]
|
||||
else:
|
||||
receivers = signals[signal] = _prioritylist_wrapper()
|
||||
receivers.add(handler, priority)
|
||||
dispatcher.connect(handler, signal, sender)
|
||||
|
||||
|
||||
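For example (assuming this module is importable as ``wlauto.core.signal``; the handler is hypothetical), a high-priority handler can be attached like this:

from wlauto.core import signal

def on_warning(sender, *args, **kwargs):
    # Hypothetical handler, invoked whenever WARNING_LOGGED is dispatched.
    pass

# Higher priority runs earlier (negated for signals with invert_priority set).
signal.connect(on_warning, signal.WARNING_LOGGED, priority=10)
signal.send(signal.WARNING_LOGGED)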
def disconnect(handler, signal, sender=dispatcher.Any):
|
||||
@ -171,7 +215,7 @@ def disconnect(handler, signal, sender=dispatcher.Any):
|
||||
dispatcher.disconnect(handler, signal, sender)
|
||||
|
||||
|
||||
def send(signal, sender, *args, **kwargs):
|
||||
def send(signal, sender=dispatcher.Anonymous, *args, **kwargs):
|
||||
"""
|
||||
Sends a signal, causing connected handlers to be invoked.
|
||||
|
||||
@ -185,5 +229,44 @@ def send(signal, sender, *args, **kwargs):
|
||||
The rest of the parameters will be passed on as arguments to the handler.
|
||||
|
||||
"""
|
||||
dispatcher.send(signal, sender, *args, **kwargs)
|
||||
return dispatcher.send(signal, sender, *args, **kwargs)
|
||||
|
||||
|
||||
# This will normally be set to log_error() by init_logging(); see wa.framework/log.py.
|
||||
# Done this way to prevent a circular import dependency.
|
||||
log_error_func = logger.error
|
||||
|
||||
|
||||
def safe_send(signal, sender=dispatcher.Anonymous,
|
||||
propagate=[KeyboardInterrupt], *args, **kwargs):
|
||||
"""
|
||||
Same as ``send``, except this will catch and log all exceptions raised
|
||||
by handlers, except those specified in ``propagate`` argument (defaults
|
||||
to just ``[KeyboardInterrupt]``).
|
||||
"""
|
||||
try:
|
||||
send(signal, sender, *args, **kwargs)
|
||||
except Exception as e:
|
||||
if any(isinstance(e, p) for p in propagate):
|
||||
raise e
|
||||
log_error_func(e)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def wrap(signal_name, sender=dispatcher.Anonymous, safe=False, *args, **kwargs):
|
||||
"""Wraps the suite in before/after signals, ensuring
|
||||
that after signal is always sent."""
|
||||
signal_name = signal_name.upper().replace('-', '_')
|
||||
send_func = safe_send if safe else send
|
||||
try:
|
||||
before_signal = globals()['BEFORE_' + signal_name]
|
||||
success_signal = globals()['SUCCESSFUL_' + signal_name]
|
||||
after_signal = globals()['AFTER_' + signal_name]
|
||||
except KeyError:
|
||||
raise ValueError('Invalid wrapped signal name: {}'.format(signal_name))
|
||||
try:
|
||||
send_func(before_signal, sender, *args, **kwargs)
|
||||
yield
|
||||
send_func(success_signal, sender, *args, **kwargs)
|
||||
finally:
|
||||
send_func(after_signal, sender, *args, **kwargs)
|
||||
|
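``wrap`` simply brackets a block of work with the matching BEFORE_/SUCCESSFUL_/AFTER_ signals; the name used below is purely illustrative and will raise ``ValueError`` unless that triplet is defined in this module:

from wlauto.core import signal

try:
    with signal.wrap('REBOOT'):   # looks up BEFORE_REBOOT, SUCCESSFUL_REBOOT, AFTER_REBOOT
        pass                      # the wrapped operation goes here
except ValueError:
    pass                          # triplet not defined in this build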
@ -22,18 +22,18 @@ execution of a workload produces one :class:`wlauto.core.result.WorkloadResult`
|
||||
:class:`wlauto.core.result.Artifact`\s by the workload and active instrumentation.
|
||||
|
||||
"""
|
||||
from wlauto.core.extension import Extension
|
||||
from wlauto.core.plugin import Plugin
|
||||
from wlauto.exceptions import WorkloadError
|
||||
|
||||
|
||||
class Workload(Extension):
|
||||
class Workload(Plugin):
|
||||
"""
|
||||
This is the base class for the workloads executed by the framework.
|
||||
Each of the methods throwing NotImplementedError *must* be implemented
|
||||
by the derived classes.
|
||||
|
||||
"""
|
||||
|
||||
kind = "workload"
|
||||
supported_devices = []
|
||||
supported_platforms = []
|
||||
summary_metrics = []
|
||||
|
@ -28,7 +28,7 @@ class NotFoundError(WAError):
|
||||
|
||||
|
||||
class ValidationError(WAError):
|
||||
"""Raised on failure to validate an extension."""
|
||||
"""Raised on failure to validate an plugin."""
|
||||
pass
|
||||
|
||||
|
||||
@ -58,8 +58,8 @@ class ModuleError(WAError):
|
||||
"""
|
||||
Problem with a module.
|
||||
|
||||
.. note:: Modules for specific extension types should raise execeptions
|
||||
appropriate to that extension. E.g. a ``Device`` module should raise
|
||||
.. note:: Modules for specific plugin types should raise exceptions
|
||||
appropriate to that plugin. E.g. a ``Device`` module should raise
|
||||
``DeviceError``. This is intended for situation where a module is
|
||||
unsure (and/or doesn't care) what its owner is.
|
||||
|
||||
@ -95,7 +95,7 @@ class ToolError(WAError):
|
||||
|
||||
|
||||
class LoaderError(WAError):
|
||||
"""Raised when there is an error loading an extension or
|
||||
"""Raised when there is an error loading an plugin or
|
||||
an external resource. Apart form the usual message, the __init__
|
||||
takes an exc_info parameter which should be the result of
|
||||
sys.exc_info() for the original exception (if any) that
|
||||
|
12
wlauto/external/louie/LICENSE
vendored
@ -1,12 +0,0 @@
|
||||
This directory contains Louie package that has been modified by ARM Ltd.
|
||||
Original Louie package is licensed under BSD license. ARM Ltd. changes are
|
||||
licensed under Apache version 2 license.
|
||||
|
||||
Original Louie package may be found here:
|
||||
|
||||
https://pypi.python.org/pypi/Louie/1.1
|
||||
|
||||
The text of the BSD License may be viewed here:
|
||||
|
||||
http://opensource.org/licenses/bsd-license.php
|
||||
|
46
wlauto/external/louie/__init__.py
vendored
@ -1,46 +0,0 @@
|
||||
__all__ = [
|
||||
'dispatcher',
|
||||
'error',
|
||||
'plugin',
|
||||
'robustapply',
|
||||
'saferef',
|
||||
'sender',
|
||||
'signal',
|
||||
'version',
|
||||
|
||||
'connect',
|
||||
'disconnect',
|
||||
'get_all_receivers',
|
||||
'reset',
|
||||
'send',
|
||||
'send_exact',
|
||||
'send_minimal',
|
||||
'send_robust',
|
||||
|
||||
'install_plugin',
|
||||
'remove_plugin',
|
||||
'Plugin',
|
||||
'QtWidgetPlugin',
|
||||
'TwistedDispatchPlugin',
|
||||
|
||||
'Anonymous',
|
||||
'Any',
|
||||
|
||||
'All',
|
||||
'Signal',
|
||||
]
|
||||
|
||||
import louie.dispatcher, louie.error, louie.plugin, louie.robustapply, \
|
||||
louie.saferef, louie.sender, louie.signal, louie.version
|
||||
|
||||
from louie.dispatcher import \
|
||||
connect, disconnect, get_all_receivers, reset, \
|
||||
send, send_exact, send_minimal, send_robust
|
||||
|
||||
from louie.plugin import \
|
||||
install_plugin, remove_plugin, Plugin, \
|
||||
QtWidgetPlugin, TwistedDispatchPlugin
|
||||
|
||||
from louie.sender import Anonymous, Any
|
||||
|
||||
from louie.signal import All, Signal
|
591
wlauto/external/louie/dispatcher.py
vendored
@ -1,591 +0,0 @@
|
||||
"""Multiple-producer-multiple-consumer signal-dispatching.
|
||||
|
||||
``dispatcher`` is the core of Louie, providing the primary API and the
|
||||
core logic for the system.
|
||||
|
||||
Internal attributes:
|
||||
|
||||
- ``WEAKREF_TYPES``: Tuple of types/classes which represent weak
|
||||
references to receivers, and thus must be dereferenced on retrieval
|
||||
to retrieve the callable object
|
||||
|
||||
- ``connections``::
|
||||
|
||||
{ senderkey (id) : { signal : [receivers...] } }
|
||||
|
||||
- ``senders``: Used for cleaning up sender references on sender
|
||||
deletion::
|
||||
|
||||
{ senderkey (id) : weakref(sender) }
|
||||
|
||||
- ``senders_back``: Used for cleaning up receiver references on receiver
|
||||
deletion::
|
||||
|
||||
{ receiverkey (id) : [senderkey (id)...] }
|
||||
"""
|
||||
|
||||
import os
|
||||
import weakref
|
||||
|
||||
try:
|
||||
set
|
||||
except NameError:
|
||||
from sets import Set as set, ImmutableSet as frozenset
|
||||
|
||||
from louie import error
|
||||
from louie import robustapply
|
||||
from louie import saferef
|
||||
from louie.sender import Any, Anonymous
|
||||
from louie.signal import All
|
||||
from prioritylist import PriorityList
|
||||
|
||||
|
||||
# Support for statistics.
|
||||
if __debug__:
|
||||
connects = 0
|
||||
disconnects = 0
|
||||
sends = 0
|
||||
|
||||
def print_stats():
|
||||
print ('\n'
|
||||
'Louie connects: %i\n'
|
||||
'Louie disconnects: %i\n'
|
||||
'Louie sends: %i\n'
|
||||
'\n') % (connects, disconnects, sends)
|
||||
|
||||
if 'PYDISPATCH_STATS' in os.environ:
|
||||
import atexit
|
||||
atexit.register(print_stats)
|
||||
|
||||
|
||||
|
||||
WEAKREF_TYPES = (weakref.ReferenceType, saferef.BoundMethodWeakref)
|
||||
|
||||
|
||||
connections = {}
|
||||
senders = {}
|
||||
senders_back = {}
|
||||
plugins = []
|
||||
|
||||
def reset():
|
||||
"""Reset the state of Louie.
|
||||
|
||||
Useful during unit testing. Should be avoided otherwise.
|
||||
"""
|
||||
global connections, senders, senders_back, plugins
|
||||
connections = {}
|
||||
senders = {}
|
||||
senders_back = {}
|
||||
plugins = []
|
||||
|
||||
|
||||
def connect(receiver, signal=All, sender=Any, weak=True, priority=0):
|
||||
"""Connect ``receiver`` to ``sender`` for ``signal``.
|
||||
|
||||
- ``receiver``: A callable Python object which is to receive
|
||||
messages/signals/events. Receivers must be hashable objects.
|
||||
|
||||
If weak is ``True``, then receiver must be weak-referencable (more
|
||||
precisely ``saferef.safe_ref()`` must be able to create a
|
||||
reference to the receiver).
|
||||
|
||||
Receivers are fairly flexible in their specification, as the
|
||||
machinery in the ``robustapply`` module takes care of most of the
|
||||
details regarding figuring out appropriate subsets of the sent
|
||||
arguments to apply to a given receiver.
|
||||
|
||||
Note: If ``receiver`` is itself a weak reference (a callable), it
|
||||
will be de-referenced by the system's machinery, so *generally*
|
||||
weak references are not suitable as receivers, though some use
|
||||
might be found for the facility whereby a higher-level library
|
||||
passes in pre-weakrefed receiver references.
|
||||
|
||||
- ``signal``: The signal to which the receiver should respond.
|
||||
|
||||
If ``All``, receiver will receive all signals from the indicated
|
||||
sender (which might also be ``All``, but is not necessarily
|
||||
``All``).
|
||||
|
||||
Otherwise must be a hashable Python object other than ``None``
|
||||
(``DispatcherError`` raised on ``None``).
|
||||
|
||||
- ``sender``: The sender to which the receiver should respond.
|
||||
|
||||
If ``Any``, receiver will receive the indicated signals from any
|
||||
sender.
|
||||
|
||||
If ``Anonymous``, receiver will only receive indicated signals
|
||||
from ``send``/``send_exact`` which do not specify a sender, or
|
||||
specify ``Anonymous`` explicitly as the sender.
|
||||
|
||||
Otherwise can be any python object.
|
||||
|
||||
- ``weak``: Whether to use weak references to the receiver.
|
||||
|
||||
By default, the module will attempt to use weak references to
|
||||
the receiver objects. If this parameter is ``False``, then strong
|
||||
references will be used.
|
||||
|
||||
- ``priority``: specifies the priority by which a reciever should
|
||||
get notified
|
||||
|
||||
Returns ``None``, may raise ``DispatcherTypeError``.
|
||||
"""
|
||||
if signal is None:
|
||||
raise error.DispatcherTypeError(
|
||||
'Signal cannot be None (receiver=%r sender=%r)'
|
||||
% (receiver, sender))
|
||||
if weak:
|
||||
receiver = saferef.safe_ref(receiver, on_delete=_remove_receiver)
|
||||
senderkey = id(sender)
|
||||
if connections.has_key(senderkey):
|
||||
signals = connections[senderkey]
|
||||
else:
|
||||
connections[senderkey] = signals = {}
|
||||
# Keep track of senders for cleanup.
|
||||
# Is Anonymous something we want to clean up?
|
||||
if sender not in (None, Anonymous, Any):
|
||||
def remove(object, senderkey=senderkey):
|
||||
_remove_sender(senderkey=senderkey)
|
||||
# Skip objects that can not be weakly referenced, which means
|
||||
# they won't be automatically cleaned up, but that's too bad.
|
||||
try:
|
||||
weak_sender = weakref.ref(sender, remove)
|
||||
senders[senderkey] = weak_sender
|
||||
except:
|
||||
pass
|
||||
receiver_id = id(receiver)
|
||||
# get current set, remove any current references to
|
||||
# this receiver in the set, including back-references
|
||||
if signals.has_key(signal):
|
||||
receivers = signals[signal]
|
||||
_remove_old_back_refs(senderkey, signal, receiver, receivers)
|
||||
else:
|
||||
receivers = signals[signal] = PriorityList()
|
||||
try:
|
||||
current = senders_back.get(receiver_id)
|
||||
if current is None:
|
||||
senders_back[receiver_id] = current = []
|
||||
if senderkey not in current:
|
||||
current.append(senderkey)
|
||||
except:
|
||||
pass
|
||||
receivers.add(receiver, priority)
|
||||
# Update stats.
|
||||
if __debug__:
|
||||
global connects
|
||||
connects += 1
|
||||
|
||||
|
||||
def disconnect(receiver, signal=All, sender=Any, weak=True):
|
||||
"""Disconnect ``receiver`` from ``sender`` for ``signal``.
|
||||
|
||||
- ``receiver``: The registered receiver to disconnect.
|
||||
|
||||
- ``signal``: The registered signal to disconnect.
|
||||
|
||||
- ``sender``: The registered sender to disconnect.
|
||||
|
||||
- ``weak``: The weakref state to disconnect.
|
||||
|
||||
``disconnect`` reverses the process of ``connect``, the semantics for
|
||||
the individual elements are logically equivalent to a tuple of
|
||||
``(receiver, signal, sender, weak)`` used as a key to be deleted
|
||||
from the internal routing tables. (The actual process is slightly
|
||||
more complex but the semantics are basically the same).
|
||||
|
||||
Note: Using ``disconnect`` is not required to cleanup routing when
|
||||
an object is deleted; the framework will remove routes for deleted
|
||||
objects automatically. It's only necessary to disconnect if you
|
||||
want to stop routing to a live object.
|
||||
|
||||
Returns ``None``, may raise ``DispatcherTypeError`` or
|
||||
``DispatcherKeyError``.
|
||||
"""
|
||||
if signal is None:
|
||||
raise error.DispatcherTypeError(
|
||||
'Signal cannot be None (receiver=%r sender=%r)'
|
||||
% (receiver, sender))
|
||||
if weak:
|
||||
receiver = saferef.safe_ref(receiver)
|
||||
senderkey = id(sender)
|
||||
try:
|
||||
signals = connections[senderkey]
|
||||
receivers = signals[signal]
|
||||
except KeyError:
|
||||
raise error.DispatcherKeyError(
|
||||
'No receivers found for signal %r from sender %r'
|
||||
% (signal, sender)
|
||||
)
|
||||
try:
|
||||
# also removes from receivers
|
||||
_remove_old_back_refs(senderkey, signal, receiver, receivers)
|
||||
except ValueError:
|
||||
raise error.DispatcherKeyError(
|
||||
'No connection to receiver %s for signal %s from sender %s'
|
||||
% (receiver, signal, sender)
|
||||
)
|
||||
_cleanup_connections(senderkey, signal)
|
||||
# Update stats.
|
||||
if __debug__:
|
||||
global disconnects
|
||||
disconnects += 1
|
||||
|
||||
|
||||
def get_receivers(sender=Any, signal=All):
|
||||
"""Get list of receivers from global tables.
|
||||
|
||||
This function allows you to retrieve the raw list of receivers
|
||||
from the connections table for the given sender and signal pair.
|
||||
|
||||
Note: There is no guarantee that this is the actual list stored in
|
||||
the connections table, so the value should be treated as a simple
|
||||
iterable/truth value rather than, for instance a list to which you
|
||||
might append new records.
|
||||
|
||||
Normally you would use ``live_receivers(get_receivers(...))`` to
|
||||
retrieve the actual receiver objects as an iterable object.
|
||||
"""
|
||||
try:
|
||||
return connections[id(sender)][signal]
|
||||
except KeyError:
|
||||
return []
|
||||
|
||||
|
||||
def live_receivers(receivers):
|
||||
"""Filter sequence of receivers to get resolved, live receivers.
|
||||
|
||||
This is a generator which will iterate over the passed sequence,
|
||||
checking for weak references and resolving them, then returning
|
||||
all live receivers.
|
||||
"""
|
||||
for receiver in receivers:
|
||||
if isinstance(receiver, WEAKREF_TYPES):
|
||||
# Dereference the weak reference.
|
||||
receiver = receiver()
|
||||
if receiver is not None:
|
||||
# Check installed plugins to make sure this receiver is
|
||||
# live.
|
||||
live = True
|
||||
for plugin in plugins:
|
||||
if not plugin.is_live(receiver):
|
||||
live = False
|
||||
break
|
||||
if live:
|
||||
yield receiver
|
||||
|
||||
|
||||
def get_all_receivers(sender=Any, signal=All):
|
||||
"""Get list of all receivers from global tables.
|
||||
|
||||
This gets all receivers which should receive the given signal from
|
||||
sender, each receiver should be produced only once by the
|
||||
resulting generator.
|
||||
"""
|
||||
yielded = set()
|
||||
for receivers in (
|
||||
# Get receivers that receive *this* signal from *this* sender.
|
||||
get_receivers(sender, signal),
|
||||
# Add receivers that receive *all* signals from *this* sender.
|
||||
get_receivers(sender, All),
|
||||
# Add receivers that receive *this* signal from *any* sender.
|
||||
get_receivers(Any, signal),
|
||||
# Add receivers that receive *all* signals from *any* sender.
|
||||
get_receivers(Any, All),
|
||||
):
|
||||
for receiver in receivers:
|
||||
if receiver: # filter out dead instance-method weakrefs
|
||||
try:
|
||||
if not receiver in yielded:
|
||||
yielded.add(receiver)
|
||||
yield receiver
|
||||
except TypeError:
|
||||
# dead weakrefs raise TypeError on hash...
|
||||
pass
|
||||
|
||||
|
||||
def send(signal=All, sender=Anonymous, *arguments, **named):
|
||||
"""Send ``signal`` from ``sender`` to all connected receivers.
|
||||
|
||||
- ``signal``: (Hashable) signal value; see ``connect`` for details.
|
||||
|
||||
- ``sender``: The sender of the signal.
|
||||
|
||||
If ``Any``, only receivers registered for ``Any`` will receive the
|
||||
message.
|
||||
|
||||
If ``Anonymous``, only receivers registered to receive messages
|
||||
from ``Anonymous`` or ``Any`` will receive the message.
|
||||
|
||||
Otherwise can be any Python object (normally one registered with
|
||||
a connect if you actually want something to occur).
|
||||
|
||||
- ``arguments``: Positional arguments which will be passed to *all*
|
||||
receivers. Note that this may raise ``TypeError`` if the receivers
|
||||
do not allow the particular arguments. Note also that arguments
|
||||
are applied before named arguments, so they should be used with
|
||||
care.
|
||||
|
||||
- ``named``: Named arguments which will be filtered according to the
|
||||
parameters of the receivers to only provide those acceptable to
|
||||
the receiver.
|
||||
|
||||
Return a list of tuple pairs ``[(receiver, response), ...]``
|
||||
|
||||
If any receiver raises an error, the error propagates back through
|
||||
send, terminating the dispatch loop, so it is quite possible to
|
||||
not have all receivers called if a raises an error.
|
||||
"""
|
||||
# Call each receiver with whatever arguments it can accept.
|
||||
# Return a list of tuple pairs [(receiver, response), ... ].
|
||||
responses = []
|
||||
for receiver in live_receivers(get_all_receivers(sender, signal)):
|
||||
# Wrap receiver using installed plugins.
|
||||
original = receiver
|
||||
for plugin in plugins:
|
||||
receiver = plugin.wrap_receiver(receiver)
|
||||
response = robustapply.robust_apply(
|
||||
receiver, original,
|
||||
signal=signal,
|
||||
sender=sender,
|
||||
*arguments,
|
||||
**named
|
||||
)
|
||||
responses.append((receiver, response))
|
||||
# Update stats.
|
||||
if __debug__:
|
||||
global sends
|
||||
sends += 1
|
||||
return responses
|
||||
|
||||
|
||||
def send_minimal(signal=All, sender=Anonymous, *arguments, **named):
|
||||
"""Like ``send``, but does not attach ``signal`` and ``sender``
|
||||
arguments to the call to the receiver."""
|
||||
# Call each receiver with whatever arguments it can accept.
|
||||
# Return a list of tuple pairs [(receiver, response), ... ].
|
||||
responses = []
|
||||
for receiver in live_receivers(get_all_receivers(sender, signal)):
|
||||
# Wrap receiver using installed plugins.
|
||||
original = receiver
|
||||
for plugin in plugins:
|
||||
receiver = plugin.wrap_receiver(receiver)
|
||||
response = robustapply.robust_apply(
|
||||
receiver, original,
|
||||
*arguments,
|
||||
**named
|
||||
)
|
||||
responses.append((receiver, response))
|
||||
# Update stats.
|
||||
if __debug__:
|
||||
global sends
|
||||
sends += 1
|
||||
return responses
|
||||
|
||||
|
||||
def send_exact(signal=All, sender=Anonymous, *arguments, **named):
|
||||
"""Send ``signal`` only to receivers registered for exact message.
|
||||
|
||||
``send_exact`` allows for avoiding ``Any``/``Anonymous`` registered
|
||||
handlers, sending only to those receivers explicitly registered
|
||||
for a particular signal on a particular sender.
|
||||
"""
|
||||
responses = []
|
||||
for receiver in live_receivers(get_receivers(sender, signal)):
|
||||
# Wrap receiver using installed plugins.
|
||||
original = receiver
|
||||
for plugin in plugins:
|
||||
receiver = plugin.wrap_receiver(receiver)
|
||||
response = robustapply.robust_apply(
|
||||
receiver, original,
|
||||
signal=signal,
|
||||
sender=sender,
|
||||
*arguments,
|
||||
**named
|
||||
)
|
||||
responses.append((receiver, response))
|
||||
return responses
|
||||
|
||||
|
||||
def send_robust(signal=All, sender=Anonymous, *arguments, **named):
|
||||
"""Send ``signal`` from ``sender`` to all connected receivers catching
|
||||
errors
|
||||
|
||||
- ``signal``: (Hashable) signal value, see connect for details
|
||||
|
||||
- ``sender``: The sender of the signal.
|
||||
|
||||
If ``Any``, only receivers registered for ``Any`` will receive the
|
||||
message.
|
||||
|
||||
If ``Anonymous``, only receivers registered to receive messages
|
||||
from ``Anonymous`` or ``Any`` will receive the message.
|
||||
|
||||
Otherwise can be any Python object (normally one registered with
|
||||
a connect if you actually want something to occur).
|
||||
|
||||
- ``arguments``: Positional arguments which will be passed to *all*
|
||||
receivers. Note that this may raise ``TypeError`` if the receivers
|
||||
do not allow the particular arguments. Note also that arguments
|
||||
are applied before named arguments, so they should be used with
|
||||
care.
|
||||
|
||||
- ``named``: Named arguments which will be filtered according to the
|
||||
parameters of the receivers to only provide those acceptable to
|
||||
the receiver.
|
||||
|
||||
Return a list of tuple pairs ``[(receiver, response), ... ]``
|
||||
|
||||
If any receiver raises an error (specifically, any subclass of
|
||||
``Exception``), the error instance is returned as the result for
|
||||
that receiver.
|
||||
"""
|
||||
# Call each receiver with whatever arguments it can accept.
|
||||
# Return a list of tuple pairs [(receiver, response), ... ].
|
||||
responses = []
|
||||
for receiver in live_receivers(get_all_receivers(sender, signal)):
|
||||
original = receiver
|
||||
for plugin in plugins:
|
||||
receiver = plugin.wrap_receiver(receiver)
|
||||
try:
|
||||
response = robustapply.robust_apply(
|
||||
receiver, original,
|
||||
signal=signal,
|
||||
sender=sender,
|
||||
*arguments,
|
||||
**named
|
||||
)
|
||||
except Exception, err:
|
||||
responses.append((receiver, err))
|
||||
else:
|
||||
responses.append((receiver, response))
|
||||
return responses
|
||||
|
||||
|
||||
def _remove_receiver(receiver):
|
||||
"""Remove ``receiver`` from connections."""
|
||||
if not senders_back:
|
||||
# During module cleanup the mapping will be replaced with None.
|
||||
return False
|
||||
backKey = id(receiver)
|
||||
for senderkey in senders_back.get(backKey, ()):
|
||||
try:
|
||||
signals = connections[senderkey].keys()
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
for signal in signals:
|
||||
try:
|
||||
receivers = connections[senderkey][signal]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
receivers.remove(receiver)
|
||||
except Exception:
|
||||
pass
|
||||
_cleanup_connections(senderkey, signal)
|
||||
try:
|
||||
del senders_back[backKey]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
|
||||
def _cleanup_connections(senderkey, signal):
|
||||
"""Delete empty signals for ``senderkey``. Delete ``senderkey`` if
|
||||
empty."""
|
||||
try:
|
||||
receivers = connections[senderkey][signal]
|
||||
except:
|
||||
pass
|
||||
else:
|
||||
if not receivers:
|
||||
# No more connected receivers. Therefore, remove the signal.
|
||||
try:
|
||||
signals = connections[senderkey]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
del signals[signal]
|
||||
if not signals:
|
||||
# No more signal connections. Therefore, remove the sender.
|
||||
_remove_sender(senderkey)
|
||||
|
||||
|
||||
def _remove_sender(senderkey):
|
||||
"""Remove ``senderkey`` from connections."""
|
||||
_remove_back_refs(senderkey)
|
||||
try:
|
||||
del connections[senderkey]
|
||||
except KeyError:
|
||||
pass
|
||||
# Senderkey will only be in senders dictionary if sender
|
||||
# could be weakly referenced.
|
||||
try:
|
||||
del senders[senderkey]
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
def _remove_back_refs(senderkey):
|
||||
"""Remove all back-references to this ``senderkey``."""
|
||||
try:
|
||||
signals = connections[senderkey]
|
||||
except KeyError:
|
||||
signals = None
|
||||
else:
|
||||
for signal, receivers in signals.iteritems():
|
||||
for receiver in receivers:
|
||||
_kill_back_ref(receiver, senderkey)
|
||||
|
||||
|
||||
def _remove_old_back_refs(senderkey, signal, receiver, receivers):
|
||||
"""Kill old ``senders_back`` references from ``receiver``.
|
||||
|
||||
This guards against multiple registration of the same receiver for
|
||||
a given signal and sender leaking memory as old back reference
|
||||
records build up.
|
||||
|
||||
Also removes old receiver instance from receivers.
|
||||
"""
|
||||
try:
|
||||
index = receivers.index(receiver)
|
||||
# need to scan back references here and remove senderkey
|
||||
except ValueError:
|
||||
return False
|
||||
else:
|
||||
old_receiver = receivers[index]
|
||||
del receivers[index]
|
||||
found = 0
|
||||
signals = connections.get(signal)
|
||||
if signals is not None:
|
||||
for sig, recs in connections.get(signal, {}).iteritems():
|
||||
if sig != signal:
|
||||
for rec in recs:
|
||||
if rec is old_receiver:
|
||||
found = 1
|
||||
break
|
||||
if not found:
|
||||
_kill_back_ref(old_receiver, senderkey)
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _kill_back_ref(receiver, senderkey):
|
||||
"""Do actual removal of back reference from ``receiver`` to
|
||||
``senderkey``."""
|
||||
receiverkey = id(receiver)
|
||||
senders = senders_back.get(receiverkey, ())
|
||||
while senderkey in senders:
|
||||
try:
|
||||
senders.remove(senderkey)
|
||||
except:
|
||||
break
|
||||
if not senders:
|
||||
try:
|
||||
del senders_back[receiverkey]
|
||||
except KeyError:
|
||||
pass
|
||||
return True
|
||||
|
||||
|
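For reference, a minimal sketch of how the robust dispatch path above is typically driven from calling code; the signal name and receiver here are illustrative, not part of this commit:

import louie

def on_result(value, **kwargs):
    # robust_apply() also passes signal/sender as named arguments;
    # **kwargs soaks them up.
    raise ValueError(value)

louie.connect(on_result, 'job-done')

# send_robust() returns [(receiver, response), ...]; an exception raised by
# a receiver is captured and returned in place of its response.
for receiver, response in louie.send_robust('job-done', value=42):
    if isinstance(response, Exception):
        print('receiver %r failed: %r' % (receiver, response))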
22
wlauto/external/louie/error.py
vendored
@ -1,22 +0,0 @@
|
||||
"""Error types for Louie."""
|
||||
|
||||
|
||||
class LouieError(Exception):
|
||||
"""Base class for all Louie errors"""
|
||||
|
||||
|
||||
class DispatcherError(LouieError):
|
||||
"""Base class for all Dispatcher errors"""
|
||||
|
||||
|
||||
class DispatcherKeyError(KeyError, DispatcherError):
|
||||
"""Error raised when unknown (sender, signal) specified"""
|
||||
|
||||
|
||||
class DispatcherTypeError(TypeError, DispatcherError):
|
||||
"""Error raised when inappropriate signal-type specified (None)"""
|
||||
|
||||
|
||||
class PluginTypeError(TypeError, LouieError):
|
||||
"""Error raise when trying to install more than one plugin of a
|
||||
certain type."""
|
108
wlauto/external/louie/plugin.py
vendored
@ -1,108 +0,0 @@
|
||||
"""Common plugins for Louie."""
|
||||
|
||||
from louie import dispatcher
|
||||
from louie import error
|
||||
|
||||
|
||||
def install_plugin(plugin):
|
||||
cls = plugin.__class__
|
||||
for p in dispatcher.plugins:
|
||||
if p.__class__ is cls:
|
||||
raise error.PluginTypeError(
|
||||
'Plugin of type %r already installed.' % cls)
|
||||
dispatcher.plugins.append(plugin)
|
||||
|
||||
def remove_plugin(plugin):
|
||||
dispatcher.plugins.remove(plugin)
|
||||
|
||||
|
||||
class Plugin(object):
|
||||
"""Base class for Louie plugins.
|
||||
|
||||
Plugins are used to extend or alter the behavior of Louie
|
||||
in a uniform way without having to modify the Louie code
|
||||
itself.
|
||||
"""
|
||||
|
||||
def is_live(self, receiver):
|
||||
"""Return True if the receiver is still live.
|
||||
|
||||
Only called for receivers who have already been determined to
|
||||
be live by default Louie semantics.
|
||||
"""
|
||||
return True
|
||||
|
||||
def wrap_receiver(self, receiver):
|
||||
"""Return a callable that passes arguments to the receiver.
|
||||
|
||||
Useful when you want to change the behavior of all receivers.
|
||||
"""
|
||||
return receiver
|
||||
|
||||
|
||||
class QtWidgetPlugin(Plugin):
|
||||
"""A Plugin for Louie that knows how to handle Qt widgets
|
||||
when using PyQt built with SIP 4 or higher.
|
||||
|
||||
Weak references are not useful when dealing with QWidget
|
||||
instances, because even after a QWidget is closed and destroyed,
|
||||
only the C++ object is destroyed. The Python 'shell' object
|
||||
remains, but raises a RuntimeError when an attempt is made to call
|
||||
an underlying QWidget method.
|
||||
|
||||
This plugin alleviates this behavior, and if a QWidget instance is
|
||||
found that is just an empty shell, it prevents Louie from
|
||||
dispatching to any methods on those objects.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
try:
|
||||
import qt
|
||||
except ImportError:
|
||||
self.is_live = self._is_live_no_qt
|
||||
else:
|
||||
self.qt = qt
|
||||
|
||||
def is_live(self, receiver):
|
||||
"""If receiver is a method on a QWidget, only return True if
|
||||
it hasn't been destroyed."""
|
||||
if (hasattr(receiver, 'im_self') and
|
||||
isinstance(receiver.im_self, self.qt.QWidget)
|
||||
):
|
||||
try:
|
||||
receiver.im_self.x()
|
||||
except RuntimeError:
|
||||
return False
|
||||
return True
|
||||
|
||||
def _is_live_no_qt(self, receiver):
|
||||
return True
|
||||
|
||||
|
||||
class TwistedDispatchPlugin(Plugin):
|
||||
"""Plugin for Louie that wraps all receivers in callables
|
||||
that return Twisted Deferred objects.
|
||||
|
||||
When the wrapped receiver is called, it adds a call to the actual
|
||||
receiver to the reactor event loop, and returns a Deferred that is
|
||||
called back with the result.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
# Don't import reactor ourselves, but make access to it
|
||||
# easier.
|
||||
from twisted import internet
|
||||
from twisted.internet.defer import Deferred
|
||||
self._internet = internet
|
||||
self._Deferred = Deferred
|
||||
|
||||
def wrap_receiver(self, receiver):
|
||||
def wrapper(*args, **kw):
|
||||
d = self._Deferred()
|
||||
def called(dummy):
|
||||
return receiver(*args, **kw)
|
||||
d.addCallback(called)
|
||||
self._internet.reactor.callLater(0, d.callback, None)
|
||||
return d
|
||||
return wrapper
|
||||
|
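As a rough illustration of the plugin hooks being removed here, a custom Louie plugin only needs to override is_live() and/or wrap_receiver(); the logging plugin below is a hypothetical example, not something shipped with Louie or WA:

import louie

class LoggingPlugin(louie.Plugin):
    """Wrap every receiver so each dispatch is printed before it runs."""

    def wrap_receiver(self, receiver):
        def wrapper(*args, **kwargs):
            print('dispatching to %r' % (receiver,))
            return receiver(*args, **kwargs)
        return wrapper

louie.install_plugin(LoggingPlugin())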
128
wlauto/external/louie/prioritylist.py
vendored
@ -1,128 +0,0 @@
|
||||
"""OrderedList class
|
||||
|
||||
This class keeps its elements ordered according to their priority.
|
||||
"""
|
||||
from collections import defaultdict
|
||||
import numbers
|
||||
from bisect import insort
|
||||
|
||||
class PriorityList(object):
|
||||
|
||||
def __init__(self):
|
||||
"""
|
||||
Returns an OrderedReceivers object that externally behaves
like a list but maintains the order of its elements
according to their priority.
|
||||
"""
|
||||
self.elements = defaultdict(list)
|
||||
self.is_ordered = True
|
||||
self.priorities = []
|
||||
self.size = 0
|
||||
self._cached_elements = None
|
||||
|
||||
def __del__(self):
|
||||
pass
|
||||
|
||||
def __iter__(self):
|
||||
"""
|
||||
This method makes the PriorityList class iterable.
|
||||
"""
|
||||
self._order_elements()
|
||||
for priority in reversed(self.priorities): # highest priority first
|
||||
for element in self.elements[priority]:
|
||||
yield element
|
||||
|
||||
def __getitem__(self, index):
|
||||
self._order_elements()
|
||||
return self._to_list()[index]
|
||||
|
||||
def __delitem__(self, index):
|
||||
self._order_elements()
|
||||
if isinstance(index, numbers.Integral):
|
||||
index = int(index)
|
||||
if index < 0:
|
||||
index_range = [len(self)+index]
|
||||
else:
|
||||
index_range = [index]
|
||||
elif isinstance(index, slice):
|
||||
index_range = range(index.start or 0, index.stop, index.step or 1)
|
||||
else:
|
||||
raise ValueError('Invalid index {}'.format(index))
|
||||
current_global_offset = 0
|
||||
priority_counts = {priority : count for (priority, count) in
|
||||
zip(self.priorities, [len(self.elements[p]) for p in self.priorities])}
|
||||
for priority in self.priorities:
|
||||
if not index_range:
|
||||
break
|
||||
priority_offset = 0
|
||||
while index_range:
|
||||
del_index = index_range[0]
|
||||
if priority_counts[priority] + current_global_offset <= del_index:
|
||||
current_global_offset += priority_counts[priority]
|
||||
break
|
||||
within_priority_index = del_index - (current_global_offset + priority_offset)
|
||||
self._delete(priority, within_priority_index)
|
||||
priority_offset += 1
|
||||
index_range.pop(0)
|
||||
|
||||
def __len__(self):
|
||||
return self.size
|
||||
|
||||
def add(self, new_element, priority=0, force_ordering=True):
|
||||
"""
|
||||
Adds a new item to the list.
|
||||
|
||||
- ``new_element`` the element to be inserted in the PriorityList
|
||||
- ``priority`` is the priority of the element which specifies its
|
||||
order within the list
|
||||
- ``force_ordering`` indicates whether elements should be ordered
|
||||
right now. If set to False, ordering happens on demand (lazy)
|
||||
"""
|
||||
self._add_element(new_element, priority)
|
||||
if priority not in self.priorities:
|
||||
self._add_priority(priority, force_ordering)
|
||||
|
||||
def index(self, element):
|
||||
return self._to_list().index(element)
|
||||
|
||||
def remove(self, element):
|
||||
index = self.index(element)
|
||||
self.__delitem__(index)
|
||||
|
||||
def _order_elements(self):
|
||||
if not self.is_ordered:
|
||||
self.priorities = sorted(self.priorities)
|
||||
self.is_ordered = True
|
||||
|
||||
def _to_list(self):
|
||||
if self._cached_elements is None:
|
||||
self._order_elements()
|
||||
self._cached_elements = []
|
||||
for priority in self.priorities:
|
||||
self._cached_elements += self.elements[priority]
|
||||
return self._cached_elements
|
||||
|
||||
def _add_element(self, element, priority):
|
||||
self.elements[priority].append(element)
|
||||
self.size += 1
|
||||
self._cached_elements = None
|
||||
|
||||
def _delete(self, priority, priority_index):
|
||||
del self.elements[priority][priority_index]
|
||||
self.size -= 1
|
||||
if len(self.elements[priority]) == 0:
|
||||
self.priorities.remove(priority)
|
||||
self._cached_elements = None
|
||||
|
||||
def _add_priority(self, priority, force_ordering):
|
||||
if force_ordering and self.is_ordered:
|
||||
insort(self.priorities, priority)
|
||||
elif not force_ordering:
|
||||
self.priorities.append(priority)
|
||||
self.is_ordered = False
|
||||
elif not self.is_ordered:
|
||||
self.priorities.append(priority)
|
||||
self._order_elements()
|
||||
else:
|
||||
raise AssertionError('Should never get here.')
|
||||
|
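The class above is exercised by the tests later in this diff; a minimal usage sketch (the element values are arbitrary):

from louie.prioritylist import PriorityList

pl = PriorityList()
pl.add('first handler', priority=10)
pl.add('fallback handler', priority=1)
pl.add('normal handler', priority=5)

# Iteration walks the elements in priority order (see __iter__ above).
for element in pl:
    print(element)

print(len(pl))                  # 3
pl.remove('fallback handler')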
58
wlauto/external/louie/robustapply.py
vendored
@ -1,58 +0,0 @@
|
||||
"""Robust apply mechanism.
|
||||
|
||||
Provides a function 'call', which can sort out what arguments a given
|
||||
callable object can take, and subset the given arguments to match only
|
||||
those which are acceptable.
|
||||
"""
|
||||
|
||||
def function(receiver):
|
||||
"""Get function-like callable object for given receiver.
|
||||
|
||||
returns (function_or_method, codeObject, fromMethod)
|
||||
|
||||
If fromMethod is true, then the callable already has its first
|
||||
argument bound.
|
||||
"""
|
||||
if hasattr(receiver, '__call__'):
|
||||
# receiver is a class instance; assume it is callable.
|
||||
# Reassign receiver to the actual method that will be called.
|
||||
c = receiver.__call__
|
||||
if hasattr(c, 'im_func') or hasattr(c, 'im_code'):
|
||||
receiver = c
|
||||
if hasattr(receiver, 'im_func'):
|
||||
# receiver is an instance-method.
|
||||
return receiver, receiver.im_func.func_code, 1
|
||||
elif not hasattr(receiver, 'func_code'):
|
||||
raise ValueError(
|
||||
'unknown receiver type %s %s' % (receiver, type(receiver)))
|
||||
return receiver, receiver.func_code, 0
|
||||
|
||||
|
||||
def robust_apply(receiver, signature, *arguments, **named):
|
||||
"""Call receiver with arguments and appropriate subset of named.
|
||||
``signature`` is the callable used to determine the call signature
|
||||
of the receiver, in case ``receiver`` is a callable wrapper of the
|
||||
actual receiver."""
|
||||
signature, code_object, startIndex = function(signature)
|
||||
acceptable = code_object.co_varnames[
|
||||
startIndex + len(arguments):
|
||||
code_object.co_argcount
|
||||
]
|
||||
for name in code_object.co_varnames[
|
||||
startIndex:startIndex + len(arguments)
|
||||
]:
|
||||
if named.has_key(name):
|
||||
raise TypeError(
|
||||
'Argument %r specified both positionally '
|
||||
'and as a keyword for calling %r'
|
||||
% (name, signature)
|
||||
)
|
||||
if not (code_object.co_flags & 8):
|
||||
# fc does not have a **kwds type parameter, therefore
|
||||
# remove unacceptable arguments.
|
||||
for arg in named.keys():
|
||||
if arg not in acceptable:
|
||||
del named[arg]
|
||||
return receiver(*arguments, **named)
|
||||
|
||||
|
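A small sketch of what robust_apply() does with a receiver's signature; the receiver and argument names below are only for illustration:

from louie.robustapply import robust_apply

def receiver(value, sender=None):
    return (value, sender)

# 'extra' is not in receiver's signature, so it is silently dropped;
# 'value' and 'sender' are passed through unchanged.
result = robust_apply(receiver, receiver,
                      value=1, sender='anonymous', extra='ignored')
print(result)    # (1, 'anonymous')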
179
wlauto/external/louie/saferef.py
vendored
@ -1,179 +0,0 @@
|
||||
"""Refactored 'safe reference from dispatcher.py"""
|
||||
|
||||
import weakref
|
||||
import traceback
|
||||
|
||||
|
||||
def safe_ref(target, on_delete=None):
|
||||
"""Return a *safe* weak reference to a callable target.
|
||||
|
||||
- ``target``: The object to be weakly referenced, if it's a bound
|
||||
method reference, will create a BoundMethodWeakref, otherwise
|
||||
creates a simple weakref.
|
||||
|
||||
- ``on_delete``: If provided, will have a hard reference stored to
|
||||
the callable to be called after the safe reference goes out of
|
||||
scope with the reference object, (either a weakref or a
|
||||
BoundMethodWeakref) as argument.
|
||||
"""
|
||||
if hasattr(target, 'im_self'):
|
||||
if target.im_self is not None:
|
||||
# Turn a bound method into a BoundMethodWeakref instance.
|
||||
# Keep track of these instances for lookup by disconnect().
|
||||
assert hasattr(target, 'im_func'), (
|
||||
"safe_ref target %r has im_self, but no im_func, "
|
||||
"don't know how to create reference"
|
||||
% target
|
||||
)
|
||||
reference = BoundMethodWeakref(target=target, on_delete=on_delete)
|
||||
return reference
|
||||
if callable(on_delete):
|
||||
return weakref.ref(target, on_delete)
|
||||
else:
|
||||
return weakref.ref(target)
|
||||
|
||||
|
||||
class BoundMethodWeakref(object):
|
||||
"""'Safe' and reusable weak references to instance methods.
|
||||
|
||||
BoundMethodWeakref objects provide a mechanism for referencing a
|
||||
bound method without requiring that the method object itself
|
||||
(which is normally a transient object) is kept alive. Instead,
|
||||
the BoundMethodWeakref object keeps weak references to both the
|
||||
object and the function which together define the instance method.
|
||||
|
||||
Attributes:
|
||||
|
||||
- ``key``: The identity key for the reference, calculated by the
|
||||
class's calculate_key method applied to the target instance method.
|
||||
|
||||
- ``deletion_methods``: Sequence of callable objects taking single
|
||||
argument, a reference to this object which will be called when
|
||||
*either* the target object or target function is garbage
|
||||
collected (i.e. when this object becomes invalid). These are
|
||||
specified as the on_delete parameters of safe_ref calls.
|
||||
|
||||
- ``weak_self``: Weak reference to the target object.
|
||||
|
||||
- ``weak_func``: Weak reference to the target function.
|
||||
|
||||
Class Attributes:
|
||||
|
||||
- ``_all_instances``: Class attribute pointing to all live
|
||||
BoundMethodWeakref objects indexed by the class's
|
||||
calculate_key(target) method applied to the target objects.
|
||||
This weak value dictionary is used to short-circuit creation so
|
||||
that multiple references to the same (object, function) pair
|
||||
produce the same BoundMethodWeakref instance.
|
||||
"""
|
||||
|
||||
_all_instances = weakref.WeakValueDictionary()
|
||||
|
||||
def __new__(cls, target, on_delete=None, *arguments, **named):
|
||||
"""Create new instance or return current instance.
|
||||
|
||||
Basically this method of construction allows us to
|
||||
short-circuit creation of references to already-referenced
|
||||
instance methods. The key corresponding to the target is
|
||||
calculated, and if there is already an existing reference,
|
||||
that is returned, with its deletion_methods attribute updated.
|
||||
Otherwise the new instance is created and registered in the
|
||||
table of already-referenced methods.
|
||||
"""
|
||||
key = cls.calculate_key(target)
|
||||
current = cls._all_instances.get(key)
|
||||
if current is not None:
|
||||
current.deletion_methods.append(on_delete)
|
||||
return current
|
||||
else:
|
||||
base = super(BoundMethodWeakref, cls).__new__(cls)
|
||||
cls._all_instances[key] = base
|
||||
base.__init__(target, on_delete, *arguments, **named)
|
||||
return base
|
||||
|
||||
def __init__(self, target, on_delete=None):
|
||||
"""Return a weak-reference-like instance for a bound method.
|
||||
|
||||
- ``target``: The instance-method target for the weak reference,
|
||||
must have im_self and im_func attributes and be
|
||||
reconstructable via the following, which is true of built-in
|
||||
instance methods::
|
||||
|
||||
target.im_func.__get__( target.im_self )
|
||||
|
||||
- ``on_delete``: Optional callback which will be called when
|
||||
this weak reference ceases to be valid (i.e. either the
|
||||
object or the function is garbage collected). Should take a
|
||||
single argument, which will be passed a pointer to this
|
||||
object.
|
||||
"""
|
||||
def remove(weak, self=self):
|
||||
"""Set self.isDead to True when method or instance is destroyed."""
|
||||
methods = self.deletion_methods[:]
|
||||
del self.deletion_methods[:]
|
||||
try:
|
||||
del self.__class__._all_instances[self.key]
|
||||
except KeyError:
|
||||
pass
|
||||
for function in methods:
|
||||
try:
|
||||
if callable(function):
|
||||
function(self)
|
||||
except Exception:
|
||||
try:
|
||||
traceback.print_exc()
|
||||
except AttributeError, e:
|
||||
print ('Exception during saferef %s '
|
||||
'cleanup function %s: %s' % (self, function, e))
|
||||
self.deletion_methods = [on_delete]
|
||||
self.key = self.calculate_key(target)
|
||||
self.weak_self = weakref.ref(target.im_self, remove)
|
||||
self.weak_func = weakref.ref(target.im_func, remove)
|
||||
self.self_name = str(target.im_self)
|
||||
self.func_name = str(target.im_func.__name__)
|
||||
|
||||
def calculate_key(cls, target):
|
||||
"""Calculate the reference key for this reference.
|
||||
|
||||
Currently this is a two-tuple of the id()'s of the target
|
||||
object and the target function respectively.
|
||||
"""
|
||||
return (id(target.im_self), id(target.im_func))
|
||||
calculate_key = classmethod(calculate_key)
|
||||
|
||||
def __str__(self):
|
||||
"""Give a friendly representation of the object."""
|
||||
return "%s(%s.%s)" % (
|
||||
self.__class__.__name__,
|
||||
self.self_name,
|
||||
self.func_name,
|
||||
)
|
||||
|
||||
__repr__ = __str__
|
||||
|
||||
def __nonzero__(self):
|
||||
"""Whether we are still a valid reference."""
|
||||
return self() is not None
|
||||
|
||||
def __cmp__(self, other):
|
||||
"""Compare with another reference."""
|
||||
if not isinstance(other, self.__class__):
|
||||
return cmp(self.__class__, type(other))
|
||||
return cmp(self.key, other.key)
|
||||
|
||||
def __call__(self):
|
||||
"""Return a strong reference to the bound method.
|
||||
|
||||
If the target cannot be retrieved, then will return None,
|
||||
otherwise returns a bound instance method for our object and
|
||||
function.
|
||||
|
||||
Note: You may call this method any number of times, as it does
|
||||
not invalidate the reference.
|
||||
"""
|
||||
target = self.weak_self()
|
||||
if target is not None:
|
||||
function = self.weak_func()
|
||||
if function is not None:
|
||||
return function.__get__(target)
|
||||
return None
|
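To illustrate the behaviour of the weak references above (the class and method names are only for the example, and this mirrors the Python 2 semantics of the vendored code):

from louie.saferef import safe_ref

class Handler(object):
    def on_event(self, value):
        return value

handler = Handler()
ref = safe_ref(handler.on_event)   # a BoundMethodWeakref under the hood
print(ref()(42))                   # 42 - ref() returns the bound method
del handler
print(ref())                       # None once the instance is gone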
39
wlauto/external/louie/sender.py
vendored
@ -1,39 +0,0 @@
|
||||
"""Sender classes."""
|
||||
|
||||
|
||||
class _SENDER(type):
|
||||
"""Base metaclass for sender classes."""
|
||||
|
||||
def __str__(cls):
|
||||
return '<Sender: %s>' % (cls.__name__, )
|
||||
|
||||
|
||||
class Any(object):
|
||||
"""Used to represent either 'any sender'.
|
||||
|
||||
The Any class can be used with connect, disconnect, send, or
|
||||
sendExact to denote that the sender parameter should react to any
|
||||
sender, not just a particular sender.
|
||||
"""
|
||||
|
||||
__metaclass__ = _SENDER
|
||||
|
||||
|
||||
class Anonymous(object):
|
||||
"""Singleton used to signal 'anonymous sender'.
|
||||
|
||||
The Anonymous class is used to signal that the sender of a message
|
||||
is not specified (as distinct from being 'any sender').
|
||||
Registering callbacks for Anonymous will only receive messages
|
||||
sent without senders. Sending with anonymous will only send
|
||||
messages to those receivers registered for Any or Anonymous.
|
||||
|
||||
Note: The default sender for connect is Any, while the default
|
||||
sender for send is Anonymous. This has the effect that if you do
|
||||
not specify any senders in either function then all messages are
|
||||
routed as though there was a single sender (Anonymous) being used
|
||||
everywhere.
|
||||
"""
|
||||
|
||||
__metaclass__ = _SENDER
|
||||
|
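Roughly, the sender semantics described above work as follows; the receiver and class names are illustrative only:

import louie

def on_update(**kwargs):
    print('update received from %r' % (kwargs.get('sender'),))

class Publisher(object):
    pass

publisher = Publisher()

# The default sender for connect() is Any, so this receiver accepts any sender.
louie.connect(on_update, 'update')

louie.send('update', publisher)    # explicit sender
louie.send('update')               # anonymous send still reaches Any receivers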
30
wlauto/external/louie/signal.py
vendored
@ -1,30 +0,0 @@
|
||||
"""Signal class.
|
||||
|
||||
This class is provided as a way to consistently define and document
|
||||
signal types. Signal classes also have a useful string
|
||||
representation.
|
||||
|
||||
Louie does not require you to use a subclass of Signal for signals.
|
||||
"""
|
||||
|
||||
|
||||
class _SIGNAL(type):
|
||||
"""Base metaclass for signal classes."""
|
||||
|
||||
def __str__(cls):
|
||||
return '<Signal: %s>' % (cls.__name__, )
|
||||
|
||||
|
||||
class Signal(object):
|
||||
|
||||
__metaclass__ = _SIGNAL
|
||||
|
||||
|
||||
class All(Signal):
|
||||
"""Used to represent 'all signals'.
|
||||
|
||||
The All class can be used with connect, disconnect, send, or
|
||||
sendExact to denote that the signal should react to all signals,
|
||||
not just a particular signal.
|
||||
"""
|
||||
|
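Signals do not have to be strings; as the docstring above notes, a Signal subclass can be used as a typed, documented signal. The class below is an example for illustration, not part of WA:

import louie
from louie.signal import Signal

class RunStarted(Signal):
    """Emitted when a measurement run begins."""

def on_run_started(**kwargs):
    print('run started')

louie.connect(on_run_started, RunStarted)
louie.send(RunStarted)
louie.connect(on_run_started, louie.All)   # or react to every signal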
0
wlauto/external/louie/test/__init__.py
vendored
5
wlauto/external/louie/test/conftest.py
vendored
@ -1,5 +0,0 @@
|
||||
import sys
|
||||
import os
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
|
||||
|
0
wlauto/external/louie/test/fixture.py
vendored
154
wlauto/external/louie/test/test_dispatcher.py
vendored
@ -1,154 +0,0 @@
|
||||
import unittest
|
||||
|
||||
import louie
|
||||
from louie import dispatcher
|
||||
|
||||
|
||||
def x(a):
|
||||
return a
|
||||
|
||||
|
||||
class Dummy(object):
|
||||
pass
|
||||
|
||||
|
||||
class Callable(object):
|
||||
|
||||
def __call__(self, a):
|
||||
return a
|
||||
|
||||
def a(self, a):
|
||||
return a
|
||||
|
||||
|
||||
class TestDispatcher(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
louie.reset()
|
||||
|
||||
def _isclean(self):
|
||||
"""Assert that everything has been cleaned up automatically"""
|
||||
assert len(dispatcher.senders_back) == 0, dispatcher.senders_back
|
||||
assert len(dispatcher.connections) == 0, dispatcher.connections
|
||||
assert len(dispatcher.senders) == 0, dispatcher.senders
|
||||
|
||||
def test_Exact(self):
|
||||
a = Dummy()
|
||||
signal = 'this'
|
||||
louie.connect(x, signal, a)
|
||||
expected = [(x, a)]
|
||||
result = louie.send('this', a, a=a)
|
||||
assert result == expected, (
|
||||
"Send didn't return expected result:\n\texpected:%s\n\tgot:%s"
|
||||
% (expected, result))
|
||||
louie.disconnect(x, signal, a)
|
||||
assert len(list(louie.get_all_receivers(a, signal))) == 0
|
||||
self._isclean()
|
||||
|
||||
def test_AnonymousSend(self):
|
||||
a = Dummy()
|
||||
signal = 'this'
|
||||
louie.connect(x, signal)
|
||||
expected = [(x, a)]
|
||||
result = louie.send(signal, None, a=a)
|
||||
assert result == expected, (
|
||||
"Send didn't return expected result:\n\texpected:%s\n\tgot:%s"
|
||||
% (expected, result))
|
||||
louie.disconnect(x, signal)
|
||||
assert len(list(louie.get_all_receivers(None, signal))) == 0
|
||||
self._isclean()
|
||||
|
||||
def test_AnyRegistration(self):
|
||||
a = Dummy()
|
||||
signal = 'this'
|
||||
louie.connect(x, signal, louie.Any)
|
||||
expected = [(x, a)]
|
||||
result = louie.send('this', object(), a=a)
|
||||
assert result == expected, (
|
||||
"Send didn't return expected result:\n\texpected:%s\n\tgot:%s"
|
||||
% (expected, result))
|
||||
louie.disconnect(x, signal, louie.Any)
|
||||
expected = []
|
||||
result = louie.send('this', object(), a=a)
|
||||
assert result == expected, (
|
||||
"Send didn't return expected result:\n\texpected:%s\n\tgot:%s"
|
||||
% (expected, result))
|
||||
assert len(list(louie.get_all_receivers(louie.Any, signal))) == 0
|
||||
self._isclean()
|
||||
|
||||
def test_AllRegistration(self):
|
||||
a = Dummy()
|
||||
signal = 'this'
|
||||
louie.connect(x, louie.All, a)
|
||||
expected = [(x, a)]
|
||||
result = louie.send('this', a, a=a)
|
||||
assert result == expected, (
|
||||
"Send didn't return expected result:\n\texpected:%s\n\tgot:%s"
|
||||
% (expected, result))
|
||||
louie.disconnect(x, louie.All, a)
|
||||
assert len(list(louie.get_all_receivers(a, louie.All))) == 0
|
||||
self._isclean()
|
||||
|
||||
def test_GarbageCollected(self):
|
||||
a = Callable()
|
||||
b = Dummy()
|
||||
signal = 'this'
|
||||
louie.connect(a.a, signal, b)
|
||||
expected = []
|
||||
del a
|
||||
result = louie.send('this', b, a=b)
|
||||
assert result == expected, (
|
||||
"Send didn't return expected result:\n\texpected:%s\n\tgot:%s"
|
||||
% (expected, result))
|
||||
assert len(list(louie.get_all_receivers(b, signal))) == 0, (
|
||||
"Remaining handlers: %s" % (louie.get_all_receivers(b, signal),))
|
||||
self._isclean()
|
||||
|
||||
def test_GarbageCollectedObj(self):
|
||||
class x:
|
||||
def __call__(self, a):
|
||||
return a
|
||||
a = Callable()
|
||||
b = Dummy()
|
||||
signal = 'this'
|
||||
louie.connect(a, signal, b)
|
||||
expected = []
|
||||
del a
|
||||
result = louie.send('this', b, a=b)
|
||||
assert result == expected, (
|
||||
"Send didn't return expected result:\n\texpected:%s\n\tgot:%s"
|
||||
% (expected, result))
|
||||
assert len(list(louie.get_all_receivers(b, signal))) == 0, (
|
||||
"Remaining handlers: %s" % (louie.get_all_receivers(b, signal),))
|
||||
self._isclean()
|
||||
|
||||
def test_MultipleRegistration(self):
|
||||
a = Callable()
|
||||
b = Dummy()
|
||||
signal = 'this'
|
||||
louie.connect(a, signal, b)
|
||||
louie.connect(a, signal, b)
|
||||
louie.connect(a, signal, b)
|
||||
louie.connect(a, signal, b)
|
||||
louie.connect(a, signal, b)
|
||||
louie.connect(a, signal, b)
|
||||
result = louie.send('this', b, a=b)
|
||||
assert len(result) == 1, result
|
||||
assert len(list(louie.get_all_receivers(b, signal))) == 1, (
|
||||
"Remaining handlers: %s" % (louie.get_all_receivers(b, signal),))
|
||||
del a
|
||||
del b
|
||||
del result
|
||||
self._isclean()
|
||||
|
||||
def test_robust(self):
|
||||
"""Test the sendRobust function."""
|
||||
def fails():
|
||||
raise ValueError('this')
|
||||
a = object()
|
||||
signal = 'this'
|
||||
louie.connect(fails, louie.All, a)
|
||||
result = louie.send_robust('this', a, a=a)
|
||||
err = result[0][1]
|
||||
assert isinstance(err, ValueError)
|
||||
assert err.args == ('this', )
|
145
wlauto/external/louie/test/test_plugin.py
vendored
@ -1,145 +0,0 @@
|
||||
"""Louie plugin tests."""
|
||||
|
||||
import unittest
|
||||
|
||||
import louie
|
||||
|
||||
try:
|
||||
import qt
|
||||
if not hasattr(qt.qApp, 'for_testing'):
|
||||
_app = qt.QApplication([])
|
||||
_app.for_testing = True
|
||||
qt.qApp = _app
|
||||
except ImportError:
|
||||
qt = None
|
||||
|
||||
|
||||
class ReceiverBase(object):
|
||||
|
||||
def __init__(self):
|
||||
self.args = []
|
||||
self.live = True
|
||||
|
||||
def __call__(self, arg):
|
||||
self.args.append(arg)
|
||||
|
||||
class Receiver1(ReceiverBase):
|
||||
pass
|
||||
|
||||
class Receiver2(ReceiverBase):
|
||||
pass
|
||||
|
||||
|
||||
class Plugin1(louie.Plugin):
|
||||
|
||||
def is_live(self, receiver):
|
||||
"""ReceiverBase instances are only live if their `live`
|
||||
attribute is True"""
|
||||
if isinstance(receiver, ReceiverBase):
|
||||
return receiver.live
|
||||
return True
|
||||
|
||||
|
||||
class Plugin2(louie.Plugin):
|
||||
|
||||
def is_live(self, receiver):
|
||||
"""Pretend all Receiver2 instances are not live."""
|
||||
if isinstance(receiver, Receiver2):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def test_only_one_instance():
|
||||
louie.reset()
|
||||
plugin1a = Plugin1()
|
||||
plugin1b = Plugin1()
|
||||
louie.install_plugin(plugin1a)
|
||||
# XXX: Move these tests into test cases so we can use unittest's
|
||||
# 'assertRaises' method.
|
||||
try:
|
||||
louie.install_plugin(plugin1b)
|
||||
except louie.error.PluginTypeError:
|
||||
pass
|
||||
else:
|
||||
raise Exception('PluginTypeError not raised')
|
||||
|
||||
|
||||
def test_is_live():
|
||||
louie.reset()
|
||||
# Create receivers.
|
||||
receiver1a = Receiver1()
|
||||
receiver1b = Receiver1()
|
||||
receiver2a = Receiver2()
|
||||
receiver2b = Receiver2()
|
||||
# Connect signals.
|
||||
louie.connect(receiver1a, 'sig')
|
||||
louie.connect(receiver1b, 'sig')
|
||||
louie.connect(receiver2a, 'sig')
|
||||
louie.connect(receiver2b, 'sig')
|
||||
# Check reception without plugins.
|
||||
louie.send('sig', arg='foo')
|
||||
assert receiver1a.args == ['foo']
|
||||
assert receiver1b.args == ['foo']
|
||||
assert receiver2a.args == ['foo']
|
||||
assert receiver2b.args == ['foo']
|
||||
# Install plugin 1.
|
||||
plugin1 = Plugin1()
|
||||
louie.install_plugin(plugin1)
|
||||
# Make some receivers not live.
|
||||
receiver1a.live = False
|
||||
receiver2b.live = False
|
||||
# Check reception.
|
||||
louie.send('sig', arg='bar')
|
||||
assert receiver1a.args == ['foo']
|
||||
assert receiver1b.args == ['foo', 'bar']
|
||||
assert receiver2a.args == ['foo', 'bar']
|
||||
assert receiver2b.args == ['foo']
|
||||
# Remove plugin 1, install plugin 2.
|
||||
plugin2 = Plugin2()
|
||||
louie.remove_plugin(plugin1)
|
||||
louie.install_plugin(plugin2)
|
||||
# Check reception.
|
||||
louie.send('sig', arg='baz')
|
||||
assert receiver1a.args == ['foo', 'baz']
|
||||
assert receiver1b.args == ['foo', 'bar', 'baz']
|
||||
assert receiver2a.args == ['foo', 'bar']
|
||||
assert receiver2b.args == ['foo']
|
||||
# Install plugin 1 alongside plugin 2.
|
||||
louie.install_plugin(plugin1)
|
||||
# Check reception.
|
||||
louie.send('sig', arg='fob')
|
||||
assert receiver1a.args == ['foo', 'baz']
|
||||
assert receiver1b.args == ['foo', 'bar', 'baz', 'fob']
|
||||
assert receiver2a.args == ['foo', 'bar']
|
||||
assert receiver2b.args == ['foo']
|
||||
|
||||
|
||||
if qt is not None:
|
||||
def test_qt_plugin():
|
||||
louie.reset()
|
||||
# Create receivers.
|
||||
class Receiver(qt.QWidget):
|
||||
def __init__(self):
|
||||
qt.QObject.__init__(self)
|
||||
self.args = []
|
||||
def receive(self, arg):
|
||||
self.args.append(arg)
|
||||
receiver1 = Receiver()
|
||||
receiver2 = Receiver()
|
||||
# Connect signals.
|
||||
louie.connect(receiver1.receive, 'sig')
|
||||
louie.connect(receiver2.receive, 'sig')
|
||||
# Destroy receiver2 so only a shell is left.
|
||||
receiver2.close(True)
|
||||
# Check reception without plugins.
|
||||
louie.send('sig', arg='foo')
|
||||
assert receiver1.args == ['foo']
|
||||
assert receiver2.args == ['foo']
|
||||
# Install plugin.
|
||||
plugin = louie.QtWidgetPlugin()
|
||||
louie.install_plugin(plugin)
|
||||
# Check reception with plugins.
|
||||
louie.send('sig', arg='bar')
|
||||
assert receiver1.args == ['foo', 'bar']
|
||||
assert receiver2.args == ['foo']
|
||||
|
@ -1,41 +0,0 @@
|
||||
import unittest
|
||||
|
||||
import louie
|
||||
from louie import dispatcher
|
||||
|
||||
class Callable(object):
|
||||
|
||||
def __init__(self, val):
|
||||
self.val = val
|
||||
|
||||
def __call__(self):
|
||||
return self.val
|
||||
|
||||
|
||||
one = Callable(1)
|
||||
two = Callable(2)
|
||||
three = Callable(3)
|
||||
|
||||
class TestPriorityDispatcher(unittest.TestCase):
|
||||
|
||||
def test_ConnectNotify(self):
|
||||
louie.connect(
|
||||
two,
|
||||
'one',
|
||||
priority=200
|
||||
)
|
||||
louie.connect(
|
||||
one,
|
||||
'one',
|
||||
priority=100
|
||||
)
|
||||
louie.connect(
|
||||
three,
|
||||
'one',
|
||||
priority=300
|
||||
)
|
||||
result = [ i[1] for i in louie.send('one')]
|
||||
if not result == [1, 2, 3]:
|
||||
print result
|
||||
assert(False)
|
||||
|
62
wlauto/external/louie/test/test_prioritylist.py
vendored
@ -1,62 +0,0 @@
|
||||
import unittest
|
||||
|
||||
import louie.prioritylist
|
||||
from louie.prioritylist import PriorityList
|
||||
|
||||
#def populate_list(plist):
|
||||
|
||||
class TestPriorityList(unittest.TestCase):
|
||||
|
||||
def test_Insert(self):
|
||||
pl = PriorityList()
|
||||
elements = {3: "element 3",
|
||||
2: "element 2",
|
||||
1: "element 1",
|
||||
5: "element 5",
|
||||
4: "element 4"
|
||||
}
|
||||
for key in elements:
|
||||
pl.add(elements[key], priority=key)
|
||||
|
||||
match = zip(sorted(elements.values()), pl[:])
|
||||
for pair in match:
|
||||
assert(pair[0]==pair[1])
|
||||
|
||||
def test_Delete(self):
|
||||
pl = PriorityList()
|
||||
elements = {2: "element 3",
|
||||
1: "element 2",
|
||||
0: "element 1",
|
||||
4: "element 5",
|
||||
3: "element 4"
|
||||
}
|
||||
for key in elements:
|
||||
pl.add(elements[key], priority=key)
|
||||
del elements[2]
|
||||
del pl[2]
|
||||
match = zip(sorted(elements.values()) , pl[:])
|
||||
for pair in match:
|
||||
assert(pair[0]==pair[1])
|
||||
|
||||
def test_Multiple(self):
|
||||
pl = PriorityList()
|
||||
pl.add('1', 1)
|
||||
pl.add('2.1', 2)
|
||||
pl.add('3', 3)
|
||||
pl.add('2.2', 2)
|
||||
it = iter(pl)
|
||||
assert(it.next() == '1')
|
||||
assert(it.next() == '2.1')
|
||||
assert(it.next() == '2.2')
|
||||
assert(it.next() == '3')
|
||||
|
||||
def test_IteratorBreak(self):
|
||||
pl = PriorityList()
|
||||
pl.add('1', 1)
|
||||
pl.add('2.1', 2)
|
||||
pl.add('3', 3)
|
||||
pl.add('2.2', 2)
|
||||
for i in pl:
|
||||
if i == '2.1':
|
||||
break
|
||||
assert(pl.index('3') == 3)
|
34
wlauto/external/louie/test/test_robustapply.py
vendored
@ -1,34 +0,0 @@
|
||||
import unittest
|
||||
|
||||
from louie.robustapply import robust_apply
|
||||
|
||||
|
||||
def no_argument():
|
||||
pass
|
||||
|
||||
|
||||
def one_argument(blah):
|
||||
pass
|
||||
|
||||
|
||||
def two_arguments(blah, other):
|
||||
pass
|
||||
|
||||
|
||||
class TestRobustApply(unittest.TestCase):
|
||||
|
||||
def test_01(self):
|
||||
robust_apply(no_argument, no_argument)
|
||||
|
||||
def test_02(self):
|
||||
self.assertRaises(TypeError, robust_apply, no_argument, no_argument,
|
||||
'this' )
|
||||
|
||||
def test_03(self):
|
||||
self.assertRaises(TypeError, robust_apply, one_argument, one_argument)
|
||||
|
||||
def test_04(self):
|
||||
"""Raise error on duplication of a particular argument"""
|
||||
self.assertRaises(TypeError, robust_apply, one_argument, one_argument,
|
||||
'this', blah='that')
|
||||
|
83
wlauto/external/louie/test/test_saferef.py
vendored
@ -1,83 +0,0 @@
|
||||
import unittest
|
||||
|
||||
from louie.saferef import safe_ref
|
||||
|
||||
|
||||
class _Sample1(object):
|
||||
def x(self):
|
||||
pass
|
||||
|
||||
|
||||
def _sample2(obj):
|
||||
pass
|
||||
|
||||
|
||||
class _Sample3(object):
|
||||
def __call__(self, obj):
|
||||
pass
|
||||
|
||||
|
||||
class TestSaferef(unittest.TestCase):
|
||||
|
||||
# XXX: The original tests had a test for closure, and it had an
|
||||
# off-by-one problem, perhaps due to scope issues. It has been
|
||||
# removed from this test suite.
|
||||
|
||||
def setUp(self):
|
||||
ts = []
|
||||
ss = []
|
||||
for x in xrange(5000):
|
||||
t = _Sample1()
|
||||
ts.append(t)
|
||||
s = safe_ref(t.x, self._closure)
|
||||
ss.append(s)
|
||||
ts.append(_sample2)
|
||||
ss.append(safe_ref(_sample2, self._closure))
|
||||
for x in xrange(30):
|
||||
t = _Sample3()
|
||||
ts.append(t)
|
||||
s = safe_ref(t, self._closure)
|
||||
ss.append(s)
|
||||
self.ts = ts
|
||||
self.ss = ss
|
||||
self.closure_count = 0
|
||||
|
||||
def tearDown(self):
|
||||
if hasattr(self, 'ts'):
|
||||
del self.ts
|
||||
if hasattr(self, 'ss'):
|
||||
del self.ss
|
||||
|
||||
def test_In(self):
|
||||
"""Test the `in` operator for safe references (cmp)"""
|
||||
for t in self.ts[:50]:
|
||||
assert safe_ref(t.x) in self.ss
|
||||
|
||||
def test_Valid(self):
|
||||
"""Test that the references are valid (return instance methods)"""
|
||||
for s in self.ss:
|
||||
assert s()
|
||||
|
||||
def test_ShortCircuit(self):
|
||||
"""Test that creation short-circuits to reuse existing references"""
|
||||
sd = {}
|
||||
for s in self.ss:
|
||||
sd[s] = 1
|
||||
for t in self.ts:
|
||||
if hasattr(t, 'x'):
|
||||
assert sd.has_key(safe_ref(t.x))
|
||||
else:
|
||||
assert sd.has_key(safe_ref(t))
|
||||
|
||||
def test_Representation(self):
|
||||
"""Test that the reference object's representation works
|
||||
|
||||
XXX Doesn't currently check the results, just that no error
|
||||
is raised
|
||||
"""
|
||||
repr(self.ss[-1])
|
||||
|
||||
def _closure(self, ref):
|
||||
"""Dumb utility mechanism to increment deletion counter"""
|
||||
self.closure_count += 1
|
||||
|
8
wlauto/external/louie/version.py
vendored
@ -1,8 +0,0 @@
|
||||
"""Louie version information."""
|
||||
|
||||
|
||||
NAME = 'Louie'
|
||||
DESCRIPTION = 'Signal dispatching mechanism'
|
||||
VERSION = '1.1'
|
||||
|
||||
|
@ -54,7 +54,7 @@ class StreamlineResourceGetter(ResourceGetter):
|
||||
priority = GetterPriority.environment + 1  # run before standard environment resolvers.
|
||||
|
||||
dependencies_directory = os.path.join(settings.dependencies_directory, 'streamline')
|
||||
old_dependencies_directory = os.path.join(settings.environment_root, 'streamline') # backwards compatibility
|
||||
#old_dependencies_directory = os.path.join(settings.environment_root, 'streamline') # backwards compatibility
|
||||
|
||||
def get(self, resource, **kwargs):
|
||||
if resource.owner.name != 'streamline':
|
||||
@ -62,9 +62,9 @@ class StreamlineResourceGetter(ResourceGetter):
|
||||
test_path = _f(os.path.join(self.dependencies_directory, resource.path))
|
||||
if os.path.isfile(test_path):
|
||||
return test_path
|
||||
test_path = _f(os.path.join(self.old_dependencies_directory, resource.path))
|
||||
if os.path.isfile(test_path):
|
||||
return test_path
|
||||
#test_path = _f(os.path.join(self.old_dependencies_directory, resource.path))
|
||||
#if os.path.isfile(test_path):
|
||||
# return test_path
|
||||
|
||||
|
||||
def _instantiate(resolver):
|
||||
|
14
wlauto/managers/__init__.py
Normal file
@ -0,0 +1,14 @@
|
||||
# Copyright 2014-2015 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
@ -42,38 +42,39 @@ class PackageFileGetter(ResourceGetter):
|
||||
|
||||
name = 'package_file'
|
||||
description = """
|
||||
Looks for exactly one file with the specified extension in the owner's directory. If a version
|
||||
Looks for exactly one file with the specified plugin in the owner's directory. If a version
|
||||
is specified on invocation of get, it will filter the discovered file based on that version.
|
||||
Versions are treated as case-insensitive.
|
||||
"""
|
||||
|
||||
extension = None
|
||||
plugin = None
|
||||
|
||||
def register(self):
|
||||
self.resolver.register(self, self.extension, GetterPriority.package)
|
||||
self.resolver.register(self, self.plugin, GetterPriority.package)
|
||||
|
||||
def get(self, resource, **kwargs):
|
||||
resource_dir = os.path.dirname(sys.modules[resource.owner.__module__].__file__)
|
||||
version = kwargs.get('version')
|
||||
return get_from_location_by_extension(resource, resource_dir, self.extension, version)
|
||||
return get_from_location_by_plugin(resource, resource_dir, self.plugin, version)
|
||||
|
||||
|
||||
class EnvironmentFileGetter(ResourceGetter):
|
||||
|
||||
name = 'environment_file'
|
||||
description = """Looks for exactly one file with the specified extension in the owner's directory. If a version
|
||||
description = """Looks for exactly one file with the specified plugin in the owner's directory. If a version
|
||||
is specified on invocation of get, it will filter the discovered file based on that version.
|
||||
Versions are treated as case-insensitive."""
|
||||
|
||||
extension = None
|
||||
plugin = None
|
||||
|
||||
def register(self):
|
||||
self.resolver.register(self, self.extension, GetterPriority.environment)
|
||||
self.resolver.register(self, self.plugin, GetterPriority.environment)
|
||||
|
||||
def get(self, resource, **kwargs):
|
||||
resource_dir = resource.owner.dependencies_directory
|
||||
|
||||
version = kwargs.get('version')
|
||||
return get_from_location_by_extension(resource, resource_dir, self.extension, version)
|
||||
return get_from_location_by_plugin(resource, resource_dir, self.plugin, version)
|
||||
|
||||
|
||||
class ReventGetter(ResourceGetter):
|
||||
@ -95,12 +96,12 @@ class ReventGetter(ResourceGetter):
|
||||
|
||||
class PackageApkGetter(PackageFileGetter):
|
||||
name = 'package_apk'
|
||||
extension = 'apk'
|
||||
plugin = 'apk'
|
||||
|
||||
|
||||
class PackageJarGetter(PackageFileGetter):
|
||||
name = 'package_jar'
|
||||
extension = 'jar'
|
||||
plugin = 'jar'
|
||||
|
||||
|
||||
class PackageReventGetter(ReventGetter):
|
||||
@ -113,12 +114,12 @@ class PackageReventGetter(ReventGetter):
|
||||
|
||||
class EnvironmentApkGetter(EnvironmentFileGetter):
|
||||
name = 'environment_apk'
|
||||
extension = 'apk'
|
||||
plugin = 'apk'
|
||||
|
||||
|
||||
class EnvironmentJarGetter(EnvironmentFileGetter):
|
||||
name = 'environment_jar'
|
||||
extension = 'jar'
|
||||
plugin = 'jar'
|
||||
|
||||
|
||||
class EnvironmentReventGetter(ReventGetter):
|
||||
@ -254,10 +255,10 @@ class EnvironmentDependencyGetter(ResourceGetter):
|
||||
return path
|
||||
|
||||
|
||||
class ExtensionAssetGetter(DependencyFileGetter):
|
||||
class PluginAssetGetter(DependencyFileGetter):
|
||||
|
||||
name = 'extension_asset'
|
||||
resource_type = 'extension_asset'
|
||||
name = 'plugin_asset'
|
||||
resource_type = 'plugin_asset'
|
||||
|
||||
|
||||
class HttpGetter(ResourceGetter):
|
||||
@ -266,7 +267,7 @@ class HttpGetter(ResourceGetter):
|
||||
description = """
|
||||
Downloads resources from a server based on an index fetched from the specified URL.
|
||||
|
||||
Given a URL, this will try to fetch ``<URL>/index.json``. The index file maps extension
|
||||
Given a URL, this will try to fetch ``<URL>/index.json``. The index file maps plugin
|
||||
names to a list of corresponding asset descriptions. Each asset description contains a path
|
||||
(relative to the base URL) of the resource and a SHA256 hash, so that this Getter can
|
||||
verify whether the resource on the remote has changed.
|
||||
@ -291,7 +292,7 @@ class HttpGetter(ResourceGetter):
|
||||
|
||||
This Getter will look through the list of assets for "foo" (in this case, two) and check
|
||||
the paths until it finds one matching the resource (in this case, "foo-app.apk").
|
||||
Finally, it will try to dowload that file relative to the base URL and extension name
|
||||
Finally, it will try to download that file relative to the base URL and plugin name
|
||||
(in this case, "http://example.com/assets/foo/foo-app.apk"). The downloaded version
|
||||
will be cached locally, so that in the future, the getter will check the SHA256 hash
|
||||
of the local file against the one advertised inside index.json, and provided that hasn't
|
||||
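For illustration only, an index of the shape described above could be consumed roughly like this; the field names and URL are assumptions, not taken from this diff, and urllib2 is used because this code base is Python 2:

import json
import urllib2

BASE_URL = 'http://example.com/assets'   # hypothetical server

index = json.load(urllib2.urlopen(BASE_URL + '/index.json'))
# e.g. index == {'foo': [{'path': 'foo/foo-app.apk', 'sha256': '...'}, ...]}
for asset in index.get('foo', []):
    if asset['path'].endswith('.apk'):
        url = '%s/%s' % (BASE_URL, asset['path'])
        print('would fetch %s and verify its SHA256' % url)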
@ -374,7 +375,7 @@ class HttpGetter(ResourceGetter):
|
||||
if resource.name in ['apk', 'jar']:
|
||||
paths = [a['path'] for a in assets]
|
||||
version = getattr(resource, 'version', None)
|
||||
found = get_from_list_by_extension(resource, paths, resource.name, version)
|
||||
found = get_from_list_by_plugin(resource, paths, resource.name, version)
|
||||
if found:
|
||||
for a in assets:
|
||||
if a['path'] == found:
|
||||
@ -415,7 +416,7 @@ class RemoteFilerGetter(ResourceGetter):
|
||||
version = kwargs.get('version')
|
||||
if resource.owner:
|
||||
remote_path = os.path.join(self.remote_path, resource.owner.name)
|
||||
local_path = os.path.join(settings.environment_root, '__filer', resource.owner.dependencies_directory)
|
||||
local_path = os.path.join(settings.user_directory, '__filer', resource.owner.dependencies_directory)
|
||||
return self.try_get_resource(resource, version, remote_path, local_path)
|
||||
else:
|
||||
result = None
|
||||
@ -447,7 +448,7 @@ class RemoteFilerGetter(ResourceGetter):
|
||||
|
||||
def get_from(self, resource, version, location): # pylint: disable=no-self-use
|
||||
if resource.name in ['apk', 'jar']:
|
||||
return get_from_location_by_extension(resource, location, resource.name, version)
|
||||
return get_from_location_by_plugin(resource, location, resource.name, version)
|
||||
elif resource.name == 'file':
|
||||
filepath = os.path.join(location, resource.path)
|
||||
if os.path.exists(filepath):
|
||||
@ -456,7 +457,7 @@ class RemoteFilerGetter(ResourceGetter):
|
||||
filename = '.'.join([resource.owner.device.name, resource.stage, 'revent']).lower()
|
||||
alternate_location = os.path.join(location, 'revent_files')
|
||||
# There tends to be some confusion as to where revent files should
|
||||
# be placed. This looks both in the extension's directory, and in
|
||||
# be placed. This looks both in the plugin's directory, and in
|
||||
# 'revent_files' subdirectory under it, if it exists.
|
||||
if os.path.isdir(alternate_location):
|
||||
for candidate in os.listdir(alternate_location):
|
||||
@ -472,22 +473,22 @@ class RemoteFilerGetter(ResourceGetter):
|
||||
|
||||
# Utility functions
|
||||
|
||||
def get_from_location_by_extension(resource, location, extension, version=None):
|
||||
def get_from_location_by_plugin(resource, location, plugin, version=None):
|
||||
try:
|
||||
found_files = [os.path.join(location, f) for f in os.listdir(location)]
|
||||
except OSError:
|
||||
return None
|
||||
try:
|
||||
return get_from_list_by_extension(resource, found_files, extension, version)
|
||||
return get_from_list_by_plugin(resource, found_files, plugin, version)
|
||||
except ResourceError:
|
||||
raise ResourceError('More than one .{} found in {} for {}.'.format(extension,
|
||||
raise ResourceError('More than one .{} found in {} for {}.'.format(plugin,
|
||||
location,
|
||||
resource.owner.name))
|
||||
|
||||
|
||||
def get_from_list_by_extension(resource, filelist, extension, version=None):
|
||||
def get_from_list_by_plugin(resource, filelist, plugin, version=None):
|
||||
filelist = [ff for ff in filelist
|
||||
if os.path.splitext(ff)[1].lower().endswith(extension)]
|
||||
if os.path.splitext(ff)[1].lower().endswith(plugin)]
|
||||
if version:
|
||||
filelist = [ff for ff in filelist if version.lower() in os.path.basename(ff).lower()]
|
||||
if len(filelist) == 1:
|
||||
@ -495,7 +496,7 @@ def get_from_list_by_extension(resource, filelist, extension, version=None):
|
||||
elif not filelist:
|
||||
return None
|
||||
else:
|
||||
raise ResourceError('More than one .{} found in {} for {}.'.format(extension,
|
||||
raise ResourceError('More than one .{} found in {} for {}.'.format(plugin,
|
||||
filelist,
|
||||
resource.owner.name))
|
||||
|
||||
|
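A rough sketch of how the renamed helper above behaves, assuming it is called from within the same module; the file names and stand-in resource classes are made up for the example:

# get_from_list_by_plugin() filters a candidate list by file extension
# (still called 'plugin' after the rename) and, optionally, by version.
candidates = ['/deps/foo/foo-v2.apk', '/deps/foo/readme.txt']

class FakeOwner(object):
    name = 'foo'

class FakeResource(object):
    owner = FakeOwner()

match = get_from_list_by_plugin(FakeResource(), candidates, 'apk', version='v2')
print(match)    # /deps/foo/foo-v2.apk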
@ -52,28 +52,28 @@ workloads:
|
||||
"""
|
||||
|
||||
|
||||
class MockExtensionLoader(object):
|
||||
class MockPluginLoader(object):
|
||||
|
||||
def __init__(self):
|
||||
self.aliases = {}
|
||||
self.global_param_aliases = {}
|
||||
self.extensions = {
|
||||
self.plugins = {
|
||||
'defaults_workload': DefaultsWorkload(),
|
||||
'list_params': ListParamstrument(),
|
||||
}
|
||||
|
||||
def get_extension_class(self, name, kind=None): # pylint: disable=unused-argument
|
||||
return self.extensions.get(name, NamedMock(name))
|
||||
def get_plugin_class(self, name, kind=None): # pylint: disable=unused-argument
|
||||
return self.plugins.get(name, NamedMock(name))
|
||||
|
||||
def resolve_alias(self, name):
|
||||
return name, {}
|
||||
|
||||
def get_default_config(self, name): # pylint: disable=unused-argument
|
||||
ec = self.get_extension_class(name)
|
||||
ec = self.get_plugin_class(name)
|
||||
return {p.name: p.default for p in ec.parameters}
|
||||
|
||||
def has_extension(self, name):
|
||||
return name in self.aliases or name in self.extensions
|
||||
def has_plugin(self, name):
|
||||
return name in self.aliases or name in self.plugins
|
||||
|
||||
|
||||
class MockAgenda(object):
|
||||
@ -147,7 +147,7 @@ class ConfigLoaderTest(TestCase):
|
||||
class ConfigTest(TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.config = RunConfiguration(MockExtensionLoader())
|
||||
self.config = RunConfiguration(MockPluginLoader())
|
||||
self.config.load_config({'device': 'MockDevice'})
|
||||
|
||||
def test_case(self):
|
||||
|
@ -94,6 +94,6 @@ class RuntimeParametersTest(TestCase):
|
||||
|
||||
|
||||
def _instantiate(cls, *args, **kwargs):
|
||||
# Needed to get around Extension's __init__ checks
|
||||
# Needed to get around Plugin's __init__ checks
|
||||
return cls(*args, **kwargs)
|
||||
|
||||
|
@ -1065,5 +1065,5 @@ class RunnerTest(TestCase):
|
||||
|
||||
|
||||
def _instantiate(cls, *args, **kwargs):
|
||||
# Needed to get around Extension's __init__ checks
|
||||
# Needed to get around Plugin's __init__ checks
|
||||
return cls(*args, **kwargs)
|
||||
|
@ -19,17 +19,17 @@ from unittest import TestCase
|
||||
|
||||
from nose.tools import assert_equal, raises, assert_true
|
||||
|
||||
from wlauto.core.extension import Extension, Parameter, Param, ExtensionMeta, Module
|
||||
from wlauto.core.plugin import Plugin, Parameter, Param, PluginMeta, Module
|
||||
from wlauto.utils.types import list_of_ints
|
||||
from wlauto.exceptions import ConfigError
|
||||
|
||||
|
||||
class MyMeta(ExtensionMeta):
|
||||
class MyMeta(PluginMeta):
|
||||
|
||||
virtual_methods = ['validate', 'virtual1', 'virtual2']
|
||||
|
||||
|
||||
class MyBaseExtension(Extension):
|
||||
class MyBasePlugin(Plugin):
|
||||
|
||||
__metaclass__ = MyMeta
|
||||
|
||||
@ -40,7 +40,7 @@ class MyBaseExtension(Extension):
|
||||
]
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super(MyBaseExtension, self).__init__(**kwargs)
|
||||
super(MyBasePlugin, self).__init__(**kwargs)
|
||||
self.v1 = 0
|
||||
self.v2 = 0
|
||||
self.v3 = ''
|
||||
@ -53,7 +53,7 @@ class MyBaseExtension(Extension):
|
||||
self.v2 += 1
|
||||
|
||||
|
||||
class MyAcidExtension(MyBaseExtension):
|
||||
class MyAcidPlugin(MyBasePlugin):
|
||||
|
||||
name = 'acid'
|
||||
|
||||
@ -64,7 +64,7 @@ class MyAcidExtension(MyBaseExtension):
|
||||
]
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super(MyAcidExtension, self).__init__(**kwargs)
|
||||
super(MyAcidPlugin, self).__init__(**kwargs)
|
||||
self.vv1 = 0
|
||||
self.vv2 = 0
|
||||
|
||||
@ -76,7 +76,7 @@ class MyAcidExtension(MyBaseExtension):
|
||||
self.vv2 += 1
|
||||
|
||||
|
||||
class MyOtherExtension(MyBaseExtension):
|
||||
class MyOtherPlugin(MyBasePlugin):
|
||||
|
||||
name = 'other'
|
||||
|
||||
@ -85,7 +85,7 @@ class MyOtherExtension(MyBaseExtension):
|
||||
Param('optional', allowed_values=['test', 'check']),
|
||||
]
|
||||
|
||||
class MyOtherOtherExtension(MyOtherExtension):
|
||||
class MyOtherOtherPlugin(MyOtherPlugin):
|
||||
|
||||
name = 'otherother'
|
||||
|
||||
@ -94,7 +94,7 @@ class MyOtherOtherExtension(MyOtherExtension):
|
||||
]
|
||||
|
||||
|
||||
class MyOverridingExtension(MyAcidExtension):
|
||||
class MyOverridingPlugin(MyAcidPlugin):
|
||||
|
||||
name = 'overriding'
|
||||
|
||||
@ -103,12 +103,12 @@ class MyOverridingExtension(MyAcidExtension):
|
||||
]
|
||||
|
||||
|
||||
class MyThirdTeerExtension(MyOverridingExtension):
|
||||
class MyThirdTeerPlugin(MyOverridingPlugin):
|
||||
|
||||
name = 'thirdteer'
|
||||
|
||||
|
||||
class MultiValueParamExt(Extension):
|
||||
class MultiValueParamExt(Plugin):
|
||||
|
||||
name = 'multivalue'
|
||||
|
||||
@ -140,7 +140,7 @@ class MyEvenCoolerModule(Module):
|
||||
self.owner.self_fizzle_factor += 2
|
||||
|
||||
|
||||
class MyModularExtension(Extension):
|
||||
class MyModularPlugin(Plugin):
|
||||
|
||||
name = 'modular'
|
||||
|
||||
@ -149,7 +149,7 @@ class MyModularExtension(Extension):
|
||||
]
|
||||
|
||||
|
||||
class MyOtherModularExtension(Extension):
|
||||
class MyOtherModularPlugin(Plugin):
|
||||
|
||||
name = 'other_modular'
|
||||
|
||||
@ -161,7 +161,7 @@ class MyOtherModularExtension(Extension):
|
||||
]
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super(MyOtherModularExtension, self).__init__(**kwargs)
|
||||
super(MyOtherModularPlugin, self).__init__(**kwargs)
|
||||
self.self_fizzle_factor = 0
|
||||
|
||||
|
||||
@ -178,35 +178,35 @@ class FakeLoader(object):
|
||||
return _instantiate(module, owner)
|
||||
|
||||
|
||||
class ExtensionMetaTest(TestCase):
|
||||
class PluginMetaTest(TestCase):
|
||||
|
||||
def test_propagation(self):
|
||||
acid_params = [p.name for p in MyAcidExtension.parameters]
|
||||
acid_params = [p.name for p in MyAcidPlugin.parameters]
|
||||
assert_equal(acid_params, ['modules', 'base', 'hydrochloric', 'citric', 'carbonic'])
|
||||
|
||||
@raises(ValueError)
|
||||
def test_duplicate_param_spec(self):
|
||||
class BadExtension(MyBaseExtension): # pylint: disable=W0612
|
||||
class BadPlugin(MyBasePlugin): # pylint: disable=W0612
|
||||
parameters = [
|
||||
Parameter('base'),
|
||||
]
|
||||
|
||||
def test_param_override(self):
|
||||
class OverridingExtension(MyBaseExtension): # pylint: disable=W0612
|
||||
class OverridingPlugin(MyBasePlugin): # pylint: disable=W0612
|
||||
parameters = [
|
||||
Parameter('base', override=True, default='cheese'),
|
||||
]
|
||||
assert_equal(OverridingExtension.parameters['base'].default, 'cheese')
|
||||
assert_equal(OverridingPlugin.parameters['base'].default, 'cheese')
|
||||
|
||||
@raises(ValueError)
|
||||
def test_invalid_param_spec(self):
|
||||
class BadExtension(MyBaseExtension): # pylint: disable=W0612
|
||||
class BadPlugin(MyBasePlugin): # pylint: disable=W0612
|
||||
parameters = [
|
||||
7,
|
||||
]
|
||||
|
||||
def test_virtual_methods(self):
|
||||
acid = _instantiate(MyAcidExtension)
|
||||
acid = _instantiate(MyAcidPlugin)
|
||||
acid.virtual1()
|
||||
assert_equal(acid.v1, 1)
|
||||
assert_equal(acid.vv1, 1)
|
||||
@ -221,7 +221,7 @@ class ExtensionMetaTest(TestCase):
        assert_equal(acid.vv2, 2)

    def test_initialization(self):
        class MyExt(Extension):
        class MyExt(Plugin):
            name = 'myext'
            values = {'a': 0}
            def __init__(self, *args, **kwargs):
@ -242,7 +242,7 @@ class ExtensionMetaTest(TestCase):
        assert_equal(ext.instance_init, 1)

    def test_initialization_happens_once(self):
        class MyExt(Extension):
        class MyExt(Plugin):
            name = 'myext'
            values = {'a': 0}
            def __init__(self, *args, **kwargs):
@ -281,19 +281,19 @@ class ExtensionMetaTest(TestCase):
class ParametersTest(TestCase):

    def test_setting(self):
        myext = _instantiate(MyAcidExtension, hydrochloric=[5, 6], citric=5, carbonic=42)
        myext = _instantiate(MyAcidPlugin, hydrochloric=[5, 6], citric=5, carbonic=42)
        assert_equal(myext.hydrochloric, [5, 6])
        assert_equal(myext.citric, '5')
        assert_equal(myext.carbonic, 42)

    def test_validation_ok(self):
        myext = _instantiate(MyOtherExtension, mandatory='check', optional='check')
        myext = _instantiate(MyOtherPlugin, mandatory='check', optional='check')
        myext.validate()

    def test_default_override(self):
        myext = _instantiate(MyOverridingExtension)
        myext = _instantiate(MyOverridingPlugin)
        assert_equal(myext.hydrochloric, [3, 4])
        myotherext = _instantiate(MyThirdTeerExtension)
        myotherext = _instantiate(MyThirdTeerPlugin)
        assert_equal(myotherext.hydrochloric, [3, 4])

    def test_multivalue_param(self):
@ -308,49 +308,42 @@ class ParametersTest(TestCase):

    @raises(ConfigError)
    def test_validation_no_mandatory(self):
        myext = _instantiate(MyOtherExtension, optional='check')
        myext = _instantiate(MyOtherPlugin, optional='check')
        myext.validate()

    @raises(ConfigError)
    def test_validation_no_mandatory_in_derived(self):
        _instantiate(MyOtherOtherExtension)
        _instantiate(MyOtherOtherPlugin)

    @raises(ConfigError)
    def test_validation_bad_value(self):
        myext = _instantiate(MyOtherExtension, mandatory=1, optional='invalid')
        myext = _instantiate(MyOtherPlugin, mandatory=1, optional='invalid')
        myext.validate()

    @raises(ValueError)
    def test_duplicate_param_override(self):
        class DuplicateParamExtension(MyBaseExtension):  # pylint: disable=W0612
            parameters = [
                Parameter('base', override=True, default='buttery'),
                Parameter('base', override=True, default='biscuit'),
            ]

    @raises(ValueError)
    def test_overriding_new_param(self):
        class DuplicateParamExtension(MyBaseExtension):  # pylint: disable=W0612
        class DuplicateParamPlugin(MyBasePlugin):  # pylint: disable=W0612
            parameters = [
                Parameter('food', override=True, default='cheese'),
            ]
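The override tests above pin down the intended semantics: a subclass may restate an inherited parameter only with ``override=True``, and both overriding the same name twice and overriding a name no base class declares raise ``ValueError``. A minimal sketch of the pattern, assuming ``Plugin`` can be imported alongside ``Parameter`` from ``wlauto.core.plugin`` (only ``Parameter`` is shown being exported from there in this commit) and using illustrative class names::

    from wlauto.core.plugin import Plugin, Parameter   # Plugin import is an assumption


    class BaseWidget(Plugin):       # hypothetical plugin, not part of the commit

        name = 'base_widget'
        parameters = [
            Parameter('timeout', default=30),
        ]


    class FastWidget(BaseWidget):

        name = 'fast_widget'
        parameters = [
            # Restating an inherited parameter requires override=True;
            # without it the class definition raises ValueError
            # (cf. test_duplicate_param_spec above).
            Parameter('timeout', override=True, default=5),
        ]


    # cf. test_param_override: the override replaces the inherited default.
    assert FastWidget.parameters['timeout'].default == 5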


class ModuleTest(TestCase):

    def test_fizzle(self):
        myext = _instantiate(MyModularExtension)
        myext = _instantiate(MyModularPlugin)
        myext.load_modules(FakeLoader())
        assert_true(myext.can('fizzle'))
        myext.fizzle()
        assert_equal(myext.fizzle_factor, 1)

    def test_self_fizzle(self):
        myext = _instantiate(MyOtherModularExtension)
        myext = _instantiate(MyOtherModularPlugin)
        myext.load_modules(FakeLoader())
        myext.fizzle()
        assert_equal(myext.self_fizzle_factor, 2)


def _instantiate(cls, *args, **kwargs):
    # Needed to get around Extension's __init__ checks
    # Needed to get around Plugin's __init__ checks
    return cls(*args, **kwargs)
@ -20,32 +20,32 @@ from unittest import TestCase

from nose.tools import assert_equal, assert_greater

from wlauto.core.extension_loader import ExtensionLoader
from wlauto.core.pluginloader import PluginLoader


EXTDIR = os.path.join(os.path.dirname(__file__), 'data', 'extensions')
EXTDIR = os.path.join(os.path.dirname(__file__), 'data', 'plugins')


class ExtensionLoaderTest(TestCase):
class PluginLoaderTest(TestCase):

    def test_load_device(self):
        loader = ExtensionLoader(paths=[EXTDIR, ], load_defaults=False)
        loader = PluginLoader(paths=[EXTDIR, ], load_defaults=False)
        device = loader.get_device('test-device')
        assert_equal(device.name, 'test-device')

    def test_list_by_kind(self):
        loader = ExtensionLoader(paths=[EXTDIR, ], load_defaults=False)
        loader = PluginLoader(paths=[EXTDIR, ], load_defaults=False)
        exts = loader.list_devices()
        assert_equal(len(exts), 1)
        assert_equal(exts[0].name, 'test-device')

    def test_clear_and_reload(self):
        loader = ExtensionLoader()
        loader = PluginLoader()
        assert_greater(len(loader.list_devices()), 1)
        loader.clear()
        loader.update(paths=[EXTDIR, ])
        devices = loader.list_devices()
        assert_equal(len(devices), 1)
        assert_equal(devices[0].name, 'test-device')
        assert_equal(len(loader.list_extensions()), 1)
        assert_equal(len(loader.list_plugins()), 1)
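Taken together, the assertions above sketch the loader's public surface: construct it against one or more plugin directories (optionally skipping the bundled defaults), look plugins up by name, and re-scan with ``clear()``/``update()``. A usage sketch mirroring the test, with a hypothetical plugin directory::

    from wlauto.core.pluginloader import PluginLoader   # import as used in the test above

    plugin_dir = '/path/to/my_plugins'    # illustrative path

    loader = PluginLoader(paths=[plugin_dir], load_defaults=False)
    device = loader.get_device('test-device')          # look up a plugin by name
    print([d.name for d in loader.list_devices()])

    # Re-point the same loader at different sources without recreating it.
    loader.clear()
    loader.update(paths=[plugin_dir])
    print(len(loader.list_plugins()))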
@ -231,6 +231,6 @@ class InstrumentationTest(TestCase):


def _instantiate(cls):
    # Needed to get around Extension's __init__ checks
    # Needed to get around Plugin's __init__ checks
    return cls()

@ -126,5 +126,5 @@ class ResultManagerTest(TestCase):


def _instantiate(cls):
    # Needed to get around Extension's __init__ checks
    # Needed to get around Plugin's __init__ checks
    return cls()
@ -16,7 +16,7 @@

"""
This module contains utilities for generating user documentation for Workload
Automation Extensions.
Automation Plugins.

"""
import re
@ -32,7 +32,7 @@ def get_paragraphs(text):
    return [LINE_START.sub(' ', p) for p in PARAGRAPH_SEP.split(text)]


class ExtensionDocumenter(object):
class PluginDocumenter(object):

    @property
    def name(self):
@ -40,16 +40,16 @@ class ExtensionDocumenter(object):

    @property
    def summary(self):
        """Returns the summary description for this Extension, which, by
        """Returns the summary description for this Plugin, which, by
        convention, is the first paragraph of the description."""
        return get_paragraphs(self.description)[0]

    @property
    def description(self):
        """
        The description for an extension is specified in the ``description``
        attribute, or (legacy) as a docstring for the extension's class. If
        neither method is used in the Extension, an empty string is returned.
        The description for a plugin is specified in the ``description``
        attribute, or (legacy) as a docstring for the plugin's class. If
        neither method is used in the Plugin, an empty string is returned.

        Description is assumed to be formed as reStructuredText. Leading and
        trailing whitespace will be stripped away.
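The convention documented above (description taken from the ``description`` attribute or, as a legacy fallback, the class docstring; summary being its first paragraph) is easiest to see through ``get_paragraphs``; the import path below is assumed, as the module is not named in this hunk::

    from wlauto.utils.doc import get_paragraphs   # module path is an assumption

    description = """Runs the foo benchmark and reports its score.

    Later paragraphs carry reference material and are not part of the
    summary."""

    paragraphs = get_paragraphs(description)
    summary = paragraphs[0]   # the first paragraph is the summary, per the convention above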
@ -64,13 +64,13 @@ class ExtensionDocumenter(object):

    @property
    def parameters(self):
        return [ExtensionParameterDocumenter(p) for p in self.ext.parameters]
        return [PluginParameterDocumenter(p) for p in self.ext.parameters]

    def __init__(self, ext):
        self.ext = ext


class ExtensionParameterDocumenter(object):
class PluginParameterDocumenter(object):

    @property
    def name(self):
@ -30,7 +30,7 @@ BULLET_CHARS = '-*'

def get_summary(aclass):
    """
    Returns the summary description for an extension class. The summary is the
    Returns the summary description for a plugin class. The summary is the
    first paragraph (separated by blank line) of the description taken either from
    the ``description`` attribute of the class, or if that is not present, from the
    class' docstring.
@ -41,7 +41,7 @@ def get_summary(aclass):

def get_description(aclass):
    """
    Return the description of the specified extension class. The description is taken
    Return the description of the specified plugin class. The description is taken
    either from ``description`` attribute of the class or its docstring.

    """
@ -291,7 +291,7 @@ def underline(text, symbol='='):
    return '{}\n{}\n\n'.format(text, symbol * len(text))


def get_rst_from_extension(ext):
def get_rst_from_plugin(ext):
    text = underline(ext.name, '-')
    if hasattr(ext, 'description'):
        desc = strip_inlined_text(ext.description or '')
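``get_rst_from_plugin`` starts its output with the ``underline`` helper shown in the hunk above; given that helper's format string, the output for a plugin name looks like this (the import path is assumed)::

    from wlauto.utils.doc import underline   # module path is an assumption

    print(underline('my-plugin', '-'))
    # my-plugin
    # ---------
    #
    # (the trailing blank line comes from the '\n\n' in the format string)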
@ -21,7 +21,7 @@ import threading

import colorama

from wlauto.core.bootstrap import settings
from wlauto.core.config.core import settings
import wlauto.core.signal as signal


@ -46,13 +46,13 @@ def init_logging(verbosity):
    console_handler = logging.StreamHandler()
    if verbosity == 1:
        console_handler.setLevel(logging.DEBUG)
        if 'colour_enabled' in settings.logging and not settings.logging['colour_enabled']:
        if 'colour' in settings.logging and not settings.logging['colour']:
            console_handler.setFormatter(LineFormatter(settings.logging['verbose_format']))
        else:
            console_handler.setFormatter(ColorFormatter(settings.logging['verbose_format']))
    else:
        console_handler.setLevel(logging.INFO)
        if 'colour_enabled' in settings.logging and not settings.logging['colour_enabled']:
        if 'colour' in settings.logging and not settings.logging['colour']:
            console_handler.setFormatter(LineFormatter(settings.logging['regular_format']))
        else:
            console_handler.setFormatter(ColorFormatter(settings.logging['regular_format']))
@ -35,7 +35,7 @@ It's also possible to use the serializer directly::
This can also be used to ``dump()`` POD structures. By default,
``dump()`` will produce JSON, but ``fmt`` parameter may be used to
specify an alternative format (``yaml`` or ``python``). ``load()`` will
use the file extension to guess the format, but ``fmt`` may also be used
use the file plugin to guess the format, but ``fmt`` may also be used
to specify it explicitly.

"""
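The paragraph above is the extent of what this hunk shows about the serializer's interface; the sketch below illustrates the described usage, where both the module path and the exact call signatures (file handles vs. paths) are assumptions rather than anything confirmed by this diff::

    from wlauto.utils import serializer   # module path assumed

    pod = {'device': 'test-device', 'iterations': 3}

    # JSON by default; fmt selects an alternative representation.
    with open('run.json', 'w') as wfh:
        serializer.dump(pod, wfh)
    with open('run.yaml', 'w') as wfh:
        serializer.dump(pod, wfh, fmt='yaml')

    # load() would normally guess the format from the file extension;
    # fmt passes it explicitly.
    with open('run.yaml') as fh:
        restored = serializer.load(fh, fmt='yaml')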
@ -29,6 +29,7 @@ import os
import re
import math
import shlex
from bisect import insort
from collections import defaultdict

from wlauto.utils.misc import isiterable, to_identifier
@ -300,3 +301,112 @@ class arguments(list):
    def __str__(self):
        return ' '.join(self)


class prioritylist(object):

    def __init__(self):
        """
        A list-like container that externally behaves like a list but
        maintains the order of its elements according to their priority.
        """
        self.elements = defaultdict(list)
        self.is_ordered = True
        self.priorities = []
        self.size = 0
        self._cached_elements = None

    def add(self, new_element, priority=0):
        """
        Adds a new item to the list.

        - ``new_element`` the element to be inserted in the prioritylist
        - ``priority`` is the priority of the element which specifies its
          order within the list
        """
        self._add_element(new_element, priority)

    def add_before(self, new_element, element):
        priority, index = self._priority_index(element)
        self._add_element(new_element, priority, index)

    def add_after(self, new_element, element):
        priority, index = self._priority_index(element)
        self._add_element(new_element, priority, index + 1)

    def index(self, element):
        return self._to_list().index(element)

    def remove(self, element):
        index = self.index(element)
        self.__delitem__(index)

    def _priority_index(self, element):
        for priority, elements in self.elements.iteritems():
            if element in elements:
                return (priority, elements.index(element))
        raise IndexError(element)

    def _to_list(self):
        if self._cached_elements is None:
            self._cached_elements = []
            for priority in self.priorities:
                self._cached_elements += self.elements[priority]
        return self._cached_elements
    def _add_element(self, element, priority, index=None):
        if index is None:
            self.elements[priority].append(element)
        else:
            self.elements[priority].insert(index, element)
        self.size += 1
        self._cached_elements = None
        if priority not in self.priorities:
            insort(self.priorities, priority)

    def _delete(self, priority, priority_index):
        del self.elements[priority][priority_index]
        self.size -= 1
        if len(self.elements[priority]) == 0:
            self.priorities.remove(priority)
        self._cached_elements = None

    def __iter__(self):
        for priority in reversed(self.priorities):  # highest priority first
            for element in self.elements[priority]:
                yield element

    def __getitem__(self, index):
        return self._to_list()[index]

    def __delitem__(self, index):
        if isinstance(index, numbers.Integral):
            index = int(index)
            if index < 0:
                index_range = [len(self) + index]
            else:
                index_range = [index]
        elif isinstance(index, slice):
            index_range = range(index.start or 0, index.stop, index.step or 1)
        else:
            raise ValueError('Invalid index {}'.format(index))
        current_global_offset = 0
        priority_counts = {priority: count for (priority, count) in
                           zip(self.priorities, [len(self.elements[p]) for p in self.priorities])}
        for priority in self.priorities:
            if not index_range:
                break
            priority_offset = 0
            while index_range:
                del_index = index_range[0]
                if priority_counts[priority] + current_global_offset <= del_index:
                    current_global_offset += priority_counts[priority]
                    break
                within_priority_index = del_index - \
                                        (current_global_offset + priority_offset)
                self._delete(priority, within_priority_index)
                priority_offset += 1
                index_range.pop(0)

    def __len__(self):
        return self.size
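``prioritylist`` groups elements by integer priority and iterates them highest priority first, while positional access via ``__getitem__`` walks ``self.priorities`` in ascending order. A small usage sketch, assuming the class is importable from the module this hunk modifies::

    pl = prioritylist()
    pl.add('low', priority=-1)
    pl.add('normal')                      # default priority is 0
    pl.add('high', priority=10)
    pl.add_after('also-normal', 'normal')

    # __iter__ walks reversed(self.priorities), so iteration starts with
    # the highest priority: ['high', 'normal', 'also-normal', 'low']
    print(list(pl))

    # __getitem__ goes through _to_list(), which walks priorities in
    # ascending order, so index 0 is the lowest-priority element: 'low'
    print(pl[0])

    print(len(pl))                        # 4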
@ -18,7 +18,7 @@ import re
import os
import sys

from wlauto import Workload, Parameter, ExtensionLoader
from wlauto import Workload, Parameter, PluginLoader
from wlauto.exceptions import WorkloadError
from wlauto.utils.android import ApkInfo
import wlauto.common.android.resources
@ -52,7 +52,7 @@ class Dex2oatBenchmark(Workload):

    def init_resources(self, context):
        # TODO: find a better APK to use for this.
        peacekeeper = ExtensionLoader().get_workload('peacekeeper', self.device)
        peacekeeper = PluginLoader().get_workload('peacekeeper', self.device)
        self.apk_file = context.resolver.get(wlauto.common.android.resources.ApkFile(peacekeeper), version='chrome')
        self.package = ApkInfo(self.apk_file).package
@ -23,7 +23,7 @@ from wlauto import settings
from wlauto.common.android.workload import GameWorkload
from wlauto.exceptions import WorkloadError, DeviceError
from wlauto.utils.misc import check_output
from wlauto.common.resources import ExtensionAsset
from wlauto.common.resources import PluginAsset


class GunBros(GameWorkload):
@ -23,7 +23,7 @@ from collections import defaultdict

from wlauto import Workload, Parameter, Alias
from wlauto.exceptions import ConfigError, WorkloadError
from wlauto.common.resources import ExtensionAsset
from wlauto.common.resources import PluginAsset
from wlauto.utils.misc import get_cpu_mask
from wlauto.utils.types import boolean, list_or_string

@ -88,7 +88,7 @@ class Spec2000(Workload):
    - "scripts" contains one or more one-liner shell scripts that invoke the benchmark binary with
      appropriate command line parameters. The name of the script must be in the format
      <benchmark name>[.<variant name>].sh, i.e. name of benchmark, optionally followed by variant
      name, followed by ".sh" extension. If there is more than one script, then all of them must
      name, followed by ".sh" plugin. If there is more than one script, then all of them must
      have a variant; if there is only one script then it should not contain a variant.

    A typical bundle may look like this::
@ -213,7 +213,7 @@ class Spec2000(Workload):
        self.loaded_benchmarks = {}
        self.categories = set()
        if self.force_extract_assets or len(os.listdir(self.dependencies_directory)) < 2:
            bundle = context.resolver.get(ExtensionAsset(self, self.asset_file))
            bundle = context.resolver.get(PluginAsset(self, self.asset_file))
            with tarfile.open(bundle, 'r:gz') as tf:
                tf.extractall(self.dependencies_directory)
            for entry in os.listdir(self.dependencies_directory):