diff --git a/wa/__init__.py b/wa/__init__.py
new file mode 100644
index 00000000..25cf6b22
--- /dev/null
+++ b/wa/__init__.py
@@ -0,0 +1,13 @@
+from wa.framework import pluginloader, log, signal
+from wa.framework.configuration import settings
+from wa.framework.plugin import Plugin, Parameter
+from wa.framework.command import Command
+from wa.framework.run import runmethod
+from wa.framework.output import RunOutput
+from wa.framework.workload import Workload
+
+from wa.framework.exception import WAError, NotFoundError, ValidationError, WorkloadError
+from wa.framework.exception import HostError, JobError, InstrumentError, ConfigError
+from wa.framework.exception import ResultProcessorError, ResourceError, CommandError, ToolError
+from wa.framework.exception import WorkerThreadError, PluginLoaderError
+
diff --git a/wa/commands/__init__.py b/wa/commands/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/wa/commands/run.py b/wa/commands/run.py
new file mode 100644
index 00000000..c967a316
--- /dev/null
+++ b/wa/commands/run.py
@@ -0,0 +1,87 @@
+#    Copyright 2014-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+import os
+import sys
+import shutil
+
+import wa
+from wa import Command, settings
+from wa.framework import log
+from wa.framework.agenda import Agenda
+from wa.framework.execution import Executor
+from wa.framework.output import RunOutput
+
+
+class RunCommand(Command):
+
+    name = 'run'
+    description = """
+    Execute automated workloads on a remote device and process the resulting output.
+    """
+
+    def initialize(self, context):
+        self.parser.add_argument('agenda', metavar='AGENDA',
+                                 help="""
+                                 Agenda for this workload automation run. This defines which
+                                 workloads will be executed, how many times, with which
+                                 tunables, etc. See the example agendas in {} for details of
+                                 how this file should be structured.
+                                 """.format(os.path.dirname(wa.__file__)))
+        self.parser.add_argument('-d', '--output-directory', metavar='DIR', default=None,
+                                 help="""
+                                 Specify a directory where the output will be generated. If
+                                 the directory already exists, the script will abort unless -f
+                                 option (see below) is used, in which case the contents of the
+                                 directory will be overwritten. If this option is not specified,
+                                 then {} will be used instead.
+                                 """.format(settings.output_directory))
+        self.parser.add_argument('-f', '--force', action='store_true',
+                                 help="""
+                                 Overwrite output directory if it exists. By default, the script
+                                 will abort in this situation to prevent accidental data loss.
+                                 """)
+        self.parser.add_argument('-i', '--id', action='append', dest='only_run_ids', metavar='ID',
+                                 help="""
+                                 Specify a workload spec ID from an agenda to run. If this is
+                                 specified, only that particular spec will be run, and other
+                                 workloads in the agenda will be ignored. This option may be
+                                 used to specify multiple IDs.
+                                 """)
+        self.parser.add_argument('--disable', action='append', dest='instruments_to_disable',
+                                 metavar='INSTRUMENT', help="""
+                                 Specify an instrument to disable from the command line. This is
+                                 equivalent to adding "~INSTRUMENT" to the instrumentation list in
+                                 the agenda. This can be used to temporarily disable a troublesome
+                                 instrument for a particular run without introducing permanent
+                                 change to the config (which one might then forget to revert).
+                                 This option may be specified multiple times.
+                                 """)
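+        # Illustrative invocation (the instrument name here is hypothetical):
+        #
+        #   wa run -d results -f --disable energy_probe my_agenda.yaml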
+
+    def execute(self, args):  # NOQA
+        try:
+            executor = Executor(args.output_directory, args.force)
+        except RuntimeError:
+            self.logger.error('Output directory {} exists.'.format(args.output_directory))
+            self.logger.error('Please specify another location, or use -f option to overwrite.\n')
+            return 2
+        for path in settings.get_config_paths():
+            executor.load_config(path)
+        executor.load_agenda(args.agenda)
+        for itd in args.instruments_to_disable or []:
+            self.logger.debug('Globally disabling instrument "{}" (from command line option)'.format(itd))
+            executor.disable_instrument(itd)
+        executor.initialize()
+        executor.execute(selectors={'ids': args.only_run_ids})
+        executor.finalize()
diff --git a/wa/framework/__init__.py b/wa/framework/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/wa/framework/actor.py b/wa/framework/actor.py
new file mode 100644
index 00000000..dfb0ae59
--- /dev/null
+++ b/wa/framework/actor.py
@@ -0,0 +1,31 @@
+import uuid
+import logging
+
+from wa.framework import pluginloader
+from wa.framework.plugin import Plugin
+
+
+class JobActor(Plugin):
+
+    kind = 'job_actor'
+
+    def initialize(self, context):
+        pass
+
+    def run(self):
+        pass
+
+    def restart(self):
+        pass
+
+    def complete(self):
+        pass
+
+    def finalize(self):
+        pass
+
+
+class NullJobActor(JobActor):
+
+    name = 'null-job-actor'
+
diff --git a/wa/framework/agenda.py b/wa/framework/agenda.py
new file mode 100644
index 00000000..72041a4d
--- /dev/null
+++ b/wa/framework/agenda.py
@@ -0,0 +1,246 @@
+#    Copyright 2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+from copy import copy
+from collections import OrderedDict, defaultdict
+
+from wa.framework.exception import ConfigError, SerializerSyntaxError
+from wa.utils.serializer import yaml
+from wa.utils import counter
+
+
+def get_aliased_param(d, aliases, default=None, pop=True):
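+    """
+    Return the value of the first of ``aliases`` to be found in dict ``d``,
+    popping it from ``d`` unless ``pop=False``; raise ``ConfigError`` if more
+    than one of the aliases is present. An illustrative example::
+
+        >>> d = {'params': {'x': 1}}
+        >>> get_aliased_param(d, ['workload_parameters', 'workload_params', 'params'])
+        {'x': 1}
+        >>> d
+        {}
+
+    """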
+    alias_map = [i for i, a in enumerate(aliases) if a in d]
+    if len(alias_map) > 1:
+        message = 'Only one of {} may be specified in a single entry'
+        raise ConfigError(message.format(aliases))
+    elif alias_map:
+        if pop:
+            return d.pop(aliases[alias_map[0]])
+        else:
+            return d[aliases[alias_map[0]]]
+    else:
+        return default
+
+
+class AgendaEntry(object):
+
+    def to_dict(self):
+        return copy(self.__dict__)
+
+    def __str__(self):
+        name = self.__class__.__name__.split('.')[-1]
+        if hasattr(self, 'id'):
+            return '{}({})'.format(name, self.id)
+        else:
+            return name
+
+    __repr__ = __str__
+
+
+class AgendaWorkloadEntry(AgendaEntry):
+    """
+    Specifies execution of a workload, including things like the number of
+    iterations, device runtime_parameters configuration, etc.
+
+    """
+
+    def __init__(self, **kwargs):
+        super(AgendaWorkloadEntry, self).__init__()
+        self.id = kwargs.pop('id')
+        self.workload_name = get_aliased_param(kwargs, ['workload_name', 'name'])
+        if not self.workload_name:
+            raise ConfigError('No workload name specified in entry {}'.format(self.id))
+        self.label = kwargs.pop('label', self.workload_name)
+        self.number_of_iterations = kwargs.pop('iterations', None)
+        self.boot_parameters = get_aliased_param(kwargs,
+                                                 ['boot_parameters', 'boot_params'],
+                                                 default=OrderedDict())
+        self.runtime_parameters = get_aliased_param(kwargs,
+                                                    ['runtime_parameters', 'runtime_params'],
+                                                    default=OrderedDict())
+        self.workload_parameters = get_aliased_param(kwargs,
+                                                     ['workload_parameters', 'workload_params', 'params'],
+                                                     default=OrderedDict())
+        self.instrumentation = kwargs.pop('instrumentation', [])
+        self.flash = kwargs.pop('flash', OrderedDict())
+        self.classifiers = kwargs.pop('classifiers', OrderedDict())
+        if kwargs:
+            raise ConfigError('Invalid entry(ies) in workload {}: {}'.format(self.id, ', '.join(kwargs.keys())))
+
+
+class AgendaSectionEntry(AgendaEntry):
+    """
+    Specifies a section of an agenda: a group of workload specs that share
+    common configuration, such as the number of iterations, device
+    runtime_parameters, etc.
+
+    """
+
+    def __init__(self, agenda, **kwargs):
+        super(AgendaSectionEntry, self).__init__()
+        self.id = kwargs.pop('id')
+        self.number_of_iterations = kwargs.pop('iterations', None)
+        self.boot_parameters = get_aliased_param(kwargs,
+                                                 ['boot_parameters', 'boot_params'],
+                                                 default=OrderedDict())
+        self.runtime_parameters = get_aliased_param(kwargs,
+                                                    ['runtime_parameters', 'runtime_params', 'params'],
+                                                    default=OrderedDict())
+        self.workload_parameters = get_aliased_param(kwargs,
+                                                     ['workload_parameters', 'workload_params'],
+                                                     default=OrderedDict())
+        self.instrumentation = kwargs.pop('instrumentation', [])
+        self.flash = kwargs.pop('flash', OrderedDict())
+        self.classifiers = kwargs.pop('classifiers', OrderedDict())
+        self.workloads = []
+        for w in kwargs.pop('workloads', []):
+            self.workloads.append(agenda.get_workload_entry(w))
+        if kwargs:
+            raise ConfigError('Invalid entry(ies) in section {}: {}'.format(self.id, ', '.join(kwargs.keys())))
+
+    def to_dict(self):
+        d = copy(self.__dict__)
+        d['workloads'] = [w.to_dict() for w in self.workloads]
+        return d
+
+
+class AgendaGlobalEntry(AgendaEntry):
+    """
+    Workload configuration global to all workloads.
+
+    """
+
+    def __init__(self, **kwargs):
+        super(AgendaGlobalEntry, self).__init__()
+        self.number_of_iterations = kwargs.pop('iterations', None)
+        self.boot_parameters = get_aliased_param(kwargs,
+                                                 ['boot_parameters', 'boot_params'],
+                                                 default=OrderedDict())
+        self.runtime_parameters = get_aliased_param(kwargs,
+                                                    ['runtime_parameters', 'runtime_params', 'params'],
+                                                    default=OrderedDict())
+        self.workload_parameters = get_aliased_param(kwargs,
+                                                     ['workload_parameters', 'workload_params'],
+                                                     default=OrderedDict())
+        self.instrumentation = kwargs.pop('instrumentation', [])
+        self.flash = kwargs.pop('flash', OrderedDict())
+        self.classifiers = kwargs.pop('classifiers', OrderedDict())
+        if kwargs:
+            raise ConfigError('Invalid entries in global section: {}'.format(kwargs))
+
+
+class Agenda(object):
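+    """
+    In-memory representation of an agenda: the description of what is to be
+    run by WA. An agenda YAML document may contain ``config``, ``global``,
+    ``sections`` and ``workloads`` top-level entries, e.g. (workload names
+    are illustrative)::
+
+        global:
+            iterations: 2
+        sections:
+            - id: baseline
+              workloads:
+                - memcpy
+        workloads:
+            - name: dhrystone
+              iterations: 5
+
+    """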
+
+    def __init__(self, source=None):
+        self.filepath = None
+        self.config = None
+        self.global_ = None
+        self.sections = []
+        self.workloads = []
+        self._seen_ids = defaultdict(set)
+        if source:
+            try:
+                counter.reset('section')
+                counter.reset('workload')
+                self._load(source)
+            except (ConfigError, SerializerSyntaxError, SyntaxError) as e:
+                raise ConfigError(str(e))
+
+    def add_workload_entry(self, w):
+        entry = self.get_workload_entry(w)
+        self.workloads.append(entry)
+
+    def get_workload_entry(self, w):
+        if isinstance(w, basestring):
+            w = {'name': w}
+        if not isinstance(w, dict):
+            raise ConfigError('Invalid workload entry: "{}" in {}'.format(w, self.filepath))
+        self._assign_id_if_needed(w, 'workload')
+        return AgendaWorkloadEntry(**w)
+
+    def expand(self, target):
+        # TODO: currently a no-op, this method is here to support future features, such
+        #       as section cross products and sweeps.
+        pass
+
+    def _load(self, source):  # pylint: disable=too-many-branches
+        try:
+            raw = self._load_raw_from_source(source)
+        except SerializerSyntaxError as e:
+            name = getattr(source, 'name', '')
+            raise ConfigError('Error parsing agenda {}: {}'.format(name, e))
+        if not isinstance(raw, dict):
+            message = '{} does not contain a valid agenda structure; top level must be a dict.'
+            raise ConfigError(message.format(self.filepath))
+        for k, v in raw.iteritems():
+            if k == 'config':
+                if not isinstance(v, dict):
+                    raise ConfigError('Invalid agenda: "config" entry must be a dict')
+                self.config = v
+            elif k == 'global':
+                self.global_ = AgendaGlobalEntry(**v)
+            elif k == 'sections':
+                self._collect_existing_ids(v, 'section')
+                for s in v:
+                    if not isinstance(s, dict):
+                        raise ConfigError('Invalid section entry: "{}" in {}'.format(s, self.filepath))
+                    self._collect_existing_ids(s.get('workloads', []), 'workload')
+                for s in v:
+                    self._assign_id_if_needed(s, 'section')
+                    self.sections.append(AgendaSectionEntry(self, **s))
+            elif k == 'workloads':
+                self._collect_existing_ids(v, 'workload')
+                for w in v:
+                    self.workloads.append(self.get_workload_entry(w))
+            else:
+                raise ConfigError('Unexpected agenda entry "{}" in {}'.format(k, self.filepath))
+
+    def _load_raw_from_source(self, source):
+        if hasattr(source, 'read') and hasattr(source, 'name'):  # file-like object
+            self.filepath = source.name
+            raw = yaml.load(source)
+        elif isinstance(source, basestring):
+            if os.path.isfile(source):
+                self.filepath = source
+                with open(source, 'rb') as fh:
+                    raw = yaml.load(fh)
+            else:  # assume YAML text
+                raw = yaml.loads(source)
+        else:
+            raise ConfigError('Unknown agenda source: {}'.format(source))
+        return raw
+
+    def _collect_existing_ids(self, ds, pool):
+        # Collection needs to take place first so that auto IDs can be
+        # correctly assigned, e.g. if someone explicitly specified an ID
+        # of '1' for one of the workloads.
+        for d in ds:
+            if isinstance(d, dict) and 'id' in d:
+                did = str(d['id'])
+                if did in self._seen_ids[pool]:
+                    raise ConfigError('Duplicate {} ID: {}'.format(pool, did))
+                self._seen_ids[pool].add(did)
+
+    def _assign_id_if_needed(self, d, pool):
+        # Also enforces string IDs
+        if d.get('id') is None:
+            did = str(counter.next(pool))
+            while did in self._seen_ids[pool]:
+                did = str(counter.next(pool))
+            d['id'] = did
+            self._seen_ids[pool].add(did)
+        else:
+            d['id'] = str(d['id'])
diff --git a/wa/framework/command.py b/wa/framework/command.py
new file mode 100644
index 00000000..644ffd2c
--- /dev/null
+++ b/wa/framework/command.py
@@ -0,0 +1,68 @@
+#    Copyright 2014-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import textwrap
+
+from wa.framework.plugin import Plugin
+from wa.framework.entrypoint import init_argument_parser
+from wa.utils.doc import format_body
+
+
+class Command(Plugin):
+    """
+    Defines a Workload Automation command. This will be executed from the command line as
+    ``wa <command> [args ...]``. This defines the name to be used when invoking wa, the
+    code that will actually be executed on invocation and the argument parser to be used
+    to parse the rest of the command line arguments.
+
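+    A minimal subclass might look like this (illustrative)::
+
+        class ShowCommand(Command):
+
+            name = 'show'
+            description = 'Display information about a plugin.'
+
+            def initialize(self, context):
+                self.parser.add_argument('plugin', help='The plugin to show.')
+
+            def execute(self, args):
+                print(args.plugin)
+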
+    """
+
+    kind = 'command'
+    help = None
+    usage = None
+    description = None
+    epilog = None
+    formatter_class = None
+
+    def __init__(self, subparsers, **kwargs):
+        super(Command, self).__init__(**kwargs)
+        self.group = subparsers
+        parser_params = dict(help=(self.help or self.description), usage=self.usage,
+                             description=format_body(textwrap.dedent(self.description), 80),
+                             epilog=self.epilog)
+        if self.formatter_class:
+            parser_params['formatter_class'] = self.formatter_class
+        self.parser = subparsers.add_parser(self.name, **parser_params)
+        init_argument_parser(self.parser)  # propagate top-level options
+        self.initialize(None)
+
+    def initialize(self, context):
+        """
+        Perform command-specific initialisation (e.g. adding command-specific options to the command's
+        parser). ``context`` is always ``None``.
+
+        """
+        pass
+
+    def execute(self, args):
+        """
+        Execute this command.
+
+        :args: An ``argparse.Namespace`` containing command line arguments (as returned by
+               ``argparse.ArgumentParser.parse_args()``). This would usually be the result of
+               invoking ``self.parser``.
+
+        """
+        raise NotImplementedError()
diff --git a/wa/framework/configuration/__init__.py b/wa/framework/configuration/__init__.py
new file mode 100644
index 00000000..5c1be001
--- /dev/null
+++ b/wa/framework/configuration/__init__.py
@@ -0,0 +1,2 @@
+from wa.framework.configuration.core import settings, ConfigurationPoint, PluginConfiguration
+from wa.framework.configuration.core import merge_config_values, WA_CONFIGURATION
diff --git a/wa/framework/configuration/core.py b/wa/framework/configuration/core.py
new file mode 100644
index 00000000..7c33d746
--- /dev/null
+++ b/wa/framework/configuration/core.py
@@ -0,0 +1,639 @@
+import os
+import shutil
+import logging
+from glob import glob
+from copy import copy
+from itertools import chain
+
+from wa.framework import pluginloader
+from wa.framework.exception import ConfigError
+from wa.utils.types import integer, boolean, identifier, list_of_strings, list_of
+from wa.utils.misc import isiterable, get_article
+from wa.utils.serializer import read_pod, yaml
+
+
+class ConfigurationPoint(object):
+    """
+    This defines a generic configuration point for workload automation. This is
+    used to handle global settings, plugin parameters, etc.
+
+    """
+
+    # Mapping for kind conversion; see docs for convert_types below
+    kind_map = {
+        int: integer,
+        bool: boolean,
+    }
+
+    def __init__(self, name,
+                 kind=None,
+                 mandatory=None,
+                 default=None,
+                 override=False,
+                 allowed_values=None,
+                 description=None,
+                 constraint=None,
+                 merge=False,
+                 aliases=None,
+                 convert_types=True):
+        """
+        Create a new Parameter object.
+
+        :param name: The name of the parameter. This will become an instance
+                     member of the extension object to which the parameter is
+                     applied, so it must be a valid python identifier. This
+                     is the only mandatory parameter.
+        :param kind: The type of parameter this is. This must be a callable
+                     that takes an arbitrary object and converts it to the
+                     expected type, or raises ``ValueError`` if such conversion
+                     is not possible. Most Python standard types -- ``str``,
+                     ``int``, ``bool``, etc. -- can be used here. This
+                     defaults to ``str`` if not specified.
+        :param mandatory: If set to ``True``, then a non-``None`` value for
+                          this parameter *must* be provided on extension
+                          object construction, otherwise ``ConfigError``
+                          will be raised.
+        :param default: The default value for this parameter. If no value
+                        is specified on extension construction, this value
+                        will be used instead. (Note: if this is specified
+                        and is not ``None``, then ``mandatory`` parameter
+                        will be ignored).
+        :param override: A ``bool`` that specifies whether a parameter of
+                         the same name further up the hierarchy should
+                         be overridden. If this is ``False`` (the
+                         default), an exception will be raised by the
+                         ``AttributeCollection`` instead.
+        :param allowed_values: This should be the complete list of allowed
+                               values for this parameter.  Note: ``None``
+                               value will always be allowed, even if it is
+                               not in this list.  If you want to disallow
+                               ``None``, set ``mandatory`` to ``True``.
+        :param constraint: If specified, this must be a callable that takes
+                           the parameter value as an argument and return a
+                           boolean indicating whether the constraint has been
+                           satisfied. Alternatively, can be a two-tuple with
+                           said callable as the first element and a string
+                           describing the constraint as the second.
+        :param merge: The default behaviour when setting a value on an object
+                      that already has that attribute is to override it with
+                      the new value. If this is set to ``True`` then the two
+                      values will be merged instead. The rules by which the
+                      values are merged will be determined by the types of
+                      the existing and new values -- see
+                      ``merge_config_values`` documentation for details.
+        :param aliases: Alternative names for the same configuration point.
+                        These are largely for backwards compatibility.
+        :param convert_types: If ``True`` (the default), will automatically
+                              convert ``kind`` values from native Python
+                              types to WA equivalents. This allows more
+                              intuitive interpretation of parameter values,
+                              e.g. the string ``"false"`` being interpreted
+                              as ``False`` when specified as the value for
+                              a boolean Parameter.
+
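+        An illustrative definition::
+
+            ConfigurationPoint('retries', kind=int, default=3,
+                               constraint=(lambda x: x >= 0, 'must be non-negative'),
+                               description='Number of retries on failure.')
+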
+        """
+        self.name = identifier(name)
+        if kind is not None and not callable(kind):
+            raise ValueError('Kind must be callable.')
+        if convert_types and kind in self.kind_map:
+            kind = self.kind_map[kind]
+        self.kind = kind
+        self.mandatory = mandatory
+        self.default = default
+        self.override = override
+        self.allowed_values = allowed_values
+        self.description = description
+        if self.kind is None and not self.override:
+            self.kind = str
+        if constraint is not None and not callable(constraint) and not isinstance(constraint, tuple):
+            raise ValueError('Constraint must be callable or a (callable, str) tuple.')
+        self.constraint = constraint
+        self.merge = merge
+        self.aliases = aliases or []
+
+    def match(self, name):
+        if name == self.name:
+            return True
+        elif name in self.aliases:
+            return True
+        return False
+
+    def set_value(self, obj, value=None):
+        if value is None:
+            if self.default is not None:
+                value = self.default
+            elif self.mandatory:
+                msg = 'No value specified for mandatory parameter {} in {}'
+                raise ConfigError(msg.format(self.name, obj.name))
+        else:
+            try:
+                value = self.kind(value)
+            except (ValueError, TypeError):
+                typename = self.get_type_name()
+                msg = 'Bad value "{}" for {}; must be {} {}'
+                article = get_article(typename)
+                raise ConfigError(msg.format(value, self.name, article, typename))
+        if self.merge and hasattr(obj, self.name):
+            value = merge_config_values(getattr(obj, self.name), value)
+        setattr(obj, self.name, value)
+
+    def validate(self, obj):
+        value = getattr(obj, self.name, None)
+        self.validate_value(obj, value)
+
+    def validate_value(self, obj, value):
+        if value is not None:
+            if self.allowed_values:
+                self._validate_allowed_values(obj, value)
+            if self.constraint:
+                self._validate_constraint(obj, value)
+        else:
+            if self.mandatory:
+                msg = 'No value specified for mandatory parameter {} in {}.'
+                raise ConfigError(msg.format(self.name, obj.name))
+
+    def get_type_name(self):
+        typename = str(self.kind)
+        if '\'' in typename:
+            typename = typename.split('\'')[1]
+        elif typename.startswith('<function'):
+            typename = typename.split()[1]
+        return typename
+
+    def _validate_allowed_values(self, obj, value):
+        if 'list' in str(self.kind):
+            for v in value:
+                if v not in self.allowed_values:
+                    msg = 'Invalid value {} for {} in {}; must be in {}'
+                    raise ConfigError(msg.format(v, self.name, obj.name, self.allowed_values))
+        else:
+            if value not in self.allowed_values:
+                msg = 'Invalid value {} for {} in {}; must be in {}'
+                raise ConfigError(msg.format(value, self.name, obj.name, self.allowed_values))
+
+    def _validate_constraint(self, obj, value):
+        msg_vals = {'value': value, 'param': self.name, 'extension': obj.name}
+        if isinstance(self.constraint, tuple) and len(self.constraint) == 2:
+            constraint, msg = self.constraint  # pylint: disable=unpacking-non-sequence
+        elif callable(self.constraint):
+            constraint = self.constraint
+            msg = '"{value}" failed constraint validation for {param} in {extension}.'
+        else:
+            raise ValueError('Invalid constraint for {}: must be callable or a 2-tuple'.format(self.name))
+        if not constraint(value):
+            raise ConfigError(msg.format(**msg_vals))
+
+    def __repr__(self):
+        d = copy(self.__dict__)
+        del d['description']
+        return 'ConfPoint({})'.format(d)
+
+    __str__ = __repr__
+
+
+class ConfigurationPointCollection(object):
+
+    def __init__(self):
+        self._configs = []
+        self._config_map = {}
+
+    def get(self, name, default=None):
+        return self._config_map.get(name, default)
+
+    def add(self, point):
+        if not isinstance(point, ConfigurationPoint):
+            raise ValueError('Must be a ConfigurationPoint, got {}'.format(point.__class__))
+        existing = self.get(point.name)
+        if existing:
+            if point.override:
+                new_point = copy(existing)
+                for a, v in point.__dict__.iteritems():
+                    if v is not None:
+                        setattr(new_point, a, v)
+                self.remove(existing)
+                point = new_point
+            else:
+                raise ValueError('Duplicate ConfigurationPoint "{}"'.format(point.name))
+        self._add(point)
+
+    def remove(self, point):
+        self._configs.remove(point)
+        del self._config_map[point.name]
+        for alias in point.aliases:
+            del self._config_map[alias]
+
+    append = add
+
+    def _add(self, point):
+        self._configs.append(point)
+        self._config_map[point.name] = point
+        for alias in point.aliases:
+            if alias in self._config_map:
+                message = 'Clashing alias "{}" between "{}" and "{}"'
+                raise ValueError(message.format(alias, point.name,
+                                                self._config_map[alias].name))
+            self._config_map[alias] = point
+
+    def __str__(self):
+        return str(self._configs)
+
+    __repr__ = __str__
+
+    def __iadd__(self, other):
+        for p in other:
+            self.add(p)
+        return self
+
+    def __iter__(self):
+        return iter(self._configs)
+
+    def __contains__(self, p):
+        if isinstance(p, basestring):
+            return p in self._config_map
+        return p.name in self._config_map
+
+    def __getitem__(self, i):
+        if isinstance(i, int):
+            return self._configs[i]
+        return self._config_map[i]
+
+    def __len__(self):
+        return len(self._configs)
+
+
+class LoggingConfig(dict):
+
+    defaults = {
+        'file_format': '%(asctime)s %(levelname)-8s %(name)s: %(message)s',
+        'verbose_format': '%(asctime)s %(levelname)-8s %(name)s: %(message)s',
+        'regular_format': '%(levelname)-8s %(message)s',
+        'color': True,
+    }
+
+    def __init__(self, config=None):
+        if isinstance(config, dict):
+            config = {identifier(k.lower()): v for k, v in config.iteritems()}
+            self['regular_format'] = config.pop('regular_format', self.defaults['regular_format'])
+            self['verbose_format'] = config.pop('verbose_format', self.defaults['verbose_format'])
+            self['file_format'] = config.pop('file_format', self.defaults['file_format'])
+            self['color'] = config.pop('colour_enabled', self.defaults['color'])  # legacy
+            self['color'] = config.pop('color', self.defaults['color'])
+            if config:
+                message = 'Unexpected logging configuration parameters: {}'
+                raise ValueError(message.format(', '.join(config.keys())))
+        elif config is None:
+            for k, v in self.defaults.iteritems():
+                self[k] = v
+        else:
+            raise ValueError(config)
+
+
+__WA_CONFIGURATION = [
+    ConfigurationPoint(
+        'user_directory',
+        description="""
+        Path to the user directory. This is the location WA will look for
+        user configuration, additional plugins and plugin dependencies.
+        """,
+        kind=str,
+        default=os.path.join(os.path.expanduser('~'), '.workload_automation'),
+    ),
+    ConfigurationPoint(
+        'plugin_packages',
+        kind=list_of_strings,
+        default=[
+            'wa.commands',
+            'wa.workloads',
+#            'wa.instruments',
+#            'wa.processors',
+#            'wa.targets',
+            'wa.framework.actor',
+            'wa.framework.target',
+            'wa.framework.resource',
+            'wa.framework.execution',
+        ],
+        description="""
+        List of packages that will be scanned for WA plugins.
+        """,
+    ),
+    ConfigurationPoint(
+        'plugin_paths',
+        kind=list_of_strings,
+        default=[
+            'workloads',
+            'instruments',
+            'targets',
+            'processors',
+
+            # Legacy
+            'devices',
+            'result_processors',
+        ],
+        description="""
+        List of paths that will be scanned for WA plugins.
+        """,
+    ),
+    ConfigurationPoint(
+        'plugin_ignore_paths',
+        kind=list_of_strings,
+        default=[],
+        description="""
+        List of (sub)paths that will be ignored when scanning 
+        ``plugin_paths`` for WA plugins.
+        """,
+    ),
+    ConfigurationPoint(
+        'filer_mount_point',
+        description="""
+        The local mount point for the filer hosting WA assets.
+        """,
+    ),
+    ConfigurationPoint(
+        'logging',
+        kind=LoggingConfig,
+        description="""
+        WA logging configuration. This should be a dict with a subset
+        of the following keys::
+
+        :regular_format: Logging format used for regular console output
+        :verbose_format: Logging format used for verbose console output 
+        :file_format: Logging format used for run.log
+        :color: If ``True`` (the default), console logging output will
+                contain bash color escape codes. Set this to ``False`` if
+                console output will be piped somewhere that does not know
+                how to handle those.
+        """,
+    ),
+    ConfigurationPoint(
+        'verbosity',
+        kind=int,
+        default=0,
+        description="""
+        Verbosity of console output.
+        """,
+    ),
+]
+
+WA_CONFIGURATION = {cp.name: cp for cp in __WA_CONFIGURATION}
+
+ENVIRONMENT_VARIABLES = {
+    'WA_USER_DIRECTORY': WA_CONFIGURATION['user_directory'],
+    'WA_PLUGIN_PATHS': WA_CONFIGURATION['plugin_paths'],
+    'WA_EXTENSION_PATHS': WA_CONFIGURATION['plugin_paths'],  # extension_paths (legacy)
+}
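+# Illustrative usage: invoking WA as ``WA_USER_DIRECTORY=/data/wa wa run ...``
+# makes it look for user configuration under /data/wa instead of the default
+# ~/.workload_automation.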
+
+
+class WAConfiguration(object):
+    """
+    This is configuration for Workload Automation framework as a whole. This
+    does not track configuration for WA runs. Rather, this tracks "meta" 
+    configuration, such as various locations WA looks for things, logging
+    configuration etc.
+
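+    The typical start-up sequence (cf. ``init_settings()`` in
+    ``wa.framework.entrypoint``)::
+
+        settings.load_environment()
+        if not os.path.isdir(settings.user_directory):
+            settings.initialize_user_directory()
+        settings.load_user_config()
+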
+    """
+
+    basename = 'config'
+
+    def __init__(self):
+        self.user_directory = ''
+        self.dependencies_directory = 'dependencies'
+        self.plugin_packages = []
+        self.plugin_paths = []
+        self.plugin_ignore_paths = []
+        self.logging = {}
+        self._logger = logging.getLogger('settings')
+        for confpoint in WA_CONFIGURATION.itervalues():
+            confpoint.set_value(self)
+
+    def load_environment(self):
+        for name, confpoint in ENVIRONMENT_VARIABLES.iteritems():
+            value = os.getenv(name)
+            if value:
+                confpoint.set_value(self, value)
+        self._expand_paths()
+
+    def load_config_file(self, path):
+        self.load(read_pod(path))
+
+    def load_user_config(self):
+        globpath = os.path.join(self.user_directory, '{}.*'.format(self.basename))
+        for path in glob(globpath):
+            ext = os.path.splitext(path)[1].lower()
+            if ext in ['.pyc', '.pyo']:
+                continue
+            self.load_config_file(path)
+
+    def load(self, config):
+        for name, value in config.iteritems():
+            if name in WA_CONFIGURATION:
+                confpoint = WA_CONFIGURATION[name]
+                confpoint.set_value(self, value)
+        self._expand_paths()
+
+    def set(self, name, value):
+        if name not in WA_CONFIGURATION:
+            raise ConfigError('Unknown WA configuration "{}"'.format(name))
+        WA_CONFIGURATION[name].set_value(self, value)
+
+    def initialize_user_directory(self, overwrite=False):
+        """
+        Initialize a fresh user environment, creating the user directory and
+        populating it with default configuration.
+
+        """
+        if os.path.exists(self.user_directory):
+            if not overwrite:
+                raise ConfigError('Environment {} already exists.'.format(self.user_directory))
+            shutil.rmtree(self.user_directory)
+
+        self._expand_paths()
+        os.makedirs(self.dependencies_directory)
+        for path in self.plugin_paths:
+            os.makedirs(path)
+
+        with open(os.path.join(self.user_directory, 'config.yaml'), 'w') as wfh:
+            yaml.dump(self.to_pod(), wfh)
+
+        if os.getenv('USER') == 'root':
+            # If running with sudo on POSIX, change the ownership to the real user.
+            real_user = os.getenv('SUDO_USER')
+            if real_user:
+                import pwd  # done here as module won't import on win32
+                user_entry = pwd.getpwnam(real_user)
+                uid, gid = user_entry.pw_uid, user_entry.pw_gid
+                os.chown(self.user_directory, uid, gid)
+                # why, oh why isn't there a recursive=True option for os.chown?
+                for root, dirs, files in os.walk(self.user_directory):
+                    for d in dirs:
+                        os.chown(os.path.join(root, d), uid, gid)
+                    for f in files:
+                        os.chown(os.path.join(root, f), uid, gid)
+
+    @staticmethod
+    def from_pod(pod):
+        instance = WAConfiguration()
+        instance.load(pod)
+        return instance
+
+    def to_pod(self):
+        return dict(
+            user_directory=self.user_directory,
+            plugin_packages=self.plugin_packages,
+            plugin_paths=self.plugin_paths,
+            plugin_ignore_paths=self.plugin_ignore_paths,
+            logging=self.logging,
+        )
+
+    def _expand_paths(self):
+        self.dependencies_directory = os.path.join(self.user_directory, 
+                                                   self.dependencies_directory)
+        expanded = []
+        for path in self.plugin_paths:
+            path = os.path.expanduser(path)
+            path = os.path.expandvars(path)
+            expanded.append(os.path.join(self.user_directory, path))
+        self.plugin_paths = expanded
+        expanded = []
+        for path in self.plugin_ignore_paths:
+            path = os.path.expanduser(path)
+            path = os.path.expandvars(path)
+            expanded.append(os.path.join(self.user_directory, path))
+        self.plugin_ignore_paths = expanded
+
+
+class PluginConfiguration(object):
+    """ Maintains a mapping of plugin_name --> plugin_config. """
+
+    def __init__(self, loader=pluginloader):
+        self.loader = loader
+        self.config = {}
+
+    def update(self, name, config):
+        if not hasattr(config, 'get'):
+            raise ValueError('config must be a dict-like object; got: {}'.format(config))
+        name, alias_config = self.loader.resolve_alias(name)
+        existing_config = self.config.get(name)
+        if existing_config is None:
+            existing_config = alias_config
+
+        new_config = config or {}
+        plugin_cls = self.loader.get_plugin_class(name)
+
+
+
+def merge_config_values(base, other):
+    """
+    This is used to merge two objects, typically when setting the value of a
+    ``ConfigurationPoint``. First, both objects are categorized into
+
+        c: A scalar value. Basically, most objects. These values
+           are treated as atomic, and not mergeable.
+        s: A sequence. Anything iterable that is not a dict or
+           a string (strings are considered scalars).
+        m: A key-value mapping. ``dict`` and its derivatives.
+        n: ``None``.
+        o: A mergeable object; this is an object that implements both
+          ``merge_with`` and ``merge_into`` methods.
+
+    The merge rules based on the two categories are then as follows:
+
+        (c1, c2) --> c2
+        (s1, s2) --> s1 . s2
+        (m1, m2) --> m1 . m2
+        (c, s) --> [c] . s
+        (s, c) --> s . [c]
+        (s, m) --> s . [m]
+        (m, s) --> [m] . s
+        (m, c) --> ERROR
+        (c, m) --> ERROR
+        (o, X) --> o.merge_with(X)
+        (X, o) --> o.merge_into(X)
+        (X, n) --> X
+        (n, X) --> X
+
+    where:
+
+        '.'  means concatenation (for maps, concatenation of (k, v) streams
+             then converted back into a map). If the types of the two objects
+             differ, the type of ``other`` is used for the result.
+        'X'  means "any category"
+        '[]' used to indicate a literal sequence (not necessarily a ``list``).
+             When this is concatenated with an actual sequence, that sequence's
+             type is used.
+
+    notes:
+
+        - When a mapping is combined with a sequence, that mapping is
+          treated as a scalar value.
+        - When combining two mergeable objects, they're combined using
+          ``o1.merge_with(o2)`` (_not_ using o2.merge_into(o1)).
+        - Combining anything with ``None`` yields that value, irrespective
+          of the order. So a ``None`` value is equivalent to the corresponding
+          item being omitted.
+        - When both values are scalars, merging is equivalent to overwriting.
+        - There is no recursion (e.g. if map values are lists, they will not
+          be merged; ``other`` will overwrite ``base`` values). If complicated
+          merging semantics (such as recursion) are required, they should be
+          implemented within custom mergeable types (i.e. those that implement
+          ``merge_with`` and ``merge_into``).
+
+    While this can be used as a generic "combine any two arbitrary objects" 
+    function, the semantics have been selected specifically for merging
+    configuration point values.
+
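+    Illustrative examples::
+
+        >>> merge_config_values([1, 2], [3, 4])
+        [1, 2, 3, 4]
+        >>> merge_config_values('a', ['b', 'c'])
+        ['a', 'b', 'c']
+        >>> merge_config_values({'a': 1}, None)
+        {'a': 1}
+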
+    """
+    cat_base = categorize(base)
+    cat_other = categorize(other)
+
+    if cat_base == 'n':
+        return other
+    elif cat_other == 'n':
+        return base
+
+    if cat_base == 'o':
+        return base.merge_with(other)
+    elif cat_other == 'o':
+        return other.merge_into(base)
+
+    if cat_base == 'm':
+        if cat_other == 's':
+            return merge_sequences([base], other)
+        elif cat_other == 'm':
+            return merge_maps(base, other)
+        else:
+            message = 'merge error ({}, {}): "{}" and "{}"'
+            raise ValueError(message.format(cat_base, cat_other, base, other))
+    elif cat_base == 's':
+        if cat_other == 's':
+            return merge_sequences(base, other)
+        else:
+            return merge_sequences(base, [other])
+    else:  # cat_base == 'c'
+        if cat_other == 's':
+            return merge_sequences([base], other)
+        elif cat_other == 'm':
+            message = 'merge error ({}, {}): "{}" and "{}"'
+            raise ValueError(message.format(cat_base, cat_other, base, other))
+        else:
+            return other
+
+
+def merge_sequences(s1, s2):
+    return type(s2)(chain(s1, s2))
+
+
+def merge_maps(m1, m2):
+    return type(m2)(chain(m1.iteritems(), m2.iteritems()))
+
+
+def categorize(v):
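+    # Category codes: 'o' mergeable object, 'm' mapping, 's' sequence,
+    # 'n' None, 'c' scalar (see merge_config_values above).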
+    if hasattr(v, 'merge_with') and hasattr(v, 'merge_into'):
+        return 'o'
+    elif hasattr(v, 'iteritems'):
+        return 'm'
+    elif isiterable(v):
+        return 's'
+    elif v is None:
+        return 'n'
+    else:
+        return 'c'
+
+
+settings = WAConfiguration()
diff --git a/wa/framework/configuration/execution.py b/wa/framework/configuration/execution.py
new file mode 100644
index 00000000..908d7583
--- /dev/null
+++ b/wa/framework/configuration/execution.py
@@ -0,0 +1,67 @@
+from copy import copy
+from collections import OrderedDict
+
+from wa.framework import pluginloader
+from wa.framework.exception import ConfigError
+from wa.framework.configuration.core import ConfigurationPoint
+from wa.utils.types import TreeNode, list_of, identifier
+
+
+class ExecConfig(object):
+
+    static_config_points = [
+            ConfigurationPoint(
+                'components',
+                kind=list_of(identifier),
+                description="""
+                Components to be activated.
+                """,
+            ),
+            ConfigurationPoint(
+                'runtime_parameters',
+                kind=list_of(identifier),
+                aliases=['runtime_params'],
+                description="""
+                Runtime parameters to be set.
+                """,
+            ),
+            ConfigurationPoint(
+                'classifiers',
+                kind=list_of(str),
+                description="""
+                Classifiers to be used. Classifiers are arbitrary key-value
+                pairs associated with this config. They may be used during output
+                processing and should be used to provide additional context for
+                collected results.
+                """,
+            ),
+    ]
+
+    config_points = None
+
+    @classmethod
+    def _load(cls, load_global=False, loader=pluginloader):
+        if cls.config_points is None:
+            cls.config_points = {c.name: c for c in cls.static_config_points}
+            for plugin in loader.list_plugins():
+                cp = ConfigurationPoint(
+                    plugin.name,
+                    kind=OrderedDict,
+                    description="""
+                    Configuration for {} plugin.
+                    """.format(plugin.name)
+                )
+                cls._add_config_point(plugin.name, cp)
+                for alias in plugin.aliases:
+                    cls._add_config_point(alias.name, cp)
+
+    @classmethod
+    def _add_config_point(cls, name, cp):
+        if name in cls.config_points:
+            message = 'Config point for "{}" already exists ("{}")'
+            raise ValueError(message.format(name, cls.config_points[name].name))
+        cls.config_points[name] = cp
+
+
+class GlobalExecConfig(ExecConfig):
+    pass
diff --git a/wa/framework/entrypoint.py b/wa/framework/entrypoint.py
new file mode 100644
index 00000000..f6bf4f51
--- /dev/null
+++ b/wa/framework/entrypoint.py
@@ -0,0 +1,83 @@
+#    Copyright 2013-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+import sys
+import argparse
+import logging
+import subprocess
+
+from wa.framework import pluginloader, log
+from wa.framework.configuration import settings
+from wa.framework.exception import WAError
+from wa.utils.doc import format_body
+from wa.utils.misc import init_argument_parser
+
+
+import warnings
+warnings.filterwarnings(action='ignore', category=UserWarning, module='zope')
+
+
+logger = logging.getLogger('wa')
+
+
+def init_settings():
+    settings.load_environment()
+    if not os.path.isdir(settings.user_directory):
+        settings.initialize_user_directory()
+    settings.load_user_config()
+
+
+def get_argument_parser():
+    description = ("Execute automated workloads on a remote device and process "
+                   "the resulting output.\n\nUse \"wa <subcommand> -h\" to see "
+                   "help for individual subcommands.")
+    parser = argparse.ArgumentParser(description=format_body(description, 80),
+                                     prog='wa',
+                                     formatter_class=argparse.RawDescriptionHelpFormatter)
+    init_argument_parser(parser)
+    return parser
+
+
+def load_commands(subparsers):
+    commands = {}
+    for command in pluginloader.list_commands():
+        commands[command.name] = pluginloader.get_command(command.name, subparsers=subparsers)
+    return commands
+
+
+def main():
+    try:
+        log.init()
+        init_settings()
+        parser = get_argument_parser()
+        commands = load_commands(parser.add_subparsers(dest='command'))  # each command will add its own subparser
+        args = parser.parse_args()
+        settings.set('verbosity', args.verbose)
+        if args.config:
+            settings.load_config_file(args.config)
+        log.set_level(settings.verbosity)
+        command = commands[args.command]
+        sys.exit(command.execute(args))
+    except KeyboardInterrupt:
+        logger.info('Got CTRL-C. Aborting.')
+        sys.exit(1)
+    except Exception as e:  # pylint: disable=broad-except
+        log.log_error(e, logger, critical=True)
+        if isinstance(e, WAError):
+            sys.exit(2)
+        else:
+            sys.exit(3)
+
diff --git a/wa/framework/exception.py b/wa/framework/exception.py
new file mode 100644
index 00000000..570c1e59
--- /dev/null
+++ b/wa/framework/exception.py
@@ -0,0 +1,139 @@
+#    Copyright 2013-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from wa.utils.misc import get_traceback, TimeoutError  # NOQA pylint: disable=W0611
+
+
+class WAError(Exception):
+    """Base class for all Workload Automation exceptions."""
+    pass
+
+
+class NotFoundError(WAError):
+    """Raised when the specified item is not found."""
+    pass
+
+
+class ValidationError(WAError):
+    """Raised on failure to validate an extension."""
+    pass
+
+
+class WorkloadError(WAError):
+    """General Workload error."""
+    pass
+
+
+class HostError(WAError):
+    """Problem with the host on which WA is running."""
+    pass
+
+
+class JobError(WAError):
+    """Job execution error."""
+    pass
+
+
+class InstrumentError(WAError):
+    """General Instrument error."""
+    pass
+
+
+class ResultProcessorError(WAError):
+    """General ResultProcessor error."""
+    pass
+
+
+class ResourceError(WAError):
+    """General Resolver error."""
+    pass
+
+
+class CommandError(WAError):
+    """Raised by commands when they have encountered an error condition
+    during execution."""
+    pass
+
+
+class ToolError(WAError):
+    """Raised by tools when they have encountered an error condition
+    during execution."""
+    pass
+
+
+class ConfigError(WAError):
+    """Raised when configuration provided is invalid. This error suggests that
+    the user should modify their config and try again."""
+    pass
+
+
+class SerializerSyntaxError(Exception):
+    """
+    Error loading a serialized structure from/to a file handle.
+    """
+
+    def __init__(self, message, line=None, column=None):
+        super(SerializerSyntaxError, self).__init__(message)
+        self.line = line
+        self.column = column
+
+    def __str__(self):
+        linestring = ' on line {}'.format(self.line) if self.line else ''
+        colstring = ' in column {}'.format(self.column) if self.column else ''
+        message = 'Syntax Error{}: {}'
+        return message.format(''.join([linestring, colstring]), self.message)
+
+
+class PluginLoaderError(WAError):
+    """Raised when there is an error loading a plugin or
+    an external resource. Apart from the usual message, the __init__
+    takes an exc_info parameter which should be the result of
+    sys.exc_info() for the original exception (if any) that
+    caused the error."""
+
+    def __init__(self, message, exc_info=None):
+        super(PluginLoaderError, self).__init__(message)
+        self.exc_info = exc_info
+
+    def __str__(self):
+        if self.exc_info:
+            orig = self.exc_info[1]
+            orig_name = type(orig).__name__
+            if isinstance(orig, WAError):
+                reason = 'because of:\n{}: {}'.format(orig_name, orig)
+            else:
+                reason = 'because of:\n{}\n{}: {}'.format(get_traceback(self.exc_info), orig_name, orig)
+            return '\n'.join([self.message, reason])
+        else:
+            return self.message
+
+
+class WorkerThreadError(WAError):
+    """
+    This should get raised in the main thread if a non-WAError-derived
+    exception occurs on a worker/background thread. If a WAError-derived
+    exception is raised in the worker, then that exception should be re-raised
+    on the main thread directly -- the main point of this is to preserve the
+    backtrace in the output, as backtraces do not get output for WAErrors.
+
+    """
+
+    def __init__(self, thread, exc_info):
+        self.thread = thread
+        self.exc_info = exc_info
+        orig = self.exc_info[1]
+        orig_name = type(orig).__name__
+        message = 'Exception of type {} occurred on thread {}:\n'.format(orig_name, thread)
+        message += '{}\n{}: {}'.format(get_traceback(self.exc_info), orig_name, orig)
+        super(WorkerThreadError, self).__init__(message)
+
diff --git a/wa/framework/execution.py b/wa/framework/execution.py
new file mode 100644
index 00000000..1c072a3a
--- /dev/null
+++ b/wa/framework/execution.py
@@ -0,0 +1,369 @@
+import os
+import logging
+import shutil
+import random
+from copy import copy
+from collections import OrderedDict, defaultdict
+
+from wa.framework import pluginloader, signal, log
+from wa.framework.agenda import Agenda
+from wa.framework.run import Runner, RunnerJob
+from wa.framework.output import RunOutput
+from wa.framework.actor import JobActor
+from wa.framework.resource import ResourceResolver
+from wa.framework.exception import ConfigError, NotFoundError
+from wa.framework.configuration import ConfigurationPoint, PluginConfiguration, WA_CONFIGURATION
+from wa.utils.serializer import read_pod
+from wa.utils.misc import ensure_directory_exists as _d, Namespace
+from wa.utils.types import list_of, identifier, caseless_string
+
+
+__all__ = [
+    'Executor',
+    'ExecutionOutput',
+    'ExecutionContext',
+    'ExecuteWorkloadContainerActor',
+    'ExecuteWorkloadJobActor',
+]
+
+
+class Executor(object):
+
+    def __init__(self, output):
+        # logger is needed by load_agenda() and disable_instrument() below
+        self.logger = logging.getLogger('executor')
+        self.output = output
+        self.config = ExecutionRunConfiguration()
+        self.agenda_string = None
+        self.agenda = None
+        self.jobs = None
+        self.container = None
+        self.target = None
+
+    def load_config(self, filepath):
+        self.config.update(filepath)
+
+    def load_agenda(self, agenda_string):
+        if self.agenda:
+            raise RuntimeError('Only one agenda may be loaded per run.')
+        self.agenda_string = agenda_string
+        if os.path.isfile(agenda_string):
+            self.logger.debug('Loading agenda from {}'.format(agenda_string))
+            self.agenda = Agenda(agenda_string)
+            shutil.copy(agenda_string, self.output.config_directory)
+        else:
+            self.logger.debug('"{}" is not a file; assuming workload name.'.format(agenda_string))
+            self.agenda = Agenda()
+            self.agenda.add_workload_entry(agenda_string)
+
+    def disable_instrument(self, name):
+        if not self.agenda:
+            raise RuntimeError('initialize() must be invoked before disable_instrument()')
+        self.agenda.config['instrumentation'].append('~{}'.format(name))
+
+    def initialize(self):
+        if not self.agenda:
+            raise RuntimeError('No agenda has been loaded.')
+        self.config.update(self.agenda.config)
+        self.config.consolidate()
+        self._initialize_target()
+        self._initialize_job_config()
+
+    def execute(self, selectors=None):
+        pass
+
+    def finalize(self):
+        pass
+
+    def _initialize_target(self):
+        pass
+
+    def _initialize_job_config(self):
+        self.agenda.expand(self.target)
+        for tup in agenda_iterator(self.agenda, self.config.execution_order):
+            glob, sect, workload, iter_number = tup
+
+
+def agenda_iterator(agenda, order):
+    """
+    Iterates over all job components in an agenda, yielding tuples in the form ::
+
+        (global_entry, section_entry, workload_entry, iteration_number)
+
+    These tuples fully define the job to be created. The order in which they
+    are yielded is determined by the ``order`` parameter, which may be one of
+    the following values:
+
+    ``"by_iteration"`` 
+      The first iteration of each workload spec is executed one after the other,
+      so all workloads are executed before proceeding on to the second iteration.
+      E.g. A1 B1 C1 A2 C2 A3. This is the default if no order is explicitly specified.
+ 
+      In case of multiple sections, this will spread them out, such that specs
+      from the same section are further apart. E.g. given sections X and Y,
+      global specs A and B, and two iterations, this will run ::
+ 
+                      X.A1, Y.A1, X.B1, Y.B1, X.A2, Y.A2, X.B2, Y.B2
+ 
+    ``"by_section"`` 
+      Same  as ``"by_iteration"``, however this will group specs from the same
+      section together, so given sections X and Y, global specs A and B, and two iterations, 
+      this will run ::
+ 
+              X.A1, X.B1, Y.A1, Y.B1, X.A2, X.B2, Y.A2, Y.B2
+ 
+    ``"by_spec"``
+      All iterations of the first spec are executed before moving on to the next
+      spec. E.g. A1 A2 A3 B1 C1 C2. 
+ 
+    ``"random"``
+      Execution order is entirely random.
+
+    """
+    # TODO: this would be a place to perform section expansions.
+    #       (e.g. sweeps, cross-products, etc).
+
+    global_iterations = agenda.global_.number_of_iterations
+    all_iterations = [global_iterations]
+    all_iterations.extend([s.number_of_iterations for s in agenda.sections])
+    all_iterations.extend([w.number_of_iterations for w in agenda.workloads])
+    max_iterations = max(all_iterations)
+
+    if order == 'by_spec':
+        if agenda.sections:
+            for section in agenda.sections:
+                section_iterations = section.number_of_iterations or global_iterations
+                for workload in agenda.workloads + section.workloads:
+                    workload_iterations = workload.number_of_iterations or section_iterations
+                    for i in xrange(workload_iterations):
+                        yield agenda.global_, section, workload, i
+        else:  # not sections
+            for workload in agenda.workloads:
+                workload_iterations = workload.number_of_iterations or global_iterations
+                for i in xrange(workload_iterations):
+                    yield agenda.global_, None, workload, i
+    elif order == 'by_section':
+        for i in xrange(max_iterations):
+            if agenda.sections:
+                for section in agenda.sections:
+                    section_iterations = section.number_of_iterations or global_iterations
+                    for workload in agenda.workloads + section.workloads:
+                        workload_iterations = workload.number_of_iterations or section_iterations
+                        if i < workload_iterations:
+                            yield agenda.global_, section, workload, i
+            else:  # not sections
+                for workload in agenda.workloads:
+                    workload_iterations = workload.number_of_iterations or global_iterations
+                    if i < workload_iterations:
+                        yield agenda.global_, None, workload, i
+    elif order == 'by_iteration':
+        for i in xrange(max_iterations):
+            if agenda.sections:
+                for workload in agenda.workloads:
+                    for section in agenda.sections:
+                        section_iterations = section.number_of_iterations or global_iterations
+                        workload_iterations = workload.number_of_iterations or section_iterations
+                        if i < workload_iterations:
+                            yield agenda.global_, section, workload, i
+                # Now do the section-specific workloads
+                for section in agenda.sections:
+                    section_iterations = section.number_of_iterations or global_iterations
+                    for workload in section.workloads:
+                        workload_iterations = workload.number_of_iterations or section_iterations
+                        if i < workload_iterations:
+                            yield agenda.global_, section, workload, i
+            else:  # not sections
+                for workload in agenda.workloads:
+                    workload_iterations = workload.number_of_iterations or global_iterations
+                    if i < workload_iterations:
+                        yield agenda.global_, None, workload, i
+    elif order == 'random':
+        tuples = list(agenda_iterator(agenda, order='by_section'))
+        random.shuffle(tuples)
+        for t in tuples:
+            yield t
+    else:
+        raise ValueError('Invalid order: "{}"'.format(order))
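+
+# An illustrative sketch (not part of the module API): assuming ``agenda`` is
+# a loaded Agenda whose entries have ``id`` attributes, the orderings
+# documented above can be inspected like this:
+#
+#     for glob, sect, workload, i in agenda_iterator(agenda, 'by_section'):
+#         sect_id = sect.id if sect else 'global'
+#         print '{}.{}{}'.format(sect_id, workload.id, i + 1)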
+
+
+class RebootPolicy(object):
+    """
+    Represents the reboot policy for the execution -- at what points the device
+    should be rebooted. This, in turn, is controlled by the policy value that is
+    passed in on construction and would typically be read from the user's settings.
+    Valid policy values are:
+
+    :never: The device will never be rebooted.
+    :as_needed: Only reboot the device if it becomes unresponsive, or needs to be flashed, etc.
+    :initial: The device will be rebooted when the execution first starts, just before
+              executing the first workload spec.
+    :each_spec: The device will be rebooted before running a new workload spec.
+    :each_iteration: The device will be rebooted before each new iteration.
+
+    """
+
+    valid_policies = ['never', 'as_needed', 'initial', 'each_spec', 'each_iteration']
+
+    def __init__(self, policy):
+        policy = policy.strip().lower().replace(' ', '_')
+        if policy not in self.valid_policies:
+            message = 'Invalid reboot policy {}; must be one of {}'.format(policy, ', '.join(self.valid_policies))
+            raise ConfigError(message)
+        self.policy = policy
+
+    @property
+    def can_reboot(self):
+        return self.policy != 'never'
+
+    @property
+    def perform_initial_boot(self):
+        return self.policy not in ['never', 'as_needed']
+
+    @property
+    def reboot_on_each_spec(self):
+        return self.policy in ['each_spec', 'each_iteration']
+
+    @property
+    def reboot_on_each_iteration(self):
+        return self.policy == 'each_iteration'
+
+    def __str__(self):
+        return self.policy
+
+    __repr__ = __str__
+
+    def __cmp__(self, other):
+        if isinstance(other, RebootPolicy):
+            return cmp(self.policy, other.policy)
+        else:
+            return cmp(self.policy, other)
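+
+# A usage sketch (``device`` and its ``reboot()`` method are hypothetical
+# here; the policy string is normalized, so 'each spec' == 'each_spec'):
+#
+#     policy = RebootPolicy('each spec')
+#     if policy.reboot_on_each_spec:
+#         device.reboot()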
+
+
+class RuntimeParameterSetter(object):
+    """
+    Manages runtime parameter state during execution.
+
+    """
+
+    @property
+    def target(self):
+        return self.target_assistant.target
+
+    def __init__(self, target_assistant):
+        self.target_assistant = target_assistant
+        self.to_set = defaultdict(list)  # name --> list of values
+        self.last_set = {}
+        self.to_unset = defaultdict(int)  # name --> count
+
+    def validate(self, params):
+        self.target_assistant.validate_runtime_parameters(params)
+
+    def mark_set(self, params):
+        for name, value in params.iteritems():
+            self.to_set[name].append(value)
+
+    def mark_unset(self, params):
+        for name in params.iterkeys():
+            self.to_unset[name] += 1
+
+    def enact_set(self):
+        self.target_assistant.clear_parameters()
+        for name in self.to_set:
+            self._set_if_necessary(name)
+        self.target_assistant.set_parameters()
+
+    def enact_unset(self):
+        self.target_assistant.clear_parameters()
+        for name, count in self.to_unset.iteritems():
+            while count:
+                self.to_set[name].pop()
+                count -= 1
+            self._set_if_necessary(name)
+        self.target_assistant.set_parameters()
+
+    def _set_if_necessary(self, name):
+        if not self.to_set[name]:
+            return
+        new_value = self.to_set[name][-1]
+        prev_value = self.last_set.get(name)
+        if new_value != prev_value:
+            self.target_assistant.add_parameter(name, new_value)
+            self.last_set[name] = new_value
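+
+# The setter keeps a stack of values per parameter name: mark_set() pushes,
+# mark_unset() pops, and enact_*() push only the top of each stack to the
+# target, and only if it differs from the last value set. A sketch, assuming
+# a hypothetical ``assistant`` object implementing the target-assistant
+# interface used above:
+#
+#     setter = RuntimeParameterSetter(assistant)
+#     setter.mark_set({'governor': 'performance'})
+#     setter.enact_set()    # 'governor' is set on the target
+#     setter.mark_unset({'governor': 'performance'})
+#     setter.enact_unset()  # value popped; nothing left to set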
+
+
+class WorkloadExecutionConfig(object):
+
+    @staticmethod
+    def from_pod(pod):
+        return WorkloadExecutionConfig(**pod)
+
+    def __init__(self, workload_name, workload_parameters=None,
+                 runtime_parameters=None, components=None,
+                 assumptions=None):
+        self.workload_name = workload_name or None
+        self.workload_parameters = workload_parameters or {}
+        self.runtime_parameters = runtime_parameters or {}
+        self.components = components or {}
+        self.assumptions = assumptions or {}
+
+    def to_pod(self):
+        return copy(self.__dict__)
+
+
+class WorkloadExecutionActor(JobActor):
+
+    def __init__(self, target, config, loader=pluginloader):
+        self.target = target
+        self.config = config
+        self.logger = logging.getLogger('exec')
+        self.context = None
+        self.workload = loader.get_workload(config.workload_name, target,
+                                            **config.workload_parameters)
+
+    def get_config(self):
+        return self.config.to_pod()
+
+    def initialize(self, context):
+        self.context = context
+        self.workload.init_resources(self.context)
+        self.workload.validate()
+        self.workload.initialize(self.context)
+
+    def run(self):
+        if not self.workload:
+            self.logger.warning('Failed to initialize workload; skipping execution')
+            return
+        self.pre_run()
+        self.logger.info('Setting up workload')
+        with signal.wrap('WORKLOAD_SETUP'):
+            self.workload.setup(self.context)
+        try:
+            error = None
+            self.logger.info('Executing workload')
+            try:
+                with signal.wrap('WORKLOAD_EXECUTION'):
+                    self.workload.run(self.context)
+            except Exception as e:
+                log.log_error(e, self.logger)
+                error = e
+
+            self.logger.info('Processing execution results')
+            with signal.wrap('WORKLOAD_RESULT_UPDATE'):
+                if not error:
+                    self.workload.update_result(self.context)
+                else:
+                    self.logger.info('Workload execution failed; not extracting workload results.')
+                    raise error
+        finally:
+            if self.target.check_responsive():
+                self.logger.info('Tearing down workload')
+                with signal.wrap('WORKLOAD_TEARDOWN'):
+                    self.workload.teardown(self.context)
+            self.post_run()
+
+    def finalize(self):
+        self.workload.finalize(self.context)
+
+    def pre_run(self):
+        # TODO: enable components, etc
+        pass
+
+    def post_run(self):
+        pass
diff --git a/wa/framework/host.py b/wa/framework/host.py
new file mode 100644
index 00000000..7c5e94aa
--- /dev/null
+++ b/wa/framework/host.py
@@ -0,0 +1,23 @@
+import os
+
+from wa.framework.configuration import settings
+from wa.framework.exception import ConfigError
+from wa.utils.misc import ensure_directory_exists
+
+
+class HostRunConfig(object):
+    """
+    Host-side configuration for a run.
+    """
+
+    def __init__(self, output_directory,
+                 run_info_directory=None,
+                 run_config_directory=None):
+        self.output_directory = output_directory
+        self.run_info_directory = run_info_directory or os.path.join(self.output_directory, '_info')
+        self.run_config_directory = run_config_directory or os.path.join(self.output_directory, '_config')
+
+    def initialize(self):
+        ensure_directory_exists(self.output_directory)
+        ensure_directory_exists(self.run_info_directory)
+        ensure_directory_exists(self.run_config_directory)
diff --git a/wa/framework/log.py b/wa/framework/log.py
new file mode 100644
index 00000000..fe49c510
--- /dev/null
+++ b/wa/framework/log.py
@@ -0,0 +1,306 @@
+#    Copyright 2013-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+# pylint: disable=E1101
+import logging
+import string
+import threading
+import subprocess
+
+import colorama
+
+from wa.framework import signal
+from wa.framework.exception import WAError
+from wa.utils.misc import get_traceback
+
+
+COLOR_MAP = {
+    logging.DEBUG: colorama.Fore.BLUE,
+    logging.INFO: colorama.Fore.GREEN,
+    logging.WARNING: colorama.Fore.YELLOW,
+    logging.ERROR: colorama.Fore.RED,
+    logging.CRITICAL: colorama.Style.BRIGHT + colorama.Fore.RED,
+}
+
+RESET_COLOR = colorama.Style.RESET_ALL
+
+_indent_level = 0
+_indent_width = 4
+_console_handler = None
+
+
+def init(verbosity=0, color=True, indent_width=4,
+         regular_fmt='%(levelname)-8s %(message)s',
+         verbose_fmt='%(asctime)s %(levelname)-8s %(name)-10.10s: %(message)s',
+         debug=False):
+    global _indent_width, _console_handler
+    _indent_width = indent_width
+    signal.log_error_func = lambda m: log_error(m, signal.logger)
+
+    root_logger = logging.getLogger()
+    root_logger.setLevel(logging.DEBUG)
+
+    error_handler = ErrorSignalHandler(logging.DEBUG)
+    root_logger.addHandler(error_handler)
+
+    _console_handler = logging.StreamHandler()
+    if color:
+        formatter = ColorFormatter
+    else:
+        formatter = LineFormatter
+    if verbosity:
+        _console_handler.setLevel(logging.DEBUG)
+        _console_handler.setFormatter(formatter(verbose_fmt))
+    else:
+        _console_handler.setLevel(logging.INFO)
+        _console_handler.setFormatter(formatter(regular_fmt))
+    root_logger.addHandler(_console_handler)
+    logging.basicConfig(level=logging.DEBUG)
+    if not debug:
+        logging.raiseExceptions = False
+
+
+def set_level(level):
+    _console_handler.setLevel(level)
+
+
+def add_file(filepath, level=logging.DEBUG,
+             fmt='%(asctime)s %(levelname)-8s %(name)-10.10s: %(message)s'):
+    root_logger = logging.getLogger()
+    file_handler = logging.FileHandler(filepath)
+    file_handler.setLevel(level)
+    file_handler.setFormatter(LineFormatter(fmt))
+    root_logger.addHandler(file_handler)
+
+
+def enable(logs):
+    if isinstance(logs, list):
+        for log in logs:
+            __enable_logger(log)
+    else:
+        __enable_logger(logs)
+
+
+def disable(logs):
+    if isinstance(logs, list):
+        for log in logs:
+            __disable_logger(log)
+    else:
+        __disable_logger(logs)
+
+
+def __enable_logger(logger):
+    if isinstance(logger, basestring):
+        logger = logging.getLogger(logger)
+    logger.propagate = True
+
+
+def __disable_logger(logger):
+    if isinstance(logger, basestring):
+        logger = logging.getLogger(logger)
+    logger.propagate = False
+
+
+def indent():
+    global _indent_level
+    _indent_level += 1
+
+
+def dedent():
+    global _indent_level
+    _indent_level -= 1
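+
+# indent()/dedent() adjust a module-global indent level which LineFormatter
+# (below) applies to every line of every record, so nested stages of a run
+# can be visually offset. Typical (illustrative) use:
+#
+#     indent()
+#     try:
+#         logger.info('running job')  # rendered one level deeper
+#     finally:
+#         dedent()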
+
+
+def log_error(e, logger, critical=False):
+    """
+    Log the specified exception as an error. The error message will be formatted
+    differently depending on the nature of the exception.
+
+    :e: the error to log. Should be an instance of ``Exception``.
+    :logger: logger to be used.
+    :critical: if ``True``, this error will be logged at ``logging.CRITICAL``
+               level, otherwise it will be logged as ``logging.ERROR``.
+
+    """
+    if critical:
+        log_func = logger.critical
+    else:
+        log_func = logger.error
+
+    if isinstance(e, KeyboardInterrupt):
+        log_func('Got CTRL-C. Aborting.')
+    elif isinstance(e, WAError):
+        log_func(e)
+    elif isinstance(e, subprocess.CalledProcessError):
+        tb = get_traceback()
+        log_func(tb)
+        command = e.cmd
+        if e.args:
+            command = '{} {}'.format(command, ' '.join(e.args))
+        message = 'Command \'{}\' returned non-zero exit status {}\nOUTPUT:\n{}\n'
+        log_func(message.format(command, e.returncode, e.output))
+    elif isinstance(e, SyntaxError):
+        tb = get_traceback()
+        log_func(tb)
+        message = 'Syntax Error in {}, line {}, offset {}:'
+        log_func(message.format(e.filename, e.lineno, e.offset))
+        log_func('\t{}'.format(e.msg))
+    else:
+        tb = get_traceback()
+        log_func(tb)
+        log_func('{}({})'.format(e.__class__.__name__, e))
+
+
+class ErrorSignalHandler(logging.Handler):
+    """
+    Emits signals for ERROR and WARNING level traces.
+
+    """
+
+    def emit(self, record):
+        if record.levelno == logging.ERROR:
+            signal.send(signal.ERROR_LOGGED, self)
+        elif record.levelno == logging.WARNING:
+            signal.send(signal.WARNING_LOGGED, self)
+
+
+class LineFormatter(logging.Formatter):
+    """
+    Logs each line of the message separately.
+
+    """
+
+    def format(self, record):
+        record.message = record.getMessage()
+        if self.usesTime():
+            record.asctime = self.formatTime(record, self.datefmt)
+
+        indent = _indent_width * _indent_level
+        d = record.__dict__
+        parts = []
+        for line in record.message.split('\n'):
+            line = ' ' * indent + line
+            d.update({'message': line.strip('\r')})
+            parts.append(self._fmt % d)
+
+        return '\n'.join(parts)
+
+
+class ColorFormatter(LineFormatter):
+    """
+    Formats logging records with color and prepends record info
+    to each line of the message.
+
+        BLUE for DEBUG logging level
+        GREEN for INFO logging level
+        YELLOW for WARNING logging level
+        RED for ERROR logging level
+        BOLD RED for CRITICAL logging level
+
+    """
+
+    def __init__(self, fmt=None, datefmt=None):
+        super(ColorFormatter, self).__init__(fmt, datefmt)
+        template_text = self._fmt.replace('%(message)s', RESET_COLOR + '%(message)s${color}')
+        template_text = '${color}' + template_text + RESET_COLOR
+        self.fmt_template = string.Template(template_text)
+
+    def format(self, record):
+        self._set_color(COLOR_MAP[record.levelno])
+        return super(ColorFormatter, self).format(record)
+
+    def _set_color(self, color):
+        self._fmt = self.fmt_template.substitute(color=color)
+
+
+class BaseLogWriter(object):
+
+    def __init__(self, name, level=logging.DEBUG):
+        """
+        File-like object class designed to be used for logging from streams.
+        Each complete line (terminated by a new line character) gets logged
+        at the specified level. Incomplete lines are buffered until the next
+        new line.
+
+        :param name: The name of the logger that will be used.
+
+        """
+        self.logger = logging.getLogger(name)
+        self.buffer = ''
+        if level == logging.DEBUG:
+            self.do_write = self.logger.debug
+        elif level == logging.INFO:
+            self.do_write = self.logger.info
+        elif level == logging.WARNING:
+            self.do_write = self.logger.warning
+        elif level == logging.ERROR:
+            self.do_write = self.logger.error
+        else:
+            raise Exception('Unknown logging level: {}'.format(level))
+
+    def flush(self):
+        # Defined to match the interface expected by pexpect.
+        return self
+
+    def close(self):
+        if self.buffer:
+            self.logger.debug(self.buffer)
+            self.buffer = ''
+        return self
+
+    def __del__(self):
+        # Ensure we don't lose buffered output
+        self.close()
+
+
+class LogWriter(BaseLogWriter):
+
+    def write(self, data):
+        data = data.replace('\r\n', '\n').replace('\r', '\n')
+        if '\n' in data:
+            parts = data.split('\n')
+            parts[0] = self.buffer + parts[0]
+            for part in parts[:-1]:
+                self.do_write(part)
+            self.buffer = parts[-1]
+        else:
+            self.buffer += data
+        return self
+
+
+class LineLogWriter(BaseLogWriter):
+
+    def write(self, data):
+        self.do_write(data)
+
+
+class StreamLogger(threading.Thread):
+    """
+    Logs output from a stream in a thread.
+
+    """
+
+    def __init__(self, name, stream, level=logging.DEBUG, klass=LogWriter):
+        super(StreamLogger, self).__init__()
+        self.writer = klass(name, level)
+        self.stream = stream
+        self.daemon = True
+
+    def run(self):
+        line = self.stream.readline()
+        while line:
+            self.writer.write(line.rstrip('\n'))
+            line = self.stream.readline()
+        self.writer.close()
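+
+# A usage sketch: forwarding a subprocess's stdout into the logging tree
+# (the command is illustrative only):
+#
+#     proc = subprocess.Popen(['adb', 'logcat'], stdout=subprocess.PIPE)
+#     StreamLogger('adb', proc.stdout).start()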
diff --git a/wa/framework/output.py b/wa/framework/output.py
new file mode 100644
index 00000000..49ce8721
--- /dev/null
+++ b/wa/framework/output.py
@@ -0,0 +1,362 @@
+import os
+import shutil
+import logging
+import uuid
+from copy import copy
+from datetime import datetime, timedelta
+
+from wa.framework import signal, log
+from wa.framework.configuration.core import merge_config_values
+from wa.utils import serializer
+from wa.utils.misc import enum_metaclass, ensure_directory_exists as _d
+from wa.utils.types import numeric
+
+
+class Status(object):
+
+    __metaclass__ = enum_metaclass('values', return_name=True)
+
+    values = [
+        'NEW',
+        'PENDING',
+        'RUNNING',
+        'COMPLETE',
+        'OK',
+        'OKISH',
+        'NONCRITICAL',
+        'PARTIAL',
+        'FAILED',
+        'ABORTED',
+        'SKIPPED',
+        'UNKNOWN',
+    ]
+
+
+class WAOutput(object):
+
+    basename = '.wa-output'
+
+    @classmethod
+    def load(cls, source):
+        if os.path.isfile(source):
+            pod = serializer.load(source)
+        elif os.path.isdir(source):
+            pod = serializer.load(os.path.join(source, cls.basename))
+        else:
+            message = 'Cannot load {} from {}'
+            raise ValueError(message.format(cls.__name__, source))
+        return cls.from_pod(pod)
+
+    @classmethod
+    def from_pod(cls, pod):
+        instance = cls(pod['output_directory'])
+        instance.status = pod['status']
+        instance.metrics = [Metric.from_pod(m) for m in pod['metrics']]
+        instance.artifacts = [Artifact.from_pod(a) for a in pod['artifacts']]
+        instance.events = [RunEvent.from_pod(e) for e in pod['events']]
+        instance.classifiers = pod['classifiers']
+        return instance
+
+    def __init__(self, output_directory):
+        self.logger = logging.getLogger('output')
+        self.output_directory = output_directory
+        self.status = Status.UNKNOWN
+        self.classifiers = {}
+        self.metrics = []
+        self.artifacts = []
+        self.events = []
+
+    def initialize(self, overwrite=False):
+        if os.path.exists(self.output_directory):
+            if not overwrite:
+                raise RuntimeError('"{}" already exists.'.format(self.output_directory))
+            self.logger.info('Removing existing output directory.')
+            shutil.rmtree(self.output_directory)
+        self.logger.debug('Creating output directory {}'.format(self.output_directory))
+        os.makedirs(self.output_directory)
+
+    def add_metric(self, name, value, units=None, lower_is_better=False, classifiers=None):
+        classifiers = merge_config_values(self.classifiers, classifiers or {})
+        self.metrics.append(Metric(name, value, units, lower_is_better, classifiers))
+
+    def add_artifact(self, name, path, kind, *args, **kwargs):
+        path = _check_artifact_path(path, self.output_directory)
+        self.artifacts.append(Artifact(name, path, kind, Artifact.RUN, *args, **kwargs))
+
+    def get_path(self, subpath):
+        return os.path.join(self.output_directory, subpath)
+
+    def to_pod(self):
+        return {
+            'output_directory': self.output_directory,
+            'status': self.status,
+            'metrics': [m.to_pod() for m in self.metrics],
+            'artifacts': [a.to_pod() for a in self.artifacts],
+            'events': [e.to_pod() for e in self.events],
+            'classifiers': copy(self.classifiers),
+        }
+
+    def persist(self):
+        statefile = os.path.join(self.output_directory, self.basename)
+        with open(statefile, 'wb') as wfh:
+            serializer.dump(self, wfh)
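+
+# persist() and load() form a round trip through the serialized "pod"
+# representation, e.g. (paths illustrative only):
+#
+#     out = WAOutput('/tmp/wa-results')
+#     out.initialize(overwrite=True)
+#     out.add_metric('score', 42)
+#     out.persist()
+#     restored = WAOutput.load('/tmp/wa-results')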
+
+
+class RunInfo(object):
+
+    default_name_format = 'wa-run-%y%m%d-%H%M%S'
+
+    def __init__(self, project=None, project_stage=None, name=None):
+        self.uuid = uuid.uuid4()
+        self.project = project
+        self.project_stage = project_stage
+        self.name = name or datetime.now().strftime(self.default_name_format)
+        self.start_time = None
+        self.end_time = None
+        self.duration = None
+
+    @staticmethod
+    def from_pod(pod):
+        instance = RunInfo()
+        instance.uuid = uuid.UUID(pod['uuid'])
+        instance.project = pod['project']
+        instance.project_stage = pod['project_stage']
+        instance.name = pod['name']
+        instance.start_time = pod['start_time']
+        instance.end_time = pod['end_time']
+        if pod['duration'] is not None:
+            instance.duration = timedelta(seconds=pod['duration'])
+        return instance
+
+    def to_pod(self):
+        d = copy(self.__dict__)
+        d['uuid'] = str(self.uuid)
+        if self.duration is not None:
+            d['duration'] = self.duration.total_seconds()
+        return d
+
+
+class RunOutput(WAOutput):
+
+    @property
+    def info_directory(self):
+        return _d(os.path.join(self.output_directory, '_info'))
+
+    @property
+    def config_directory(self):
+        return _d(os.path.join(self.output_directory, '_config'))
+
+    @property
+    def failed_directory(self):
+        return _d(os.path.join(self.output_directory, '_failed'))
+
+    @property
+    def log_file(self):
+        return os.path.join(self.output_directory, 'run.log')
+
+    @classmethod
+    def from_pod(cls, pod):
+        instance = WAOutput.from_pod(pod)
+        instance.info = RunInfo.from_pod(pod['info'])
+        instance.jobs = [JobOutput.from_pod(i) for i in pod['jobs']]
+        instance.failed = [JobOutput.from_pod(i) for i in pod['failed']]
+        return instance
+
+    def __init__(self, output_directory):
+        super(RunOutput, self).__init__(output_directory)
+        self.logger = logging.getLogger('output')
+        self.info = RunInfo()
+        self.jobs = []
+        self.failed = []
+
+    def initialize(self, overwrite=False):
+        super(RunOutput, self).initialize(overwrite)
+        log.add_file(self.log_file)
+        self.add_artifact('runlog', self.log_file, 'log')
+
+    def create_job_output(self, id):
+        outdir = os.path.join(self.output_directory, id)
+        job_output = JobOutput(outdir)
+        self.jobs.append(job_output)
+        return job_output
+
+    def move_failed(self, job_output):
+        basename = os.path.basename(job_output.output_directory)
+        i = 1
+        dest = os.path.join(self.failed_directory, '{}-{}'.format(basename, i))
+        while os.path.exists(dest):
+            i += 1
+            dest = os.path.join(self.failed_directory, '{}-{}'.format(basename, i))
+        shutil.move(job_output.output_directory, dest)
+
+    def to_pod(self):
+        pod = super(RunOutput, self).to_pod()
+        pod['info'] = self.info.to_pod()
+        pod['jobs'] = [i.to_pod() for i in self.jobs]
+        pod['failed'] = [i.to_pod() for i in self.failed]
+        return pod
+
+
+class JobOutput(WAOutput):
+
+    def add_artifact(self, name, path, kind, *args, **kwargs):
+        path = _check_artifact_path(path, self.output_directory)
+        self.artifacts.append(Artifact(name, path, kind, Artifact.ITERATION, *args, **kwargs))
+
+
+class Artifact(object):
+    """
+    This is an artifact generated during execution/post-processing of a workload.
+    Unlike metrics, this represents an actual artifact, such as a file, that has
+    been generated. This may be a "result", such as a trace, or it could be
+    "metadata", such as logs.
+    These are distinguished using the ``kind`` attribute, which also helps WA decide
+    how it should be handled. Currently supported kinds are:
+
+        :log: A log file. Not part of "results" as such, but contains information about
+              the run/workload execution that may be useful for diagnostics/meta analysis.
+        :meta: A file containing metadata. This is not part of "results", but contains
+               information that may be necessary to reproduce the results (contrast with
+               ``log`` artifacts which are *not* necessary).
+        :data: This file contains new data, not available otherwise and should be considered
+               part of the "results" generated by WA. Most traces would fall into this category.
+        :export: Exported version of results or some other artifact. This signifies that
+                 this artifact does not contain any new data that is not available
+                 elsewhere and that it may be safely discarded without losing information.
+        :raw: Signifies that this is a raw dump/log that is normally processed to extract
+              useful information and is then discarded. In a sense, it is the opposite of
+              ``export``, but in general may also be discarded.
+
+              .. note:: whether a file is marked as ``log``/``data`` or ``raw`` depends on
+                        how important it is to preserve this file, e.g. when archiving, vs
+                        how much space it takes up. Unlike ``export`` artifacts which are
+                        (almost) always ignored by other exporters as that would never result
+                        in data loss, ``raw`` files *may* be processed by exporters if they
+                        decided that the risk of losing potentially (though unlikely) useful
+                        data is greater than the time/space cost of handling the artifact (e.g.
+                        a database uploader may choose to ignore ``raw`` artifacts, whereas a
+                        network filer archiver may choose to archive them).
+
+        .. note:: The kind parameter is intended to represent the logical function of a particular
+                  artifact, not its intended means of processing -- this is left entirely up to the
+                  result processors.
+
+    """
+
+    RUN = 'run'
+    ITERATION = 'iteration'
+
+    valid_kinds = ['log', 'meta', 'data', 'export', 'raw']
+
+    @staticmethod
+    def from_pod(pod):
+        return Artifact(**pod)
+
+    def __init__(self, name, path, kind, level=RUN, mandatory=False, description=None):
+        """"
+        :param name: Name that uniquely identifies this artifact.
+        :param path: The *relative* path of the artifact. Depending on the ``level``
+                     must be either relative to the run or iteration output directory.
+                     Note: this path *must* be delimited using ``/`` irrespective of the
+                     operating system.
+        :param kind: The type of the artifact this is (e.g. log file, result, etc.); this
+                     will be used as a hint to result processors. This must be one of ``'log'``,
+                     ``'meta'``, ``'data'``, ``'export'``, ``'raw'``.
+        :param level: The level at which the artifact will be generated. Must be either
+                      ``'iteration'`` or ``'run'``.
+        :param mandatory: Boolean value indicating whether this artifact must be present
+                          at the end of result processing for its level.
+        :param description: A free-form description of what this artifact is.
+
+        """
+        if kind not in self.valid_kinds:
+            raise ValueError('Invalid Artifact kind: {}; must be in {}'.format(kind, self.valid_kinds))
+        self.name = name
+        self.path = path.replace('/', os.sep) if path is not None else path
+        self.kind = kind
+        self.level = level
+        self.mandatory = mandatory
+        self.description = description
+
+    def exists(self, context):
+        """Returns ``True`` if artifact exists within the specified context, and
+        ``False`` otherwise."""
+        fullpath = os.path.join(context.output_directory, self.path)
+        return os.path.exists(fullpath)
+
+    def to_pod(self):
+        return copy(self.__dict__)
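+
+# An example declaration (a sketch; the path is relative to the appropriate
+# output directory and must use '/' as the separator):
+#
+#     trace = Artifact('trace', 'trace.dat', 'data',
+#                      level=Artifact.ITERATION, mandatory=True,
+#                      description='binary trace dump')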
+
+
+class RunEvent(object):
+    """
+    An event that occurred during a run.
+
+    """
+
+    @staticmethod
+    def from_pod(pod):
+        instance = RunEvent(pod['message'])
+        instance.timestamp = pod['timestamp']
+        return instance
+
+    def __init__(self, message):
+        self.timestamp = datetime.utcnow()
+        self.message = message
+
+    def to_pod(self):
+        return copy(self.__dict__)
+
+    def __str__(self):
+        return '{} {}'.format(self.timestamp, self.message)
+
+    __repr__ = __str__
+
+
+class Metric(object):
+    """
+    This is a single metric collected from executing a workload.
+
+    :param name: the name of the metric. Uniquely identifies the metric
+                 within the results.
+    :param value: The numerical value of the metric for this execution of
+                  a workload. This can be either an int or a float.
+    :param units: Units for the collected value. Can be None if the value
+                  has no units (e.g. it's a count or a standardised score).
+    :param lower_is_better: Boolean flag indicating whether lower values are
+                            better than higher ones. Defaults to False.
+    :param classifiers: A set of key-value pairs to further classify this metric
+                        beyond current iteration (e.g. this can be used to identify
+                        sub-tests).
+
+    """
+
+    @staticmethod
+    def from_pod(pod):
+        return Metric(**pod)
+
+    def __init__(self, name, value, units=None, lower_is_better=False, classifiers=None):
+        self.name = name
+        self.value = numeric(value)
+        self.units = units
+        self.lower_is_better = lower_is_better
+        self.classifiers = classifiers or {}
+
+    def to_pod(self):
+        return copy(self.__dict__)
+
+    def __str__(self):
+        result = '{}: {}'.format(self.name, self.value)
+        if self.units:
+            result += ' ' + self.units
+        result += ' ({})'.format('-' if self.lower_is_better else '+')
+        return '<{}>'.format(result)
+
+    __repr__ = __str__
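+
+# Example (illustrative): a duration metric where smaller is better, with a
+# classifier identifying the sub-test it came from:
+#
+#     m = Metric('startup_time', 2.3, units='seconds', lower_is_better=True,
+#                classifiers={'test': 'cold_start'})
+#     str(m)  # '<startup_time: 2.3 seconds (-)>'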
+
+
+def _check_artifact_path(path, rootpath):
+    if path.startswith(rootpath):
+        return os.path.abspath(path)
+    rootpath = os.path.abspath(rootpath)
+    full_path = os.path.join(rootpath, path)
+    if not os.path.isfile(full_path):
+        raise ValueError('Cannot add artifact because {} does not exist.'.format(full_path))
+    return full_path
diff --git a/wa/framework/plugin.py b/wa/framework/plugin.py
new file mode 100644
index 00000000..fd5b159f
--- /dev/null
+++ b/wa/framework/plugin.py
@@ -0,0 +1,734 @@
+#    Copyright 2013-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+# pylint: disable=E1101
+import os
+import sys
+import inspect
+import imp
+import string
+import logging
+from copy import copy
+from itertools import chain
+from collections import OrderedDict, defaultdict
+
+from devlib.exception import TargetError
+
+from wa.framework import log
+from wa.framework.exception import ValidationError, ConfigError, NotFoundError, PluginLoaderError
+from wa.framework.configuration.core import ConfigurationPoint, ConfigurationPointCollection
+from wa.utils.misc import isiterable, ensure_directory_exists as _d, get_article
+from wa.utils.misc import walk_modules, merge_dicts
+from wa.utils.types import identifier, integer, boolean, caseless_string
+
+
+class Parameter(ConfigurationPoint):
+
+    is_runtime = False
+
+    def __init__(self, name,
+                 kind=None,
+                 mandatory=None,
+                 default=None,
+                 override=False,
+                 allowed_values=None,
+                 description=None,
+                 constraint=None,
+                 convert_types=True,
+                 global_alias=None,
+                 reconfigurable=True):
+        """
+        :param global_alias: This is an alternative alias for this parameter.
+                             Unlike the name, this alias will not be
+                             namespaced under the owning extension's name
+                             (hence the global part). This is introduced
+                             primarily for backward compatibility -- so that
+                             old extension settings names still work. This
+                             should not be used for new parameters.
+
+        :param reconfigurable: This indicates whether this parameter may be
+                               reconfigured during the run (e.g. between different
+                               iterations). This determines where in run configuration
+                               this parameter may appear.
+
+        For other parameters, see the docstring for
+        ``wa.framework.configuration.core.ConfigurationPoint``.
+
+        """
+        super(Parameter, self).__init__(name, kind, mandatory,
+                                        default, override, allowed_values,
+                                        description, constraint,
+                                        convert_types)
+        self.global_alias = global_alias
+        self.reconfigurable = reconfigurable
+
+    def __repr__(self):
+        d = copy(self.__dict__)
+        del d['description']
+        return 'Param({})'.format(d)
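+
+# An example of declaring parameters on a plugin class (a sketch; the names
+# are hypothetical, ``boolean`` comes from wa.utils.types imported above):
+#
+#     parameters = [
+#         Parameter('iterations', kind=int, default=1,
+#                   description='Number of times to repeat the test.'),
+#         Parameter('clean_up', kind=boolean, default=True,
+#                   global_alias='clean_up',
+#                   description='Remove on-device files after the run.'),
+#     ]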
+
+
+class PluginAliasCollection(object):
+    """
+    Accumulator for plugin alias objects (see ``Alias`` below). This will
+    replace any class member list accumulating such aliases through the magic
+    of metaprogramming\ [*]_.
+
+    .. [*] which is totally safe and not going to backfire in any way...
+
+    """
+
+    @property
+    def values(self):
+        return self._attrs.values()
+
+    def __init__(self):
+        self._attrs = OrderedDict()
+
+    def add(self, p):
+        p = self._to_attrcls(p)
+        if p.name in self._attrs:
+            if p.override:
+                newp = copy(self._attrs[p.name])
+                for a, v in p.__dict__.iteritems():
+                    if v is not None:
+                        setattr(newp, a, v)
+                self._attrs[p.name] = newp
+            else:
+                # Duplicate attribute condition is checked elsewhere.
+                pass
+        else:
+            self._attrs[p.name] = p
+
+    append = add
+
+    def __str__(self):
+        return 'AC({})'.format(map(str, self._attrs.values()))
+
+    __repr__ = __str__
+
+    def _to_attrcls(self, p):
+        if isinstance(p, (tuple, list)):
+            # must be in the form (name, {param: value, ...})
+            p = Alias(p[0], **p[1])
+        elif not isinstance(p, Alias):
+            raise ValueError('Invalid parameter value: {}'.format(p))
+        if p.name in self._attrs:
+            raise ValueError('Attribute {} has already been defined.'.format(p.name))
+        return p
+
+    def __iadd__(self, other):
+        for p in other:
+            self.add(p)
+        return self
+
+    def __iter__(self):
+        return iter(self.values)
+
+    def __contains__(self, p):
+        return p in self._attrs
+
+    def __getitem__(self, i):
+        return self._attrs[i]
+
+    def __len__(self):
+        return len(self._attrs)
+
+
+class Alias(object):
+    """
+    This represents a configuration alias for an extension, mapping an alternative name to
+    a set of parameter values, effectively providing an alternative set of default values.
+
+    """
+
+    def __init__(self, name, **kwargs):
+        self.name = name
+        self.parameters = kwargs
+        self.plugin_name = None  # gets set by the MetaClass
+
+    def validate(self, plugin):
+        plugin_params = set(p.name for p in plugin.parameters)
+        for param in self.parameters:
+            if param not in plugin_params:
+                # Raising ConfigError because aliases might have come through
+                # the config.
+                msg = 'Parameter {} (defined in alias {}) is invalid for {}'
+                raise ConfigError(msg.format(param, self.name, plugin.name))
+
+
+class PluginMeta(type):
+    """
+    This basically adds some magic to plugins to make implementing new plugins,
+    such as workloads, less complicated.
+
+    It ensures that certain class attributes (specified by the ``to_propagate``
+    attribute of the metaclass) get propagated down the inheritance hierarchy. The assumption
+    is that the values of the attributes specified in the class are iterable; if that is not met,
+    Bad Things(tm) will happen.
+
+    This also provides "virtual" method implementations. The ``super``'s version of these
+    methods (specified by the ``virtual_methods`` attribute of the metaclass) will be 
+    automatically invoked.
+
+    """
+
+    to_propagate = [
+        ('parameters', ConfigurationPointCollection),
+    ]
+
+    #virtual_methods = ['validate', 'initialize', 'finalize']
+    virtual_methods = []
+
+    def __new__(mcs, clsname, bases, attrs):
+        mcs._propagate_attributes(bases, attrs)
+        cls = type.__new__(mcs, clsname, bases, attrs)
+        mcs._setup_aliases(cls)
+        mcs._implement_virtual(cls, bases)
+        return cls
+
+    @classmethod
+    def _propagate_attributes(mcs, bases, attrs):
+        """
+        For attributes specified by to_propagate, their values will be a union of
+        those specified for cls and its bases (cls values overriding those of bases
+        in case of conflicts).
+
+        """
+        for prop_attr, attr_collector_cls in mcs.to_propagate:
+            should_propagate = False
+            propagated = attr_collector_cls()
+            for base in bases:
+                if hasattr(base, prop_attr):
+                    propagated += getattr(base, prop_attr) or []
+                    should_propagate = True
+            if prop_attr in attrs:
+                propagated += attrs[prop_attr] or []
+                should_propagate = True
+            if should_propagate:
+                attrs[prop_attr] = propagated
+
+    @classmethod
+    def _setup_aliases(mcs, cls):
+        if hasattr(cls, 'aliases'):
+            aliases, cls.aliases = cls.aliases, PluginAliasCollection()
+            for alias in aliases:
+                if isinstance(alias, basestring):
+                    alias = Alias(alias)
+                alias.validate(cls)
+                alias.plugin_name = cls.name
+                cls.aliases.add(alias)
+
+    @classmethod
+    def _implement_virtual(mcs, cls, bases):
+        """
+        This implements automatic method propagation to the bases, so
+        that you don't have to do something like
+
+            super(cls, self).vmname()
+
+        This also ensures that the methods that have been identified as
+        "globally virtual" are executed exactly once per WA execution, even if
+        invoked through instances of different subclasses.
+
+        """
+        methods = {}
+        called_globals = set()
+        for vmname in mcs.virtual_methods:
+            clsmethod = getattr(cls, vmname, None)
+            if clsmethod:
+                basemethods = [getattr(b, vmname) for b in bases if hasattr(b, vmname)]
+                methods[vmname] = [bm for bm in basemethods if bm != clsmethod]
+                methods[vmname].append(clsmethod)
+
+                def generate_method_wrapper(vname):  # pylint: disable=unused-argument
+                    # this creates a closure with the method name so that it
+                    # does not need to be passed to the wrapper as an argument,
+                    # leaving the wrapper to accept exactly the same set of
+                    # arguments as the method it is wrapping.
+                    name__ = vname  # pylint: disable=cell-var-from-loop
+
+                    def wrapper(self, *args, **kwargs):
+                        for dm in methods[name__]:
+                            dm(self, *args, **kwargs)
+                    return wrapper
+
+                setattr(cls, vmname, generate_method_wrapper(vmname))
+
+
+class Plugin(object):
+    """
+    Base class for all WA plugins.
+    A plugin extends the functionality of WA in some way. Plugins are discovered
+    and loaded dynamically by the plugin loader upon invocation of WA scripts.
+    Adding a plugin is a matter of placing a class that implements an appropriate
+    interface somewhere it would be discovered by the loader. That "somewhere" is
+    typically one of the plugin subdirectories under ``~/.workload_automation/``.
+
+    """
+    __metaclass__ = PluginMeta
+
+    name = None
+    kind = None
+    parameters = []
+    aliases = []
+
+    @classmethod
+    def get_default_config(cls):
+        return {p.name: p.default for p in cls.parameters}
+
+    @classmethod
+    def get_parameter(cls, name):
+        for param in cls.parameters:
+            if param.name == name or name in param.aliases:
+                return param
+
+    def __init__(self, **kwargs):
+        self.logger = logging.getLogger(self.name)
+        self.capabilities = getattr(self.__class__, 'capabilities', [])
+        self.update_config(**kwargs)
+
+    def get_config(self):
+        """
+        Returns current configuration (i.e. parameter values) of this plugin.
+
+        """
+        config = {}
+        for param in self.parameters:
+            config[param.name] = getattr(self, param.name, None)
+        return config
+
+    def update_config(self, **kwargs):
+        """
+        Updates current configuration (i.e. parameter values) of this plugin.
+
+        """
+        for param in self.parameters:
+            param.set_value(self, kwargs.get(param.name))
+        for key in kwargs:
+            if key not in self.parameters:
+                message = 'Unexpected parameter "{}" for {}'
+                raise ConfigError(message.format(key, self.name))
+
+    def validate(self):
+        """
+        Perform basic validation to ensure that this plugin is capable of running.
+        This is intended as an early check to ensure the plugin has not been
+        mis-configured, rather than a comprehensive check (that may, e.g., require
+        access to the execution context).
+
+        This method may also be used to enforce (i.e. set as well as check) inter-parameter
+        constraints for the plugin (e.g. if valid values for parameter A depend on the value
+        of parameter B -- something that is not possible to enforce using ``Parameter``\ 's
+        ``constraint`` attribute).
+
+        """
+        if self.name is None:
+            raise ValidationError('name not set for {}'.format(self.__class__.__name__))
+        if self.kind is None:
+            raise ValidationError('kind not set for {}'.format(self.name))
+        for param in self.parameters:
+            param.validate(self)
+
+    def initialize(self, context):
+        pass
+
+    def finalize(self, context):
+        pass
+
+    def has(self, capability):
+        """Check if this extension has the specified capability. The alternative method ``can`` is
+        identical to this. Which to use is up to the caller depending on what makes semantic sense
+        in the context of the capability, e.g. ``can('hard_reset')`` vs  ``has('active_cooling')``."""
+        return capability in self.capabilities
+
+    can = has
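+
+# A minimal (hypothetical) plugin definition, sketching how class-level
+# parameters become validated instance attributes:
+#
+#     class MyTool(Plugin):
+#
+#         name = 'mytool'
+#         kind = 'tool'
+#         parameters = [
+#             Parameter('timeout', kind=int, default=30),
+#         ]
+#
+#     tool = MyTool(timeout=60)
+#     tool.validate()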
+
+
+class TargetedPluginMeta(PluginMeta):
+
+    to_propagate = PluginMeta.to_propagate + [
+        ('supported_targets', list),
+        ('supported_platforms', list),
+    ]
+    virtual_methods = PluginMeta.virtual_methods + [
+        'validate_on_target',
+    ]
+
+
+class TargetedPlugin(Plugin):
+    """
+    A plugin that operates on a target device.  These kinds of plugins are created
+    with a ``devlib.Target`` instance and may only support certain kinds of targets.
+
+    """
+
+    __metaclass__ = TargetedPluginMeta
+
+    supported_targets = []
+    supported_platforms = []
+
+    def __init__(self, target, **kwargs):
+        super(TargetedPlugin, self).__init__(**kwargs)
+        if self.supported_targets and target.os not in self.supported_targets:
+            raise TargetError('Plugin {} does not support target OS {}'.format(self.name, target.os))
+        if self.supported_platforms and target.platform.name not in self.supported_platforms:
+            raise TargetError('Plugin {} does not support platform {}'.format(self.name, target.platform.name))
+        self.target = target
+
+    def validate_on_target(self):
+        """
+        This will be invoked once at the beginning of a run after a ``Target``
+        has been connected and initialized. This is intended for validation
+        that cannot be performed offline but does not depend on ephemeral
+        state that is likely to change during the course of a run (validation
+        against such state should be done during setup of a particular
+        execution).
+
+        """
+        pass
+
+
+class GlobalParameterAlias(object):
+    """
+    Represents a "global alias" for an plugin parameter. A global alias
+    is specified at the top-level of config rather namespaced under an plugin
+    name.
+
+    Multiple plugins may have parameters with the same global_alias if they are
+    part of the same inheritance hierarchy and one parameter is an override of the
+    other. This class keeps track of all such cases in its plugins dict.
+
+    """
+
+    def __init__(self, name):
+        self.name = name
+        self.plugins = {}
+
+    def iteritems(self):
+        for ext in self.plugins.itervalues():
+            yield (self.get_param(ext), ext)
+
+    def get_param(self, ext):
+        for param in ext.parameters:
+            if param.global_alias == self.name:
+                return param
+        message = 'Plugin {} does not have a parameter with global alias {}'
+        raise ValueError(message.format(ext.name, self.name))
+
+    def update(self, other_ext):
+        self._validate_ext(other_ext)
+        self.plugins[other_ext.name] = other_ext
+
+    def _validate_ext(self, other_ext):
+        other_param = self.get_param(other_ext)
+        for param, ext in self.iteritems():
+            if ((not (issubclass(ext, other_ext) or issubclass(other_ext, ext))) and
+                    other_param.kind != param.kind):
+                message = 'Duplicate global alias {} declared in {} and {} plugins with different types'
+                raise PluginLoaderError(message.format(self.name, ext.name, other_ext.name))
+            if param.name != other_param.name:
+                message = 'Two params {} in {} and {} in {} both declare global alias {}'
+                raise PluginLoaderError(message.format(param.name, ext.name,
+                                                       other_param.name, other_ext.name, self.name))
+
+    def __str__(self):
+        text = 'GlobalAlias({} => {})'
+        extlist = ', '.join(['{}.{}'.format(e.name, p.name) for p, e in self.iteritems()])
+        return text.format(self.name, extlist)
+
+
+MODNAME_TRANS = string.maketrans(':/\\.', '____')
+
+
+class PluginLoader(object):
+    """
+    Discovers, enumerates and loads available devices, configs, etc.
+    The loader will attempt to discover things on construction by looking
+    in a predetermined set of locations defined by default_paths. Optionally,
+    additional locations may be specified through the paths parameter, which
+    must be a list of additional Python module paths (i.e. dot-delimited).
+
+    """
+
+    def __init__(self, packages=None, paths=None, ignore_paths=None, keep_going=False):
+        """
+        :packages: List of packages to load plugins from.
+        :paths: List of paths to be searched for Python modules containing
+                WA plugins.
+        :ignore_paths: List of paths to ignore when searching for WA plugins
+                       (these would typically be subdirectories of one or more
+                       locations listed in the ``paths`` parameter).
+        :keep_going: Specifies whether to keep going if an error occurs while
+                     loading plugins.
+
+        """
+        self.logger = logging.getLogger('pluginloader')
+        self.keep_going = keep_going
+        self.packages = packages or []
+        self.paths = paths or []
+        self.ignore_paths = ignore_paths or []
+        self.plugins = {}
+        self.kind_map = defaultdict(dict)
+        self.aliases = {}
+        self.global_param_aliases = {}
+        self._discover_from_packages(self.packages)
+        self._discover_from_paths(self.paths, self.ignore_paths)
+
+    def update(self, packages=None, paths=None, ignore_paths=None):
+        """ Load plugins from the specified paths/packages
+        without clearing or reloading existing plugin. """
+        if packages:
+            self.packages.extend(packages)
+            self._discover_from_packages(packages)
+        if paths:
+            self.paths.extend(paths)
+            self.ignore_paths.extend(ignore_paths or [])
+            self._discover_from_paths(paths, ignore_paths or [])
+
+    def clear(self):
+        """ Clear all discovered items. """
+        self.plugins = {}
+        self.kind_map.clear()
+
+    def reload(self):
+        """ Clear all discovered items and re-run the discovery. """
+        self.clear()
+        self._discover_from_packages(self.packages)
+        self._discover_from_paths(self.paths, self.ignore_paths)
+
+    def get_plugin_class(self, name, kind=None):
+        """
+        Return the class for the specified plugin if found; raises
+        ``NotFoundError`` or ``ValueError`` otherwise.
+
+        """
+        name, _ = self.resolve_alias(name)
+        if kind is None:
+            try:
+                return self.plugins[name]
+            except KeyError:
+                raise NotFoundError('Plugin {} not found.'.format(name))
+        if kind not in self.kind_map:
+            raise ValueError('Unknown plugin type: {}'.format(kind))
+        store = self.kind_map[kind]
+        if name not in store:
+            raise NotFoundError('Plugin {} is not {} {}.'.format(name, get_article(kind), kind))
+        return store[name]
+
+    def get_plugin(self, name, kind=None, *args, **kwargs):
+        """
+        Return plugin of the specified kind with the specified name. Any
+        additional parameters will be passed to the plugin's __init__.
+
+        """
+        name, base_kwargs = self.resolve_alias(name)
+        kwargs = OrderedDict(chain(base_kwargs.iteritems(), kwargs.iteritems()))
+        cls = self.get_plugin_class(name, kind)
+        plugin = cls(*args, **kwargs)
+        return plugin
+
+    def get_default_config(self, name):
+        """
+        Returns the default configuration for the specified plugin name. The
+        name may be an alias, in which case, the returned config will be
+        augmented with appropriate alias overrides.
+
+        """
+        real_name, alias_config = self.resolve_alias(name)
+        base_default_config = self.get_plugin_class(real_name).get_default_config()
+        return merge_dicts(base_default_config, alias_config, list_duplicates='last', dict_type=OrderedDict)
+
+    def list_plugins(self, kind=None):
+        """
+        List discovered plugin classes. Optionally, only list plugins of a
+        particular type.
+
+        """
+        if kind is None:
+            return self.plugins.values()
+        if kind not in self.kind_map:
+            raise ValueError('Unknown plugin type: {}'.format(kind))
+        return self.kind_map[kind].values()
+
+    def has_plugin(self, name, kind=None):
+        """
+        Returns ``True`` if a plugin with the specified ``name`` has been
+        discovered by the loader. If ``kind`` was specified, only returns ``True``
+        if the plugin has been found, *and* it is of the specified kind.
+
+        """
+        try:
+            self.get_plugin_class(name, kind)
+            return True
+        except NotFoundError:
+            return False
+
+    def resolve_alias(self, alias_name):
+        """
+        Try to resolve the specified name as a plugin alias. Returns a
+        two-tuple, the first value of which is the actual plugin name, and the
+        second is a dict of parameter values for this alias. If the name passed
+        is already a plugin name, then the result is ``(alias_name, {})``.
+
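+        For example (a sketch; the alias and parameter names are hypothetical)::
+
+            loader.resolve_alias('my_alias')  # ==> ('real_plugin', {'param': 'value'})
+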
+        """
+        alias_name = identifier(alias_name.lower())
+        if alias_name in self.plugins:
+            return (alias_name, {})
+        if alias_name in self.aliases:
+            alias = self.aliases[alias_name]
+            return (alias.plugin_name, alias.parameters)
+        raise NotFoundError('Could not find plugin or alias "{}"'.format(alias_name))
+
+    # Internal methods.
+
+    def __getattr__(self, name):
+        """
+        This resolves methods for specific plugin types based on the
+        corresponding generic plugin methods. So it's possible to say things like ::
+
+            loader.get_device('foo')
+
+        instead of ::
+
+            loader.get_plugin('foo', kind='device')
+
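+        The same pattern applies to the ``list_`` and ``has_`` prefixes, e.g.
+        ``loader.list_devices()`` and ``loader.has_device('foo')`` (assuming
+        'device' is a discovered plugin kind).
+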
+        """
+        if name.startswith('get_'):
+            name = name.replace('get_', '', 1)
+            if name in self.kind_map:
+                def __wrapper(pname, *args, **kwargs):
+                    return self.get_plugin(pname, name, *args, **kwargs)
+                return __wrapper
+        if name.startswith('list_'):
+            name = name.replace('list_', '', 1).rstrip('s')
+            if name in self.kind_map:
+                def __wrapper(*args, **kwargs):
+                    return self.list_plugins(name, *args, **kwargs)
+                return __wrapper
+        if name.startswith('has_'):
+            name = name.replace('has_', '', 1)
+            if name in self.kind_map:
+                def __wrapper(pname, *args, **kwargs):
+                    return self.has_plugin(pname, name, *args, **kwargs)
+                return __wrapper
+        raise AttributeError(name)
+
+    def _discover_from_packages(self, packages):
+        self.logger.debug('Discovering plugins in packages')
+        try:
+            for package in packages:
+                for module in walk_modules(package):
+                    self._discover_in_module(module)
+        except ImportError as e:
+            source = getattr(e, 'path', package)
+            message = 'Problem loading plugins from {}: {}'
+            raise PluginLoaderError(message.format(source, e.message))
+
+    def _discover_from_paths(self, paths, ignore_paths):
+        paths = paths or []
+        ignore_paths = ignore_paths or []
+        self.logger.debug('Discovering plugins in paths')
+        for path in paths:
+            self.logger.debug('Checking path %s', path)
+            if os.path.isfile(path):
+                self._discover_from_file(path)
+                continue
+            for root, _, files in os.walk(path, followlinks=True):
+                should_skip = False
+                for igpath in ignore_paths:
+                    if root.startswith(igpath):
+                        should_skip = True
+                        break
+                if should_skip:
+                    continue
+                for fname in files:
+                    if not os.path.splitext(fname)[1].lower() == '.py':
+                        continue
+                    filepath = os.path.join(root, fname)
+                    self._discover_from_file(filepath)
+
+    def _discover_from_file(self, filepath):
+        try:
+            modname = os.path.splitext(filepath[1:])[0].translate(MODNAME_TRANS)
+            module = imp.load_source(modname, filepath)
+            self._discover_in_module(module)
+        except (SystemExit, ImportError) as e:
+            if self.keep_going:
+                self.logger.warning('Failed to load {}'.format(filepath))
+                self.logger.warning('Got: {}'.format(e))
+            else:
+                raise PluginLoaderError('Failed to load {}'.format(filepath), sys.exc_info())
+        except Exception as e:
+            message = 'Problem loading plugins from {}: {}'
+            raise PluginLoaderError(message.format(filepath, e))
+
+    def _discover_in_module(self, module):  # NOQA pylint: disable=too-many-branches
+        self.logger.debug('Checking module %s', module.__name__)
+        log.indent()
+        try:
+            for obj in vars(module).itervalues():
+                if inspect.isclass(obj):
+                    if not issubclass(obj, Plugin):
+                        continue
+                    if not obj.kind:
+                        message = 'Skipping plugin {} as it does not define a kind'
+                        self.logger.debug(message.format(obj.__name__))
+                        continue
+                    if not obj.name:
+                        message = 'Skipping {} {} as it does not define a name'
+                        self.logger.debug(message.format(obj.kind, obj.__name__))
+                        continue
+                    try:
+                        self._add_found_plugin(obj)
+                    except PluginLoaderError as e:
+                        if self.keep_going:
+                            self.logger.warning(e)
+                        else:
+                            raise e
+        finally:
+            log.dedent()
+
+    def _add_found_plugin(self, obj):
+        """
+            :obj: Found plugin class
+            :ext: matching plugin item.
+        """
+        self.logger.debug('Adding %s %s', obj.kind, obj.name)
+        key = identifier(obj.name.lower())
+        if key in self.plugins or key in self.aliases:
+            raise PluginLoaderError('{} "{}" already exists.'.format(obj.kind, obj.name))
+        # Plugins are tracked both in a common plugins dict and in a
+        # per-kind dict (as retrieving plugins by kind is a common use
+        # case).
+        self.plugins[key] = obj
+        self.kind_map[obj.kind][key] = obj
+
+        for alias in obj.aliases:
+            alias_id = identifier(alias.name.lower())
+            if alias_id in self.plugins or alias_id in self.aliases:
+                message = 'Alias "{}" for {} "{}" already exists.'
+                raise PluginLoaderError(message.format(alias.name, obj.kind, obj.name))
+            self.aliases[alias_id] = alias
+
+        # Update global aliases list. If a global alias is already in the list,
+        # then make sure this plugin is in the same parent/child hierarchy
+        # as the one already found.
+        for param in obj.parameters:
+            if param.global_alias:
+                if param.global_alias not in self.global_param_aliases:
+                    ga = GlobalParameterAlias(param.global_alias)
+                    ga.update(obj)
+                    self.global_param_aliases[ga.name] = ga
+                else:  # global alias already exists.
+                    self.global_param_aliases[param.global_alias].update(obj)
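+
+# For example (a sketch; the alias name is hypothetical): if two plugins each
+# declare a parameter with global_alias='device_serial', then setting
+# 'device_serial' in global configuration applies to the corresponding
+# parameter of both plugins, provided the two declarations are compatible
+# (as verified when GlobalParameterAlias.update() is called above).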
diff --git a/wa/framework/pluginloader.py b/wa/framework/pluginloader.py
new file mode 100644
index 00000000..17924a4e
--- /dev/null
+++ b/wa/framework/pluginloader.py
@@ -0,0 +1,69 @@
+#    Copyright 2013-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import sys
+
+
+class __LoaderWrapper(object):
+
+    def __init__(self):
+        self._loader = None
+
+    def reset(self):
+        # These imports cannot be done at top level because of
+        # the sys.modules manipulation below.
+        from wa.framework.plugin import PluginLoader
+        from wa.framework.configuration.core import settings
+        self._loader = PluginLoader(settings.plugin_packages,
+                                    settings.plugin_paths,
+                                    settings.plugin_ignore_paths)
+
+    def update(self, packages=None, paths=None, ignore_paths=None):
+        if not self._loader: self.reset()
+        self._loader.update(packages, paths, ignore_paths)
+
+    def reload(self):
+        if not self._loader: self.reset()
+        self._loader.reload()
+
+    def list_plugins(self, kind=None):
+        if not self._loader: self.reset()
+        return self._loader.list_plugins(kind)
+
+    def has_plugin(self, name, kind=None):
+        if not self._loader: self.reset()
+        return self._loader.has_plugin(name, kind)
+
+    def get_plugin_class(self, name, kind=None):
+        if not self._loader: self.reset()
+        return self._loader.get_plugin_class(name, kind)
+
+    def get_plugin(self, name, kind=None, *args, **kwargs):
+        if not self._loader: self.reset()
+        return self._loader.get_plugin(name, kind=kind, *args, **kwargs)
+
+    def get_default_config(self, name):
+        if not self._loader: self.reset()
+        return self._loader.get_default_config(name)
+
+    def resolve_alias(self, name):
+        if not self._loader: self.reset()
+        return self._loader.resolve_alias(name)
+
+    def __getattr__(self, name):
+        if not self._loader: self.reset()
+        return getattr(self._loader, name)
+
+
+sys.modules[__name__] = __LoaderWrapper()
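+
+# Replacing this module in sys.modules with a wrapper instance means the
+# module itself can be used as if it were a loader object, with the real
+# PluginLoader created lazily on first access. For example (a sketch)::
+#
+#     from wa.framework import pluginloader
+#     workloads = pluginloader.list_plugins('workload')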
diff --git a/wa/framework/resource.py b/wa/framework/resource.py
new file mode 100644
index 00000000..abf77827
--- /dev/null
+++ b/wa/framework/resource.py
@@ -0,0 +1,711 @@
+#    Copyright 2013-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+import sys
+import glob
+import shutil
+import inspect
+import logging
+from collections import defaultdict
+
+from wa.framework import pluginloader
+from wa.framework.plugin import Plugin, Parameter
+from wa.framework.exception import ResourceError
+from wa.framework.configuration import settings
+from wa.utils.misc import ensure_directory_exists as _d
+from wa.utils.types import boolean
+from wa.utils.types import prioritylist
+
+
+class GetterPriority(object):
+    """
+    Enumerates standard ResourceGetter priorities. In general, getters should
+    register under one of these, rather than specifying other priority values.
+
+    :cached: The cached version of the resource. Look here first. This
+             priority also implies that the resource at this location is
+             a "cache" and is not the only version of the resource, so it
+             may be cleared without losing access to the resource.
+    :preferred: Take this resource in favour of the environment resource.
+    :environment: Found somewhere under ~/.workload_automation/ or
+                  equivalent, or from environment variables, external
+                  configuration files, etc. These will override resources
+                  supplied with the package.
+    :external_package: Resource provided by another package.
+    :package: Resource provided with the package.
+    :remote: Resource will be downloaded from a remote location (such as
+             an HTTP server or a samba share). Try this only if no other
+             getter was successful.
+
+    """
+    cached = 20
+    preferred = 10
+    environment = 0
+    external_package = -5
+    package = -10
+    remote = -20
+
+
+class Resource(object):
+    """
+    Represents a resource that needs to be resolved. This can be pretty much
+    anything: a file, environment variable, a Python object, etc. The only
+    thing a resource *has* to have is an owner (which would normally be the
+    Workload/Instrument/Device/etc object that needs the resource). In
+    addition, a resource may have any number of attributes to identify it,
+    but all of them are resource-type specific.
+
+    """
+
+    name = None
+
+    def __init__(self, owner):
+        self.owner = owner
+
+    def delete(self, instance):
+        """
+        Delete an instance of this resource type. This must be implemented
+        by the concrete subclasses based on what the resource looks like,
+        e.g. deleting a file or a directory tree, or removing an entry from
+        a database.
+
+        :note: Implementation should *not* contain any logic for deciding
+               whether or not a resource should be deleted, only the actual
+               deletion. The assumption is that if this method is invoked,
+               then the decision has already been made.
+
+        """
+        raise NotImplementedError()
+
+    def __str__(self):
+        return '<{}\'s {}>'.format(self.owner, self.name)
+
+
+class ResourceGetter(Plugin):
+    """
+    Base class for implementing resolvers. Defines resolver
+    interface. Resolvers are responsible for discovering resources (such as
+    particular kinds of files) they know about based on the parameters that are
+    passed to them. Each resolver also has a dict of attributes that describe
+    its operation, and may be used to determine which getters get invoked. There is
+    no pre-defined set of attributes and resolvers may define their own.
+
+    Class attributes:
+
+    :name: Name that uniquely identifies this getter. Must be set by any
+           concrete subclass.
+    :resource_type: Identifies resource type(s) that this getter can
+                    handle. This must be either a string (for a single type)
+                    or a list of strings for multiple resource types. This
+                    must be set by any concrete subclass.
+    :priority: Priority with which this getter will be invoked. This should
+               be one of the standard priorities specified in
+               ``GetterPriority`` enumeration. If not set, this will default
+               to ``GetterPriority.environment``.
+
+    """
+
+    name = None
+    kind = 'resource_getter'
+    resource_type = None
+    priority = GetterPriority.environment
+
+    def __init__(self, resolver, **kwargs):
+        super(ResourceGetter, self).__init__(**kwargs)
+        self.resolver = resolver
+
+    def register(self):
+        """
+        Registers with a resource resolver. Concrete implementations must
+        override this to invoke ``self.resolver.register()`` method to register
+        ``self`` for specific resource types.
+
+        """
+        if self.resource_type is None:
+            message = 'No resource type specified for {}'
+            raise ValueError(message.format(self.name))
+        elif isinstance(self.resource_type, list):
+            for rt in self.resource_type:
+                self.resolver.register(self, rt, self.priority)
+        else:
+            self.resolver.register(self, self.resource_type, self.priority)
+
+    def unregister(self):
+        """Unregister from a resource resolver."""
+        if self.resource_type is None:
+            message = 'No resource type specified for {}'
+            raise ValueError(message.format(self.name))
+        elif isinstance(self.resource_type, list):
+            for rt in self.resource_type:
+                self.resolver.unregister(self, rt)
+        else:
+            self.resolver.unregister(self, self.resource_type)
+
+    def get(self, resource, **kwargs):
+        """
+        This will get invoked by the resolver when attempting to resolve a
+        resource, passing in the resource to be resolved as the first
+        parameter. Any additional parameters would be specific to a particular
+        resource type.
+
+        This method will only be invoked for resource types that the getter has
+        registered for.
+
+        :param resource: an instance of :class:`wa.framework.resource.Resource`.
+
+        :returns: Implementations of this method must return either the
+                  discovered resource or ``None`` if the resource could not
+                  be discovered.
+
+        """
+        raise NotImplementedError()
+
+    def delete(self, resource, *args, **kwargs):
+        """
+        Delete the resource if it is discovered. All arguments are passed to a
+        call to ``self.get()``. If that call returns a resource, it is deleted.
+
+        :returns: ``True`` if the specified resource has been discovered
+                  and deleted, and ``False`` otherwise.
+
+        """
+        discovered = self.get(resource, *args, **kwargs)
+        if discovered:
+            resource.delete(discovered)
+            return True
+        else:
+            return False
+
+    def __str__(self):
+        return '<ResourceGetter {}>'.format(self.name)
+
+
+class ResourceResolver(object):
+    """
+    Discovers and registers getters, and then handles requests for
+    resources using registered getters.
+
+    """
+
+    def __init__(self):
+        self.logger = logging.getLogger('resolver')
+        self.getters = defaultdict(prioritylist)
+
+    def load(self, loader=pluginloader):
+        """
+        Discover getters under the specified source. The source could
+        be either a python package/module or a path.
+
+        """
+        for rescls in loader.list_resource_getters():
+            getter = loader.get_resource_getter(rescls.name, resolver=self)
+            getter.register()
+
+    def get(self, resource, strict=True, *args, **kwargs):
+        """
+        Uses registered getters to attempt to discover a resource of the specified
+        kind and matching the specified criteria. Returns path to the resource that
+        has been discovered. If a resource has not been discovered, this will raise
+        a ``ResourceError`` or, if ``strict`` has been set to ``False``, will return
+        ``None``.
+
+        """
+        self.logger.debug('Resolving {}'.format(resource))
+        for getter in self.getters[resource.name]:
+            self.logger.debug('Trying {}'.format(getter))
+            result = getter.get(resource, *args, **kwargs)
+            if result is not None:
+                self.logger.debug('Resource {} found using {}:'.format(resource, getter))
+                self.logger.debug('\t{}'.format(result))
+                return result
+        if strict:
+            raise ResourceError('{} could not be found'.format(resource))
+        self.logger.debug('Resource {} not found.'.format(resource))
+        return None
+
+    def register(self, getter, kind, priority=0):
+        """
+        Register the specified resource getter as being able to discover a resource
+        of the specified kind with the specified priority.
+
+        This method would typically be invoked from a getter's ``register()`` method.
+        The idea being that getters register themselves for resources they know
+        they can discover.
+
+        *priorities*
+
+        Getters that are registered with the highest priority will be invoked
+        first. If multiple getters are registered under the same priority, they
+        will be invoked in the order they were registered (i.e. in the order
+        they were discovered), which is essentially non-deterministic.
+
+        Generally getters that are more likely to find a resource, or would find a
+        "better" version of the resource should register with higher (positive) priorities.
+        Fall-back getters that should only be invoked if a resource is not found by usual
+        means should register with lower (negative) priorities.
+
+        """
+        self.logger.debug('Registering {}'.format(getter.name))
+        self.getters[kind].add(getter, priority)
+
+    def unregister(self, getter, kind):
+        """
+        Unregister a getter that has been registered earlier.
+
+        """
+        self.logger.debug('Unregistering {}'.format(getter.name))
+        try:
+            self.getters[kind].remove(getter)
+        except ValueError:
+            raise ValueError('Resource getter {} is not installed.'.format(getter.name))
+
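+# Typical usage (a sketch; the owner object and file path are hypothetical)::
+#
+#     resolver = ResourceResolver()
+#     resolver.load()
+#     path = resolver.get(File(owner, 'assets/config.json'), strict=False)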
+
+class __NullOwner(object):
+    """Represents an owner for a resource not owned by anyone."""
+
+    name = 'noone'
+    dependencies_directory = settings.dependencies_directory
+
+    def __getattr__(self, name):
+        return None
+
+    def __str__(self):
+        return 'no-one'
+
+    __repr__ = __str__
+
+
+NO_ONE = __NullOwner()
+
+
+class FileResource(Resource):
+    """
+    Base class for all resources that are a regular file in the
+    file system.
+
+    """
+
+    def delete(self, instance):
+        os.remove(instance)
+
+
+class File(FileResource):
+
+    name = 'file'
+
+    def __init__(self, owner, path, url=None):
+        super(File, self).__init__(owner)
+        self.path = path
+        self.url = url
+
+    def __str__(self):
+        return '<{}\'s {} {}>'.format(self.owner, self.name, self.path or self.url)
+
+
+class ExtensionAsset(File):
+
+    name = 'extension_asset'
+
+    def __init__(self, owner, path):
+        super(ExtensionAsset, self).__init__(
+            owner, os.path.join(owner.name, path))
+
+
+class Executable(FileResource):
+
+    name = 'executable'
+
+    def __init__(self, owner, platform, filename):
+        super(Executable, self).__init__(owner)
+        self.platform = platform
+        self.filename = filename
+
+    def __str__(self):
+        return '<{}\'s {} {}>'.format(self.owner, self.platform, self.filename)
+
+
+class ReventFile(FileResource):
+
+    name = 'revent'
+
+    def __init__(self, owner, stage):
+        super(ReventFile, self).__init__(owner)
+        self.stage = stage
+
+
+class JarFile(FileResource):
+
+    name = 'jar'
+
+
+class ApkFile(FileResource):
+
+    name = 'apk'
+
+
+class PackageFileGetter(ResourceGetter):
+
+    name = 'package_file'
+    description = """
+    Looks for exactly one file with the specified extension in the owner's
+    directory. If a version is specified on invocation of get, it will filter
+    the discovered file based on that version.  Versions are treated as
+    case-insensitive.
+    """
+
+    extension = None
+
+    def register(self):
+        self.resolver.register(self, self.extension, GetterPriority.package)
+
+    def get(self, resource, **kwargs):
+        resource_dir = os.path.dirname(
+            sys.modules[resource.owner.__module__].__file__)
+        version = kwargs.get('version')
+        return get_from_location_by_extension(resource, resource_dir, self.extension, version)
+
+
+class EnvironmentFileGetter(ResourceGetter):
+
+    name = 'environment_file'
+    description = """
+    Looks for exactly one file with the specified extension in the owner's
+    directory. If a version is specified on invocation of get, it will filter
+    the discovered file based on that version.  Versions are treated as
+    case-insensitive.
+    """
+
+    extension = None
+
+    def register(self):
+        self.resolver.register(self, self.extension,
+                               GetterPriority.environment)
+
+    def get(self, resource, **kwargs):
+        resource_dir = resource.owner.dependencies_directory
+        version = kwargs.get('version')
+        return get_from_location_by_extension(resource, resource_dir, self.extension, version)
+
+
+class ReventGetter(ResourceGetter):
+    """Implements logic for identifying revent files."""
+
+    def get_base_location(self, resource):
+        raise NotImplementedError()
+
+    def register(self):
+        self.resolver.register(self, 'revent', GetterPriority.package)
+
+    def get(self, resource, **kwargs):
+        filename = '.'.join([resource.owner.device.name,
+                             resource.stage, 'revent']).lower()
+        location = _d(os.path.join(
+            self.get_base_location(resource), 'revent_files'))
+        for candidate in os.listdir(location):
+            if candidate.lower() == filename.lower():
+                return os.path.join(location, candidate)
+
+
+class PackageApkGetter(PackageFileGetter):
+    name = 'package_apk'
+    extension = 'apk'
+
+
+class PackageJarGetter(PackageFileGetter):
+    name = 'package_jar'
+    extension = 'jar'
+
+
+class PackageReventGetter(ReventGetter):
+
+    name = 'package_revent'
+
+    def get_base_location(self, resource):
+        return _get_owner_path(resource)
+
+
+class EnvironmentApkGetter(EnvironmentFileGetter):
+    name = 'environment_apk'
+    extension = 'apk'
+
+
+class EnvironmentJarGetter(EnvironmentFileGetter):
+    name = 'environment_jar'
+    extension = 'jar'
+
+
+class EnvironmentReventGetter(ReventGetter):
+
+    name = 'environment_revent'
+
+    def get_base_location(self, resource):
+        return resource.owner.dependencies_directory
+
+
+class ExecutableGetter(ResourceGetter):
+
+    name = 'exe_getter'
+    resource_type = 'executable'
+    priority = GetterPriority.environment
+
+    def get(self, resource, **kwargs):
+        if settings.binaries_repository:
+            path = os.path.join(settings.binaries_repository,
+                                resource.platform, resource.filename)
+            if os.path.isfile(path):
+                return path
+
+
+class PackageExecutableGetter(ExecutableGetter):
+
+    name = 'package_exe_getter'
+    priority = GetterPriority.package
+
+    def get(self, resource, **kwargs):
+        path = os.path.join(_get_owner_path(resource), 'bin',
+                            resource.platform, resource.filename)
+        if os.path.isfile(path):
+            return path
+
+
+class EnvironmentExecutableGetter(ExecutableGetter):
+
+    name = 'env_exe_getter'
+
+    def get(self, resource, **kwargs):
+        paths = [
+            os.path.join(resource.owner.dependencies_directory, 'bin',
+                         resource.platform, resource.filename),
+            os.path.join(settings.environment_root, 'bin',
+                         resource.platform, resource.filename),
+        ]
+        for path in paths:
+            if os.path.isfile(path):
+                return path
+
+
+class DependencyFileGetter(ResourceGetter):
+
+    name = 'filer'
+    description = """
+    Gets resources from the specified mount point. Copies them to the local
+    dependencies directory, and returns the path to the local copy.
+
+    """
+    resource_type = 'file'
+    relative_path = ''  # May be overridden by subclasses.
+
+    default_mount_point = '/'
+    priority = GetterPriority.remote
+
+    parameters = [
+        Parameter('mount_point', default='/', global_alias='filer_mount_point',
+                  description='Local mount point for the remote filer.'),
+    ]
+
+    def __init__(self, resolver, **kwargs):
+        super(DependencyFileGetter, self).__init__(resolver, **kwargs)
+        self.mount_point = settings.filer_mount_point or self.default_mount_point
+
+    def get(self, resource, **kwargs):
+        force = kwargs.get('force')
+        remote_path = os.path.join(
+            self.mount_point, self.relative_path, resource.path)
+        local_path = os.path.join(
+            resource.owner.dependencies_directory, os.path.basename(resource.path))
+
+        if not os.path.isfile(local_path) or force:
+            if not os.path.isfile(remote_path):
+                return None
+            self.logger.debug('Copying {} to {}'.format(
+                remote_path, local_path))
+            shutil.copy(remote_path, local_path)
+
+        return local_path
+
+
+class PackageCommonDependencyGetter(ResourceGetter):
+
+    name = 'packaged_common_dependency'
+    resource_type = 'file'
+    priority = GetterPriority.package - 1  # check after owner-specific locations
+
+    def get(self, resource, **kwargs):
+        path = os.path.join(settings.package_directory,
+                            'common', resource.path)
+        if os.path.exists(path):
+            return path
+
+
+class EnvironmentCommonDependencyGetter(ResourceGetter):
+
+    name = 'environment_common_dependency'
+    resource_type = 'file'
+    # check after owner-specific locations
+    priority = GetterPriority.environment - 1
+
+    def get(self, resource, **kwargs):
+        path = os.path.join(settings.dependencies_directory,
+                            os.path.basename(resource.path))
+        if os.path.exists(path):
+            return path
+
+
+class PackageDependencyGetter(ResourceGetter):
+
+    name = 'packaged_dependency'
+    resource_type = 'file'
+    priority = GetterPriority.package
+
+    def get(self, resource, **kwargs):
+        owner_path = inspect.getfile(resource.owner.__class__)
+        path = os.path.join(os.path.dirname(owner_path), resource.path)
+        if os.path.exists(path):
+            return path
+
+
+class EnvironmentDependencyGetter(ResourceGetter):
+
+    name = 'environment_dependency'
+    resource_type = 'file'
+    priority = GetterPriority.environment
+
+    def get(self, resource, **kwargs):
+        path = os.path.join(resource.owner.dependencies_directory,
+                            os.path.basename(resource.path))
+        if os.path.exists(path):
+            return path
+
+
+class ExtensionAssetGetter(DependencyFileGetter):
+
+    name = 'extension_asset'
+    resource_type = 'extension_asset'
+    relative_path = 'workload_automation/assets'
+
+
+class RemoteFilerGetter(ResourceGetter):
+
+    name = 'filer_assets'
+    description = """
+    Finds resources on a (locally mounted) remote filer and caches them locally.
+
+    This assumes that the filer is mounted on the local machine (e.g. as a samba share).
+
+    """
+    priority = GetterPriority.remote
+    resource_type = ['apk', 'file', 'jar', 'revent']
+
+    parameters = [
+        Parameter('remote_path', global_alias='remote_assets_path', default='',
+                  description="""
+                  Path, on the local system, where the assets are located.
+                  """),
+        Parameter('always_fetch', kind=boolean, default=False, global_alias='always_fetch_remote_assets',
+                  description="""
+                  If ``True``, will always attempt to fetch assets from the
+                  remote, even if a local cached copy is available.
+                  """),
+    ]
+
+    def get(self, resource, **kwargs):
+        version = kwargs.get('version')
+        if resource.owner:
+            remote_path = os.path.join(self.remote_path, resource.owner.name)
+            local_path = os.path.join(
+                settings.environment_root, resource.owner.dependencies_directory)
+            return self.try_get_resource(resource, version, remote_path, local_path)
+        else:
+            result = None
+            for entry in os.listdir(self.remote_path):
+                remote_path = os.path.join(self.remote_path, entry)
+                local_path = os.path.join(
+                    settings.environment_root, settings.dependencies_directory, entry)
+                result = self.try_get_resource(
+                    resource, version, remote_path, local_path)
+                if result:
+                    break
+            return result
+
+    def try_get_resource(self, resource, version, remote_path, local_path):
+        if not self.always_fetch:
+            result = self.get_from(resource, version, local_path)
+            if result:
+                return result
+        if remote_path:
+            # Didn't find it cached locally; now check the remote
+            result = self.get_from(resource, version, remote_path)
+            if not result:
+                return result
+        else:  # remote path is not set
+            return None
+        # Found it remotely, cache locally, then return it
+        local_full_path = os.path.join(
+            _d(local_path), os.path.basename(result))
+        self.logger.debug('cp {} {}'.format(result, local_full_path))
+        shutil.copy(result, local_full_path)
+        return local_full_path
+
+    def get_from(self, resource, version, location):  # pylint: disable=no-self-use
+        if resource.name in ['apk', 'jar']:
+            return get_from_location_by_extension(resource, location, resource.name, version)
+        elif resource.name == 'file':
+            filepath = os.path.join(location, resource.path)
+            if os.path.exists(filepath):
+                return filepath
+        elif resource.name == 'revent':
+            filename = '.'.join(
+                [resource.owner.device.name, resource.stage, 'revent']).lower()
+            alternate_location = os.path.join(location, 'revent_files')
+            # There tends to be some confusion as to where revent files should
+            # be placed. This looks both in the extension's directory, and in
+            # 'revent_files' subdirectory under it, if it exists.
+            if os.path.isdir(alternate_location):
+                for candidate in os.listdir(alternate_location):
+                    if candidate.lower() == filename.lower():
+                        return os.path.join(alternate_location, candidate)
+            if os.path.isdir(location):
+                for candidate in os.listdir(location):
+                    if candidate.lower() == filename.lower():
+                        return os.path.join(location, candidate)
+        else:
+            message = 'Unexpected resource type: {}'.format(resource.name)
+            raise ValueError(message)
+
+
+# Utility functions
+
+def get_from_location_by_extension(resource, location, extension, version=None):
+    found_files = glob.glob(os.path.join(location, '*.{}'.format(extension)))
+    if version:
+        found_files = [ff for ff in found_files 
+                       if version.lower() in os.path.basename(ff).lower()]
+    if len(found_files) == 1:
+        return found_files[0]
+    elif not found_files:
+        return None
+    else:
+        raise ResourceError('More than one .{} found in {} for {}.'.format(extension,
+                                                                           location,
+                                                                           resource.owner.name))
+
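+# For example, get_from_location_by_extension(resource, '/some/dir', 'apk',
+# version='4.2') returns the single '/some/dir/*.apk' whose file name contains
+# '4.2' (case-insensitive), returns None if there is no match, and raises
+# ResourceError if the match is ambiguous.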
+
+def _get_owner_path(resource):
+    if resource.owner is NO_ONE:
+        return os.path.join(os.path.dirname(__file__), 'common')
+    else:
+        return os.path.dirname(sys.modules[resource.owner.__module__].__file__)
diff --git a/wa/framework/run.py b/wa/framework/run.py
new file mode 100644
index 00000000..90e717e9
--- /dev/null
+++ b/wa/framework/run.py
@@ -0,0 +1,355 @@
+#    Copyright 2013-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+import uuid
+import logging
+from copy import copy
+from datetime import datetime, timedelta
+from collections import OrderedDict
+
+from wa.framework import signal, pluginloader, log
+from wa.framework.plugin import Plugin
+from wa.framework.output import Status
+from wa.framework.resource import ResourceResolver
+from wa.framework.exception import JobError
+from wa.utils import counter
+from wa.utils.serializer import json
+from wa.utils.misc import ensure_directory_exists as _d
+from wa.utils.types import TreeNode, caseless_string
+
+
+class JobActor(object):
+
+    def get_config(self):
+        return {}
+
+    def initialize(self, context):
+        pass
+
+    def run(self):
+        pass
+
+    def finalize(self):
+        pass
+
+    def restart(self):
+        pass
+
+    def complete(self):
+        pass
+
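+# Concrete actors subclass JobActor and override the lifecycle methods.
+# A minimal sketch (the class and its behaviour are hypothetical)::
+#
+#     class SleepActor(JobActor):
+#
+#         def get_config(self):
+#             return {'duration': 1}
+#
+#         def run(self):
+#             import time
+#             time.sleep(1)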
+
+class RunnerJob(object):
+
+    @property
+    def status(self):
+        return self.output.status
+
+    @status.setter
+    def status(self, value):
+        self.output.status = value
+
+    @property
+    def should_retry(self):
+        return self.attempt <= self.max_retries
+
+    def __init__(self, id, actor, output, max_retries):
+        self.id = id
+        self.actor = actor
+        self.output = output
+        self.max_retries = max_retries
+        self.status = Status.NEW
+        self.attempt = 0
+
+    def initialize(self, context):
+        self.actor.initialize(context)
+        self.status = Status.PENDING
+
+    def run(self):
+        self.status = Status.RUNNING
+        self.attempt += 1
+        self.output.config = self.actor.get_config()
+        self.output.initialize()
+        self.actor.run()
+        self.status = Status.COMPLETE
+
+    def finalize(self):
+        self.actor.finalize()
+
+    def restart(self):
+        self.actor.restart()
+
+    def complete(self):
+        self.actor.complete()
+
+
+__run_methods = set()
+
+
+def runmethod(method):
+    """
+    A method decorator that ensures that a method is invoked only once per run.
+
+    """
+    def _method_wrapper(*args, **kwargs):
+        if method in __run_methods:
+            return
+        __run_methods.add(method)
+        ret = method(*args, **kwargs)
+        if ret is not None:
+            message = 'runmethods must return None; method "{}" returned "{}"'
+            raise RuntimeError(message.format(method, ret))
+    return _method_wrapper
+
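+# Example usage (a sketch; the function is hypothetical)::
+#
+#     @runmethod
+#     def log_version():
+#         print 'WA version: ...'  # executes at most once per run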
+
+def reset_runmethods():
+    global __run_methods
+    __run_methods = set()
+
+
+class Runner(object):
+
+    @property
+    def info(self):
+        return self.output.info
+
+    @property
+    def status(self):
+        return self.output.status
+
+    @status.setter
+    def status(self, value):
+        self.output.status = value
+
+    @property
+    def jobs_pending(self):
+        return len(self.job_queue) > 0
+
+    @property
+    def current_job(self):
+        if self.job_queue:
+            return self.job_queue[0]
+
+    @property
+    def previous_job(self):
+        if self.completed_jobs:
+            return self.completed_jobs[-1]
+
+    @property
+    def next_job(self):
+        if len(self.job_queue) > 1:
+            return self.job_queue[1]
+
+    def __init__(self, output):
+        self.logger = logging.getLogger('runner')
+        self.output = output
+        self.context = RunContext(self)
+        self.status = Status.NEW
+        self.job_queue = []
+        self.completed_jobs = []
+        self._known_ids = set([])
+
+    def add_job(self, job_id, actor, max_retries=2):
+        job_id = caseless_string(job_id)
+        if job_id in self._known_ids:
+            raise JobError('Job with id "{}" already exists'.format(job_id))
+        output = self.output.create_job_output(job_id)
+        self.job_queue.append(RunnerJob(job_id, actor, output, max_retries))
+        self._known_ids.add(job_id)
+
+    def initialize(self):
+        self.logger.info('Initializing run')
+        self.start_time = datetime.now()
+        if not self.info.start_time:
+            self.info.start_time = self.start_time
+            self.info.duration = timedelta()
+
+        self.context.initialize()
+        for job in self.job_queue:
+            job.initialize(self.context)
+        self.persist_state()
+        self.logger.info('Run initialized')
+
+    def run(self):
+        self.status = Status.RUNNING
+        reset_runmethods()
+        signal.send(signal.RUN_STARTED, self, self.context)
+        self.initialize()
+        signal.send(signal.RUN_INITIALIZED, self, self.context)
+        self.run_jobs()
+        signal.send(signal.RUN_COMPLETED, self, self.context)
+        self.finalize()
+        signal.send(signal.RUN_FINALIZED, self, self.context)
+
+    def run_jobs(self):
+        try:
+            self.logger.info('Running jobs')
+            while self.jobs_pending:
+                self.begin_job()
+                log.indent()
+                try:
+                    self.current_job.run()
+                except KeyboardInterrupt:
+                    self.current_job.status = Status.ABORTED
+                    signal.send(signal.JOB_ABORTED, self, self.current_job)
+                    raise
+                except Exception as e:
+                    self.current_job.status = Status.FAILED
+                    log.log_error(e, self.logger)
+                    signal.send(signal.JOB_FAILED, self, self.current_job)
+                else:
+                    self.current_job.status = Status.COMPLETE
+                finally:
+                    log.dedent()
+                    self.complete_job()
+        except KeyboardInterrupt:
+            self.status = Status.ABORTED
+            while self.job_queue:
+                job = self.job_queue.pop(0)
+                job.status = Status.ABORTED
+                self.completed_jobs.append(job)
+            signal.send(signal.RUN_ABORTED, self, self)
+            raise
+        except Exception as e:
+            self.status = Status.FAILED
+            log.log_error(e, self.logger)
+            signal.send(signal.RUN_FAILED, self, self)
+        else:
+            self.status = Status.COMPLETE
+
+    def finalize(self):
+        self.logger.info('Finalizing run')
+        for job in self.job_queue:
+            job.finalize()
+        self.end_time = datetime.now()
+        self.info.end_time = self.end_time
+        self.info.duration += self.end_time - self.start_time
+        self.persist_state()
+        signal.send(signal.RUN_FINALIZED, self, self)
+        self.logger.info('Run completed')
+
+    def begin_job(self):
+        self.logger.info('Starting job {}'.format(self.current_job.id))
+        signal.send(signal.JOB_STARTED, self, self.current_job)
+        self.persist_state()
+
+    def complete_job(self):
+        if self.current_job.status == Status.FAILED:
+            self.output.move_failed(self.current_job.output)
+            if self.current_job.should_retry:
+                self.logger.info('Restarting job {}'.format(self.current_job.id))
+                self.persist_state()
+                self.current_job.restart()
+                signal.send(signal.JOB_RESTARTED, self, self.current_job)
+                return
+
+        self.logger.info('Completing job {}'.format(self.current_job.id))
+        self.current_job.complete()
+        self.persist_state()
+        signal.send(signal.JOB_COMPLETED, self, self.current_job)
+        job = self.job_queue.pop(0)
+        self.completed_jobs.append(job)
+
+    def persist_state(self):
+        self.output.persist()
+
+
+class RunContext(object):
+    """
+    Provides a context for instrumentation. Keeps track of things like
+    current workload and iteration.
+
+    """
+
+    @property
+    def run_output(self):
+        return self.runner.output
+
+    @property
+    def current_job(self):
+        return self.runner.current_job
+
+    @property
+    def run_output_directory(self):
+        return self.run_output.output_directory
+
+    @property
+    def output_directory(self):
+        if self.runner.current_job:
+            return self.runner.current_job.output.output_directory
+        else:
+            return self.run_output.output_directory
+
+    @property
+    def info_directory(self):
+        return self.run_output.info_directory
+
+    @property
+    def config_directory(self):
+        return self.run_output.config_directory
+
+    @property
+    def failed_directory(self):
+        return self.run_output.failed_directory
+
+    @property
+    def log_file(self):
+        return os.path.join(self.output_directory, 'run.log')
+
+    def __init__(self, runner):
+        self.runner = runner
+        self.job = None
+        self.iteration = None
+        self.job_output = None
+        self.resolver = ResourceResolver()
+
+    def initialize(self):
+        self.resolver.load()
+
+    def get_path(self, subpath):
+        if self.current_job is None:
+            return self.run_output.get_path(subpath)
+        else:
+            return self.current_job.output.get_path(subpath)
+
+    def add_metric(self, *args, **kwargs):
+        if self.current_job is None:
+            self.run_output.add_metric(*args, **kwargs)
+        else:
+            self.current_job.output.add_metric(*args, **kwargs)
+
+    def add_artifact(self, name, path, kind, *args, **kwargs):
+        if self.current_job is None:
+            self.add_run_artifact(name, path, kind, *args, **kwargs)
+        else:
+            self.add_job_artifact(name, path, kind, *args, **kwargs)
+
+    def add_run_artifact(self, *args, **kwargs):
+        self.run_output.add_artifact(*args, **kwargs)
+
+    def add_job_artifact(self, *args, **kwargs):
+        self.current_job.output.add_artifact(*args, **kwargs)
+
+    def get_artifact(self, name):
+        # Check the current job's artifacts first (if a job is running),
+        # then fall back to the run-level artifacts. (This assumes the
+        # output objects expose an ``artifacts`` list.)
+        if self.current_job:
+            for art in self.current_job.output.artifacts:
+                if art.name == name:
+                    return art
+        for art in self.run_output.artifacts:
+            if art.name == name:
+                return art
+        return None
+
diff --git a/wa/framework/signal.py b/wa/framework/signal.py
new file mode 100644
index 00000000..1f9a5024
--- /dev/null
+++ b/wa/framework/signal.py
@@ -0,0 +1,287 @@
+#    Copyright 2013-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+"""
+This module wraps the louie signalling mechanism. It relies on a modified
+version of louie that has prioritization added to handler invocation.
+
+"""
+import logging
+from contextlib import contextmanager
+
+from louie import dispatcher
+
+from wa.utils.types import prioritylist
+
+
+logger = logging.getLogger('dispatcher')
+
+
+class Signal(object):
+    """
+    This class implements the signals to be used for notifying callbacks
+    registered to respond to different states and stages of the execution
+    of workload automation.
+
+    """
+
+    def __init__(self, name, description='no description', invert_priority=False):
+        """
+        Instantiates a Signal.
+
+            :param name: name is the identifier of the Signal object. Signal instances
+                        with the same name refer to the same execution stage/state.
+            :param invert_priority: boolean parameter that determines whether multiple
+                                    callbacks for the same signal should be ordered with
+                                    ascending or descending priorities. Typically this flag
+                                    should be set to True if the Signal is triggered AFTER
+                                    a state/stage has been reached. That way callbacks with
+                                    high priorities will be called right after the event has
+                                    occurred.
+        """
+        self.name = name
+        self.description = description
+        self.invert_priority = invert_priority
+
+    def __str__(self):
+        return self.name
+
+    __repr__ = __str__
+
+    def __hash__(self):
+        return hash(self.name)
+
+
+RUN_STARTED = Signal('run-started', 'sent at the beginning of the run')
+RUN_INITIALIZED = Signal('run-initialized', 'sent after the run has been initialized')
+RUN_ABORTED = Signal('run-aborted', 'sent when the run has been aborted due to a keyboard interrupt')
+RUN_FAILED = Signal('run-failed', 'sent if the run has failed to complete all jobs')
+RUN_COMPLETED = Signal('run-completed', 'sent upon completion of the run (regardless of whether or not it has failed)')
+RUN_FINALIZED = Signal('run-finalized', 'sent after the run has been finalized')
+
+JOB_STARTED = Signal('job-started', 'sent when a new job has been started')
+JOB_ABORTED = Signal('job-aborted',
+                     description='''
+                     sent if a job has been aborted due to a keyboard interrupt. 
+
+                     .. note:: While the status of every job that has not had a chance to run
+                               due to being interrupted will be set to "ABORTED", this signal will
+                               only be sent for the job that was actually running at the time.
+
+                     ''')
+JOB_FAILED = Signal('job-failed', description='sent if the job has failed')
+JOB_RESTARTED = Signal('job-restarted')
+JOB_COMPLETED = Signal('job-completed')
+JOB_FINALIZED = Signal('job-finalized')
+
+ERROR_LOGGED = Signal('error-logged')
+WARNING_LOGGED = Signal('warning-logged')
+
+# These are paired events -- if the before_ signal is sent, the after_ signal is
+# guaranteed to also be sent. In particular, the after_ signals will be sent
+# even if there is an error, so you cannot assume in the handler that the
+# device has booted successfully. In most cases, you should instead use the
+# non-paired signals below.
+BEFORE_FLASHING = Signal('before-flashing', invert_priority=True)
+SUCCESSFUL_FLASHING = Signal('successful-flashing')
+AFTER_FLASHING = Signal('after-flashing')
+
+BEFORE_BOOT = Signal('before-boot', invert_priority=True)
+SUCCESSFUL_BOOT = Signal('successful-boot')
+AFTER_BOOT = Signal('after-boot')
+
+BEFORE_TARGET_CONNECT = Signal('before-target-connect', invert_priority=True)
+SUCCESSFUL_TARGET_CONNECT = Signal('successful-target-connect')
+AFTER_TARGET_CONNECT = Signal('after-target-connect')
+
+BEFORE_TARGET_DISCONNECT = Signal('before-target-disconnect', invert_priority=True)
+SUCCESSFUL_TARGET_DISCONNECT = Signal('successful-target-disconnect')
+AFTER_TARGET_DISCONNECT = Signal('after-target-disconnect')
+
+BEFORE_WORKLOAD_SETUP = Signal(
+    'before-workload-setup', invert_priority=True)
+SUCCESSFUL_WORKLOAD_SETUP = Signal('successful-workload-setup')
+AFTER_WORKLOAD_SETUP = Signal('after-workload-setup')
+
+BEFORE_WORKLOAD_EXECUTION = Signal(
+    'before-workload-execution', invert_priority=True)
+SUCCESSFUL_WORKLOAD_EXECUTION = Signal('successful-workload-execution')
+AFTER_WORKLOAD_EXECUTION = Signal('after-workload-execution')
+
+BEFORE_WORKLOAD_RESULT_UPDATE = Signal(
+    'before-workload-result-update', invert_priority=True)
+SUCCESSFUL_WORKLOAD_RESULT_UPDATE = Signal(
+    'successful-workload-result-update')
+AFTER_WORKLOAD_RESULT_UPDATE = Signal('after-workload-result-update')
+
+BEFORE_WORKLOAD_TEARDOWN = Signal(
+    'before-workload-teardown', invert_priority=True)
+SUCCESSFUL_WORKLOAD_TEARDOWN = Signal('successful-workload-teardown')
+AFTER_WORKLOAD_TEARDOWN = Signal('after-workload-teardown')
+
+BEFORE_OVERALL_RESULTS_PROCESSING = Signal(
+    'before-overall-results-process', invert_priority=True)
+SUCCESSFUL_OVERALL_RESULTS_PROCESSING = Signal(
+    'successful-overall-results-process')
+AFTER_OVERALL_RESULTS_PROCESSING = Signal(
+    'after-overall-results-process')
+
+
+class CallbackPriority(object):
+
+    EXTREMELY_HIGH = 30
+    VERY_HIGH = 20
+    HIGH = 10
+    NORMAL = 0
+    LOW = -10
+    VERY_LOW = -20
+    EXTREMELY_LOW = -30
+
+    def __init__(self):
+        raise ValueError('Cannot instantiate')
+
+
+class _prioritylist_wrapper(prioritylist):
+    """
+    This adds a NOP append() method so that when louie invokes it to add the
+    handler to receivers, nothing will happen; the handler is actually added inside
+    the connect() below according to priority, before louie's connect() gets invoked.
+
+    """
+
+    def append(self, *args, **kwargs):
+        pass
+
+
+def connect(handler, signal, sender=dispatcher.Any, priority=0):
+    """
+    Connects a callback to a signal, so that the callback will be automatically invoked
+    when that signal is sent.
+
+    Parameters:
+
+        :handler: This can be any callable that takes the right arguments for
+                  the signal. For most signals this means a single argument that
+                  will be an ``ExecutionContext`` instance. But please see documentation
+                  for individual signals in the :ref:`signals reference <instrumentation_method_map>`.
+        :signal: The signal to which the handler will be subscribed. Please see
+                 :ref:`signals reference <instrumentation_method_map>` for the list of standard WA
+                 signals.
+
+                 .. note:: There is nothing that prevents instrumentation from sending their
+                           own signals that are not part of the standard set. However the signal
+                           must always be a :class:`wa.framework.signal.Signal` instance.
+
+        :sender: The handler will be invoked only for the signals emitted by this sender. By
+                 default, this is set to :class:`louie.dispatcher.Any`, so the handler will
+                 be invoked for signals from any sender.
+        :priority: An integer (positive or negative) that specifies the priority of the handler.
+                   Handlers with higher priority will be called before handlers with lower
+                   priority. The call order of handlers with the same priority is not specified.
+                   Defaults to 0.
+
+                   .. note:: Priorities for some signals are inverted (so highest priority
+                             handlers get executed last). Please see :ref:`signals reference <instrumentation_method_map>`
+                             for details.
+
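+    Example (a minimal sketch; ``handler_func`` is a hypothetical name, and the
+    import path assumes this module lives at wa/framework/signal.py)::
+
+        import wa.framework.signal as signal
+
+        def handler_func(context):
+            # invoked with the current ExecutionContext
+            print context
+
+        signal.connect(handler_func, signal.BEFORE_WORKLOAD_EXECUTION,
+                       priority=signal.CallbackPriority.HIGH)
+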
+    """
+    if getattr(signal, 'invert_priority', False):
+        priority = -priority
+    senderkey = id(sender)
+    if senderkey in dispatcher.connections:
+        signals = dispatcher.connections[senderkey]
+    else:
+        dispatcher.connections[senderkey] = signals = {}
+    if signal in signals:
+        receivers = signals[signal]
+    else:
+        receivers = signals[signal] = _prioritylist_wrapper()
+    receivers.add(handler, priority)
+    dispatcher.connect(handler, signal, sender)
+
+
+def disconnect(handler, signal, sender=dispatcher.Any):
+    """
+    Disconnect a previously connected handler from the specified signal,
+    optionally only for the specified sender.
+
+    Parameters:
+
+        :handler: The callback to be disconnected.
+        :signal: The signal the handler is to be disconnected from. This will
+                 be a :class:`wa.framework.signal.Signal` instance.
+        :sender: If specified, the handler will only be disconnected from the signal
+                 sent by this sender.
+
+    """
+    dispatcher.disconnect(handler, signal, sender)
+
+
+def send(signal, sender=dispatcher.Anonymous, *args, **kwargs):
+    """
+    Sends a signal, causing connected handlers to be invoked.
+
+    Parameters:
+
+        :signal: Signal to be sent. This must be an instance of
+                 :class:`wa.framework.signal.Signal` or one of its subclasses.
+        :sender: The sender of the signal (typically, this would be ``self``). Some handlers may only
+                 be subscribed to signals from a particular sender.
+
+        The rest of the parameters will be passed on as arguments to the handler.
+
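+    For example, from within the runner (a sketch; ``context`` stands in for
+    the current ``ExecutionContext``)::
+
+        send(BEFORE_WORKLOAD_EXECUTION, self, context)
+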
+    """
+    return dispatcher.send(signal, sender, *args, **kwargs)
+
+
+# This will normally be set to log_error() by init_logging(); see wa/framework/log.py.
+# Done this way to prevent a circular import dependency.
+log_error_func = logger.error
+
+
+def safe_send(signal, sender=dispatcher.Anonymous,
+              propagate=(KeyboardInterrupt,), *args, **kwargs):
+    """
+    Same as ``send``, except this will catch and log all exceptions raised
+    by handlers, except those whose types are listed in the ``propagate``
+    argument (defaults to just ``(KeyboardInterrupt,)``).
+    """
+    try:
+        send(signal, sender, *args, **kwargs)
+    except Exception as e:
+        if any(isinstance(e, p) for p in propagate):
+            raise
+        log_error_func(e)
+
+
+@contextmanager
+def wrap(signal_name, sender=dispatcher.Anonymous, safe=False, *args, **kwargs):
+    """Wraps the suite in before/after signals, ensuring
+    that after signal is always sent."""
+    signal_name = signal_name.upper().replace('-', '_')
+    send_func = safe_send if safe else send
+    try:
+        before_signal = globals()['BEFORE_' + signal_name]
+        success_signal = globals()['SUCCESSFUL_' + signal_name]
+        after_signal = globals()['AFTER_' + signal_name]
+    except KeyError:
+        raise ValueError('Invalid wrapped signal name: {}'.format(signal_name))
+    try:
+        send_func(before_signal, sender, *args, **kwargs)
+        yield
+        send_func(success_signal, sender, *args, **kwargs)
+    finally:
+        send_func(after_signal, sender, *args, **kwargs)
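+
+# Example usage of wrap() (a sketch; 'context' stands in for the current
+# ExecutionContext and is forwarded to the connected handlers):
+#
+#     with wrap('workload-execution', sender=self, context=context):
+#         workload.run(context)
+#
+# This sends BEFORE_WORKLOAD_EXECUTION on entry, SUCCESSFUL_WORKLOAD_EXECUTION
+# if the block completes without raising, and AFTER_WORKLOAD_EXECUTION always.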
+
diff --git a/wa/framework/version.py b/wa/framework/version.py
new file mode 100644
index 00000000..66543332
--- /dev/null
+++ b/wa/framework/version.py
@@ -0,0 +1,27 @@
+#    Copyright 2014-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from collections import namedtuple
+
+
+VersionTuple = namedtuple('Version', ['major', 'minor', 'revision'])
+
+version = VersionTuple(3, 0, 0)
+
+
+def get_wa_version():
+    version_string = '{}.{}.{}'.format(
+        version.major, version.minor, version.revision)
+    return version_string
diff --git a/wa/framework/workload.py b/wa/framework/workload.py
new file mode 100644
index 00000000..b6defc03
--- /dev/null
+++ b/wa/framework/workload.py
@@ -0,0 +1,281 @@
+#    Copyright 2014-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+import time
+import logging
+
+from wa.framework.plugin import TargetedPlugin
+from wa.framework.resource import (JarFile, ApkFile, ReventFile,
+                                   Executable, NO_ONE)
+from wa.framework.exception import WorkloadError
+
+from devlib.utils.android import ApkInfo
+
+
+# Pause (in seconds) after UI automation completes, to let the UI settle.
+# The value is an assumed default.
+DELAY = 5
+
+
+class Workload(TargetedPlugin):
+    """
+    This is the base class for the workloads executed by the framework.
+    Derived classes are expected to override the lifecycle methods below
+    (at a minimum, ``run()``) as appropriate for their workload.
+    """
+
+    kind = 'workload'
+
+    def init_resources(self, context):
+        """
+        This method may be used to perform early resource discovery and initialization. This is invoked
+        during the initial loading stage and before the device is ready, so cannot be used for any
+        device-dependent initialization. This method is invoked before the workload instance is
+        validated.
+
+        """
+        pass
+
+    def initialize(self, context):
+        """
+        This method should be used to perform once-per-run initialization of a
+        workload instance, i.e., unlike ``setup()`` it will not be invoked on
+        each iteration.
+        """
+        pass
+
+    def setup(self, context):
+        """
+        Perform the setup necessary to run the workload, such as copying the
+        necessary files to the device, configuring the environments, etc.
+
+        This is also the place to perform any on-device checks prior to
+        attempting to execute the workload.
+        """
+        pass
+
+    def run(self, context):
+        """Execute the workload. This is the method that performs the actual "work" of the"""
+        pass
+
+    def update_result(self, context):
+        """
+        Update the result within the specified execution context with the
+        metrics from this workload iteration.
+
+        """
+        pass
+
+    def teardown(self, context):
+        """ Perform any final clean up for the Workload. """
+        pass
+
+    def finalize(self, context):
+        pass
+
+    def __str__(self):
+        return '<Workload {}>'.format(self.name)
+
+
+class UiAutomatorGUI(object):
+
+    def __init__(self, target, package='', klass='UiAutomation',
+                 method='runUiAutomation', timeout=10 * 60):
+        self.logger = logging.getLogger('uiauto')
+        self.target = target
+        self.uiauto_package = package
+        self.uiauto_class = klass
+        self.uiauto_method = method
+        self.run_timeout = timeout
+        self.uiauto_file = None
+        self.target_uiauto_file = None
+        self.command = None
+        self.uiauto_params = {}
+
+    def init_resources(self, context):
+        self.uiauto_file = context.resolver.get(JarFile(self))
+        self.target_uiauto_file = self.target.path.join(self.target.working_directory,
+                                                        os.path.basename(self.uiauto_file))
+        if not self.uiauto_package:
+            self.uiauto_package = os.path.splitext(os.path.basename(self.uiauto_file))[0]
+
+    def validate(self):
+        if not self.uiauto_file:
+            raise WorkloadError('No UI automation JAR file found for workload {}.'.format(self.name))
+        if not self.uiauto_package:
+            raise WorkloadError('No UI automation package specified for workload {}.'.format(self.name))
+
+    def setup(self, context):
+        method_string = '{}.{}#{}'.format(self.uiauto_package, self.uiauto_class, self.uiauto_method)
+        params_dict = self.uiauto_params
+        params_dict['workdir'] = self.target.working_directory
+        params = ''
+        for k, v in params_dict.iteritems():
+            params += ' -e {} {}'.format(k, v)
+        self.command = 'uiautomator runtest {}{} -c {}'.format(self.target_uiauto_file, params, method_string)
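+        # The assembled command looks like (illustrative values):
+        #   uiautomator runtest /data/local/tmp/com.example.uiauto.jar \
+        #       -e workdir /data/local/tmp -c com.example.uiauto.UiAutomation#runUiAutomation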
+        self.target.push(self.uiauto_file, self.target_uiauto_file)
+        self.target.killall('uiautomator')
+
+    def run(self, context):
+        result = self.target.execute(self.command, self.run_timeout)
+        if 'FAILURE' in result:
+            raise WorkloadError(result)
+        else:
+            self.logger.debug(result)
+        time.sleep(DELAY)
+
+    def teardown(self, context):
+        self.target.remove(self.target_uiauto_file)
+
+
+class ReventGUI(object):
+
+    def __init__(self, workload, target, setup_timeout=5 * 60, run_timeout=10 * 60):
+        self.workload = workload
+        self.target = target
+        self.setup_timeout = setup_timeout
+        self.run_timeout = run_timeout
+        self.on_target_revent_binary = self.target.get_workpath('revent')
+        self.on_target_setup_revent = self.target.get_workpath('{}.setup.revent'.format(self.target.name))
+        self.on_target_run_revent = self.target.get_workpath('{}.run.revent'.format(self.target.name))
+        self.logger = logging.getLogger('revent')
+        self.revent_setup_file = None
+        self.revent_run_file = None
+
+    def init_resources(self, context):
+        self.revent_setup_file = context.resolver.get(ReventFile(self.workload, 'setup'))
+        self.revent_run_file = context.resolver.get(ReventFile(self.workload, 'run'))
+
+    def setup(self, context):
+        self._check_revent_files(context)
+        self.target.killall('revent')
+        command = '{} replay {}'.format(self.on_target_revent_binary, self.on_target_setup_revent)
+        self.target.execute(command, timeout=self.setup_timeout)
+
+    def run(self, context):
+        command = '{} replay {}'.format(self.on_target_revent_binary, self.on_target_run_revent)
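+        # e.g.: /data/local/tmp/revent replay /data/local/tmp/juno.run.revent
+        # (illustrative paths; actual locations come from get_workpath() above)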
+        self.logger.debug('Replaying {}'.format(os.path.basename(self.on_target_run_revent)))
+        self.target.execute(command, timeout=self.run_timeout)
+        self.logger.debug('Replay completed.')
+
+    def teardown(self, context):
+        self.target.remove(self.on_target_setup_revent)
+        self.target.remove(self.on_target_run_revent)
+
+    def _check_revent_files(self, context):
+        # check the revent binary
+        revent_binary = context.resolver.get(Executable(NO_ONE, self.target.abi, 'revent'))
+        if not os.path.isfile(revent_binary):
+            message = '{} does not exist. '.format(revent_binary)
+            message += 'Please build revent for your system and place it in that location.'
+            raise WorkloadError(message)
+        if not self.revent_setup_file:
+            # pylint: disable=too-few-format-args
+            message = '{0}.setup.revent file does not exist. Please provide one for your target ({0}).'
+            raise WorkloadError(message.format(self.target.name))
+        if not self.revent_run_file:
+            # pylint: disable=too-few-format-args
+            message = '{0}.run.revent file does not exist. Please provide one for your target ({0}).'
+            raise WorkloadError(message.format(self.target.name))
+
+        self.on_target_revent_binary = self.target.install(revent_binary)
+        self.target.push(self.revent_run_file, self.on_target_run_revent)
+        self.target.push(self.revent_setup_file, self.on_target_setup_revent)
+
+
+class ApkHandler(object):
+
+    def __init__(self, owner, target, view, install_timeout=300, version=None,
+                 strict=True, force_install=False, uninstall=False):
+        self.logger = logging.getLogger('apk')
+        self.owner = owner
+        self.target = target
+        self.view = view
+        self.install_timeout = install_timeout
+        self.version = version
+        self.strict = strict
+        self.force_install = force_install
+        self.uninstall_apk = uninstall
+        self.apk_file = None
+        self.apk_info = None
+        self.apk_version = None
+        self.logcat_log = None
+
+    def init_resources(self, context):
+        self.apk_file = context.resolver.get(ApkFile(self.owner),
+                                             version=self.version,
+                                             strict=self.strict)
+        self.apk_info = ApkInfo(self.apk_file)
+
+    def setup(self, context):
+        self.initialize_package(context)
+        self.start_activity()
+        self.target.execute('am kill-all')  # kill all *background* activities
+        self.target.clear_logcat()
+
+    def initialize_package(self, context):
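+        # In strict mode, the host APK is required and its version is compared
+        # against whatever is installed on the target; otherwise, the copy
+        # already installed on the target is used as-is.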
+        installed_version = self.target.get_package_version(self.apk_info.package)
+        if self.strict:
+            self.initialize_with_host_apk(context, installed_version)
+        else:
+            if not installed_version:
+                message = '''{} was not found on the device and check_apk is set to
+                             "False", so the host version was not checked.'''
+                raise WorkloadError(message.format(self.apk_info.package))
+            message = 'Version {} installed on device; skipping host APK check.'
+            self.logger.debug(message.format(installed_version))
+            self.reset(context)
+            self.version = installed_version
+
+    def initialize_with_host_apk(self, context, installed_version):
+        host_version = self.apk_info.version_name
+        if installed_version != host_version:
+            if installed_version:
+                message = '{} host version: {}, device version: {}; re-installing...'
+                self.logger.debug(message.format(os.path.basename(self.apk_file),
+                                                 host_version, installed_version))
+            else:
+                message = '{} host version: {}, not found on device; installing...'
+                self.logger.debug(message.format(os.path.basename(self.apk_file),
+                                                 host_version))
+            self.force_install = True  # pylint: disable=attribute-defined-outside-init
+        else:
+            message = '{} version {} found on both device and host.'
+            self.logger.debug(message.format(os.path.basename(self.apk_file),
+                                             host_version))
+        if self.force_install:
+            if installed_version:
+                self.target.uninstall(self.apk_info.package)
+            self.install_apk(context)
+        else:
+            self.reset(context)
+        self.apk_version = host_version
+
+    def start_activity(self):
+        package, activity = self.apk_info.package, self.apk_info.activity
+        output = self.target.execute('am start -W -n {}/{}'.format(package, activity))
+        if 'Error:' in output:
+            # this will dismiss any error dialogs
+            self.target.execute('am force-stop {}'.format(package))
+            raise WorkloadError(output)
+        self.logger.debug(output)
+
+    def reset(self, context):  # pylint: disable=W0613
+        self.target.execute('am force-stop {}'.format(self.apk_info.package))
+        self.target.execute('pm clear {}'.format(self.apk_info.package))
+
+    def install_apk(self, context):
+        output = self.target.install(self.apk_file, self.install_timeout)
+        if 'Failure' in output:
+            if 'ALREADY_EXISTS' in output:
+                self.logger.warn('Using already installed APK (did it not uninstall properly?)')
+            else:
+                raise WorkloadError(output)
+        else:
+            self.logger.debug(output)
+
+    def update_result(self, context):
+        self.logcat_log = os.path.join(context.output_directory, 'logcat.log')
+        self.target.dump_logcat(self.logcat_log)
+        context.add_iteration_artifact(name='logcat',
+                                       path='logcat.log',
+                                       kind='log',
+                                       description='Logcat dump for the run.')
+
+    def teardown(self, context):
+        self.target.execute('am force-stop {}'.format(self.apk_info.package))
+        if self.uninstall_apk:
+            self.target.uninstall(self.apk_info.package)
diff --git a/wa/tests/__init__.py b/wa/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/wa/tests/data/extensions/devices/test_device.py b/wa/tests/data/extensions/devices/test_device.py
new file mode 100644
index 00000000..2c4d51ad
--- /dev/null
+++ b/wa/tests/data/extensions/devices/test_device.py
@@ -0,0 +1,50 @@
+#    Copyright 2013-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+from wa import Plugin
+
+
+class TestDevice(Plugin):
+
+    name = 'test-device'
+    kind = 'device'
+
+    def __init__(self, *args, **kwargs):
+        self.modules = []
+        self.boot_called = 0
+        self.push_file_called = 0
+        self.pull_file_called = 0
+        self.execute_called = 0
+        self.set_sysfile_int_called = 0
+        self.close_called = 0
+
+    def boot(self):
+        self.boot_called += 1
+
+    def push_file(self, source, dest):
+        self.push_file_called += 1
+
+    def pull_file(self, source, dest):
+        self.pull_file_called += 1
+
+    def execute(self, command):
+        self.execute_called += 1
+
+    def set_sysfile_int(self, file, value):
+        self.set_sysfile_int_called += 1
+
+    def close(self, command):
+        self.close_called += 1
diff --git a/wa/tests/data/interrupts/after b/wa/tests/data/interrupts/after
new file mode 100755
index 00000000..93145098
--- /dev/null
+++ b/wa/tests/data/interrupts/after
@@ -0,0 +1,98 @@
+           CPU0       CPU1       CPU2       CPU3       CPU4       CPU5       CPU6       CPU7       
+ 65:          0          0          0          0          0          0          0          0       GIC  dma-pl330.2
+ 66:          0          0          0          0          0          0          0          0       GIC  dma-pl330.0
+ 67:          0          0          0          0          0          0          0          0       GIC  dma-pl330.1
+ 74:          0          0          0          0          0          0          0          0       GIC  s3c2410-wdt
+ 85:          2          0          0          0          0          0          0          0       GIC  exynos4210-uart
+ 89:        368          0          0          0          0          0          0          0       GIC  s3c2440-i2c.1
+ 90:          0          0          0          0          0          0          0          0       GIC  s3c2440-i2c.2
+ 92:       1294          0          0          0          0          0          0          0       GIC  exynos5-hs-i2c.0
+ 95:        831          0          0          0          0          0          0          0       GIC  exynos5-hs-i2c.3
+103:          1          0          0          0          0          0          0          0       GIC  ehci_hcd:usb1, ohci_hcd:usb2
+104:       7304          0          0          0          0          0          0          0       GIC  xhci_hcd:usb3, exynos-ss-udc.0
+105:          0          0          0          0          0          0          0          0       GIC  xhci_hcd:usb5
+106:          0          0          0          0          0          0          0          0       GIC  mali.0
+107:      16429          0          0          0          0          0          0          0       GIC  dw-mci
+108:          1          0          0          0          0          0          0          0       GIC  dw-mci
+109:          0          0          0          0          0          0          0          0       GIC  dw-mci
+114:      28074          0          0          0          0          0          0          0       GIC  mipi-dsi
+117:          0          0          0          0          0          0          0          0       GIC  exynos-gsc
+118:          0          0          0          0          0          0          0          0       GIC  exynos-gsc
+121:          0          0          0          0          0          0          0          0       GIC  exynos5-jpeg-hx
+123:          7          0          0          0          0          0          0          0       GIC  s5p-fimg2d
+126:          0          0          0          0          0          0          0          0       GIC  s5p-mixer
+127:          0          0          0          0          0          0          0          0       GIC  hdmi-int
+128:          0          0          0          0          0          0          0          0       GIC  s5p-mfc-v6
+142:          0          0          0          0          0          0          0          0       GIC  dma-pl330.3
+146:          0          0          0          0          0          0          0          0       GIC  s5p-tvout-cec
+149:       1035          0          0          0          0          0          0          0       GIC  mali.0
+152:      26439          0          0          0          0          0          0          0       GIC  mct_tick0
+153:          0       2891          0          0          0          0          0          0       GIC  mct_tick1
+154:          0          0       3969          0          0          0          0          0       GIC  mct_tick2
+155:          0          0          0       2385          0          0          0          0       GIC  mct_tick3
+160:          0          0          0          0       8038          0          0          0       GIC  mct_tick4
+161:          0          0          0          0          0       8474          0          0       GIC  mct_tick5
+162:          0          0          0          0          0          0       7842          0       GIC  mct_tick6
+163:          0          0          0          0          0          0          0       7827       GIC  mct_tick7
+200:          0          0          0          0          0          0          0          0       GIC  exynos5-jpeg-hx
+201:          0          0          0          0          0          0          0          0       GIC  exynos-sysmmu.29
+218:          0          0          0          0          0          0          0          0       GIC  exynos-sysmmu.25
+220:          0          0          0          0          0          0          0          0       GIC  exynos-sysmmu.27
+224:          0          0          0          0          0          0          0          0       GIC  exynos-sysmmu.19
+251:        320          0          0          0          0          0          0          0       GIC  mali.0
+252:          0          0          0          0          0          0          0          0       GIC  exynos5-scaler
+253:          0          0          0          0          0          0          0          0       GIC  exynos5-scaler
+254:          0          0          0          0          0          0          0          0       GIC  exynos5-scaler
+272:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.5
+274:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.6
+280:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.11
+282:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.30
+284:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.12
+286:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.17
+288:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.4
+290:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.20
+294:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.9
+296:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.9
+298:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.9
+300:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.9
+302:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.16
+306:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.0
+316:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.2
+325:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.0
+332:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.16
+340:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.16
+342:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.9
+344:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.16
+405:        327          0          0          0          0          0          0          0  combiner  s3c_fb
+409:          0          0          0          0          0          0          0          0  combiner  mcuctl
+414:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.28
+434:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.22
+436:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.23
+438:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.26
+443:         12          0          0          0          0          0          0          0  combiner  mct_comp_irq
+446:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.21
+449:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.13
+453:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.15
+474:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.24
+512:          0          0          0          0          0          0          0          0  exynos-eint  gpio-keys: KEY_POWER
+518:          0          0          0          0          0          0          0          0  exynos-eint  drd_switch_vbus
+524:          0          0          0          0          0          0          0          0  exynos-eint  gpio-keys: KEY_HOMEPAGE
+526:          1          0          0          0          0          0          0          0  exynos-eint  HOST_DETECT
+527:          1          0          0          0          0          0          0          0  exynos-eint  drd_switch_id
+531:          1          0          0          0          0          0          0          0  exynos-eint  drd_switch_vbus
+532:          1          0          0          0          0          0          0          0  exynos-eint  drd_switch_id
+537:          3          0          0          0          0          0          0          0  exynos-eint  mxt540e_ts
+538:          0          0          0          0          0          0          0          0  exynos-eint  sec-pmic-irq
+543:          1          0          0          0          0          0          0          0  exynos-eint  hdmi-ext
+544:          0          0          0          0          0          0          0          0  s5p_gpioint  gpio-keys: KEY_VOLUMEDOWN
+545:          0          0          0          0          0          0          0          0  s5p_gpioint  gpio-keys: KEY_VOLUMEUP
+546:          0          0          0          0          0          0          0          0  s5p_gpioint  gpio-keys: KEY_MENU
+547:          0          0          0          0          0          0          0          0  s5p_gpioint  gpio-keys: KEY_BACK
+655:          0          0          0          0          0          0          0          0  sec-pmic  rtc-alarm0
+IPI0:          0          0          0          0          0          0          0          0  Timer broadcast interrupts
+IPI1:       8823       7185       4642       5652       2370       2069       1452       1351  Rescheduling interrupts
+IPI2:          4          7          8          6          8          7          8          8  Function call interrupts
+IPI3:          1          0          0          0          0          0          0          0  Single function call interrupts
+IPI4:          0          0          0          0          0          0          0          0  CPU stop interrupts
+IPI5:          0          0          0          0          0          0          0          0  CPU backtrace
+Err:          0
diff --git a/wa/tests/data/interrupts/before b/wa/tests/data/interrupts/before
new file mode 100755
index 00000000..a332b8e9
--- /dev/null
+++ b/wa/tests/data/interrupts/before
@@ -0,0 +1,97 @@
+           CPU0       CPU1       CPU2       CPU3       CPU4       CPU5       CPU6       CPU7       
+ 65:          0          0          0          0          0          0          0          0       GIC  dma-pl330.2
+ 66:          0          0          0          0          0          0          0          0       GIC  dma-pl330.0
+ 67:          0          0          0          0          0          0          0          0       GIC  dma-pl330.1
+ 74:          0          0          0          0          0          0          0          0       GIC  s3c2410-wdt
+ 85:          2          0          0          0          0          0          0          0       GIC  exynos4210-uart
+ 89:        368          0          0          0          0          0          0          0       GIC  s3c2440-i2c.1
+ 90:          0          0          0          0          0          0          0          0       GIC  s3c2440-i2c.2
+ 92:       1204          0          0          0          0          0          0          0       GIC  exynos5-hs-i2c.0
+ 95:        831          0          0          0          0          0          0          0       GIC  exynos5-hs-i2c.3
+103:          1          0          0          0          0          0          0          0       GIC  ehci_hcd:usb1, ohci_hcd:usb2
+104:       7199          0          0          0          0          0          0          0       GIC  xhci_hcd:usb3, exynos-ss-udc.0
+105:          0          0          0          0          0          0          0          0       GIC  xhci_hcd:usb5
+106:          0          0          0          0          0          0          0          0       GIC  mali.0
+107:      16429          0          0          0          0          0          0          0       GIC  dw-mci
+108:          1          0          0          0          0          0          0          0       GIC  dw-mci
+109:          0          0          0          0          0          0          0          0       GIC  dw-mci
+114:      26209          0          0          0          0          0          0          0       GIC  mipi-dsi
+117:          0          0          0          0          0          0          0          0       GIC  exynos-gsc
+118:          0          0          0          0          0          0          0          0       GIC  exynos-gsc
+121:          0          0          0          0          0          0          0          0       GIC  exynos5-jpeg-hx
+123:          7          0          0          0          0          0          0          0       GIC  s5p-fimg2d
+126:          0          0          0          0          0          0          0          0       GIC  s5p-mixer
+127:          0          0          0          0          0          0          0          0       GIC  hdmi-int
+128:          0          0          0          0          0          0          0          0       GIC  s5p-mfc-v6
+142:          0          0          0          0          0          0          0          0       GIC  dma-pl330.3
+146:          0          0          0          0          0          0          0          0       GIC  s5p-tvout-cec
+149:       1004          0          0          0          0          0          0          0       GIC  mali.0
+152:      26235          0          0          0          0          0          0          0       GIC  mct_tick0
+153:          0       2579          0          0          0          0          0          0       GIC  mct_tick1
+154:          0          0       3726          0          0          0          0          0       GIC  mct_tick2
+155:          0          0          0       2262          0          0          0          0       GIC  mct_tick3
+161:          0          0          0          0          0       2554          0          0       GIC  mct_tick5
+162:          0          0          0          0          0          0       1911          0       GIC  mct_tick6
+163:          0          0          0          0          0          0          0       1928       GIC  mct_tick7
+200:          0          0          0          0          0          0          0          0       GIC  exynos5-jpeg-hx
+201:          0          0          0          0          0          0          0          0       GIC  exynos-sysmmu.29
+218:          0          0          0          0          0          0          0          0       GIC  exynos-sysmmu.25
+220:          0          0          0          0          0          0          0          0       GIC  exynos-sysmmu.27
+224:          0          0          0          0          0          0          0          0       GIC  exynos-sysmmu.19
+251:        312          0          0          0          0          0          0          0       GIC  mali.0
+252:          0          0          0          0          0          0          0          0       GIC  exynos5-scaler
+253:          0          0          0          0          0          0          0          0       GIC  exynos5-scaler
+254:          0          0          0          0          0          0          0          0       GIC  exynos5-scaler
+272:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.5
+274:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.6
+280:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.11
+282:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.30
+284:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.12
+286:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.17
+288:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.4
+290:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.20
+294:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.9
+296:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.9
+298:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.9
+300:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.9
+302:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.16
+306:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.0
+316:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.2
+325:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.0
+332:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.16
+340:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.16
+342:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.9
+344:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.16
+405:        322          0          0          0          0          0          0          0  combiner  s3c_fb
+409:          0          0          0          0          0          0          0          0  combiner  mcuctl
+414:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.28
+434:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.22
+436:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.23
+438:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.26
+443:         12          0          0          0          0          0          0          0  combiner  mct_comp_irq
+446:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.21
+449:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.13
+453:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.15
+474:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.24
+512:          0          0          0          0          0          0          0          0  exynos-eint  gpio-keys: KEY_POWER
+518:          0          0          0          0          0          0          0          0  exynos-eint  drd_switch_vbus
+524:          0          0          0          0          0          0          0          0  exynos-eint  gpio-keys: KEY_HOMEPAGE
+526:          1          0          0          0          0          0          0          0  exynos-eint  HOST_DETECT
+527:          1          0          0          0          0          0          0          0  exynos-eint  drd_switch_id
+531:          1          0          0          0          0          0          0          0  exynos-eint  drd_switch_vbus
+532:          1          0          0          0          0          0          0          0  exynos-eint  drd_switch_id
+537:          3          0          0          0          0          0          0          0  exynos-eint  mxt540e_ts
+538:          0          0          0          0          0          0          0          0  exynos-eint  sec-pmic-irq
+543:          1          0          0          0          0          0          0          0  exynos-eint  hdmi-ext
+544:          0          0          0          0          0          0          0          0  s5p_gpioint  gpio-keys: KEY_VOLUMEDOWN
+545:          0          0          0          0          0          0          0          0  s5p_gpioint  gpio-keys: KEY_VOLUMEUP
+546:          0          0          0          0          0          0          0          0  s5p_gpioint  gpio-keys: KEY_MENU
+547:          0          0          0          0          0          0          0          0  s5p_gpioint  gpio-keys: KEY_BACK
+655:          0          0          0          0          0          0          0          0  sec-pmic  rtc-alarm0
+IPI0:          0          0          0          0          0          0          0          0  Timer broadcast interrupts
+IPI1:       8751       7147       4615       5623       2334       2066       1449       1348  Rescheduling interrupts
+IPI2:          3          6          7          6          7          6          7          7  Function call interrupts
+IPI3:          1          0          0          0          0          0          0          0  Single function call interrupts
+IPI4:          0          0          0          0          0          0          0          0  CPU stop interrupts
+IPI5:          0          0          0          0          0          0          0          0  CPU backtrace
+Err:          0
diff --git a/wa/tests/data/interrupts/result b/wa/tests/data/interrupts/result
new file mode 100755
index 00000000..b9ec2dd1
--- /dev/null
+++ b/wa/tests/data/interrupts/result
@@ -0,0 +1,98 @@
+        CPU0 CPU1 CPU2 CPU3 CPU4 CPU5 CPU6 CPU7                                      
+    65:    0    0    0    0    0    0    0    0                       GIC dma-pl330.2
+    66:    0    0    0    0    0    0    0    0                       GIC dma-pl330.0
+    67:    0    0    0    0    0    0    0    0                       GIC dma-pl330.1
+    74:    0    0    0    0    0    0    0    0                       GIC s3c2410-wdt
+    85:    0    0    0    0    0    0    0    0                   GIC exynos4210-uart
+    89:    0    0    0    0    0    0    0    0                     GIC s3c2440-i2c.1
+    90:    0    0    0    0    0    0    0    0                     GIC s3c2440-i2c.2
+    92:   90    0    0    0    0    0    0    0                  GIC exynos5-hs-i2c.0
+    95:    0    0    0    0    0    0    0    0                  GIC exynos5-hs-i2c.3
+   103:    0    0    0    0    0    0    0    0      GIC ehci_hcd:usb1, ohci_hcd:usb2
+   104:  105    0    0    0    0    0    0    0    GIC xhci_hcd:usb3, exynos-ss-udc.0
+   105:    0    0    0    0    0    0    0    0                     GIC xhci_hcd:usb5
+   106:    0    0    0    0    0    0    0    0                            GIC mali.0
+   107:    0    0    0    0    0    0    0    0                            GIC dw-mci
+   108:    0    0    0    0    0    0    0    0                            GIC dw-mci
+   109:    0    0    0    0    0    0    0    0                            GIC dw-mci
+   114: 1865    0    0    0    0    0    0    0                          GIC mipi-dsi
+   117:    0    0    0    0    0    0    0    0                        GIC exynos-gsc
+   118:    0    0    0    0    0    0    0    0                        GIC exynos-gsc
+   121:    0    0    0    0    0    0    0    0                   GIC exynos5-jpeg-hx
+   123:    0    0    0    0    0    0    0    0                        GIC s5p-fimg2d
+   126:    0    0    0    0    0    0    0    0                         GIC s5p-mixer
+   127:    0    0    0    0    0    0    0    0                          GIC hdmi-int
+   128:    0    0    0    0    0    0    0    0                        GIC s5p-mfc-v6
+   142:    0    0    0    0    0    0    0    0                       GIC dma-pl330.3
+   146:    0    0    0    0    0    0    0    0                     GIC s5p-tvout-cec
+   149:   31    0    0    0    0    0    0    0                            GIC mali.0
+   152:  204    0    0    0    0    0    0    0                         GIC mct_tick0
+   153:    0  312    0    0    0    0    0    0                         GIC mct_tick1
+   154:    0    0  243    0    0    0    0    0                         GIC mct_tick2
+   155:    0    0    0  123    0    0    0    0                         GIC mct_tick3
+>  160:    0    0    0    0 8038    0    0    0                         GIC mct_tick4
+   161:    0    0    0    0    0 5920    0    0                         GIC mct_tick5
+   162:    0    0    0    0    0    0 5931    0                         GIC mct_tick6
+   163:    0    0    0    0    0    0    0 5899                         GIC mct_tick7
+   200:    0    0    0    0    0    0    0    0                   GIC exynos5-jpeg-hx
+   201:    0    0    0    0    0    0    0    0                  GIC exynos-sysmmu.29
+   218:    0    0    0    0    0    0    0    0                  GIC exynos-sysmmu.25
+   220:    0    0    0    0    0    0    0    0                  GIC exynos-sysmmu.27
+   224:    0    0    0    0    0    0    0    0                  GIC exynos-sysmmu.19
+   251:    8    0    0    0    0    0    0    0                            GIC mali.0
+   252:    0    0    0    0    0    0    0    0                    GIC exynos5-scaler
+   253:    0    0    0    0    0    0    0    0                    GIC exynos5-scaler
+   254:    0    0    0    0    0    0    0    0                    GIC exynos5-scaler
+   272:    0    0    0    0    0    0    0    0              combiner exynos-sysmmu.5
+   274:    0    0    0    0    0    0    0    0              combiner exynos-sysmmu.6
+   280:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.11
+   282:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.30
+   284:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.12
+   286:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.17
+   288:    0    0    0    0    0    0    0    0              combiner exynos-sysmmu.4
+   290:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.20
+   294:    0    0    0    0    0    0    0    0              combiner exynos-sysmmu.9
+   296:    0    0    0    0    0    0    0    0              combiner exynos-sysmmu.9
+   298:    0    0    0    0    0    0    0    0              combiner exynos-sysmmu.9
+   300:    0    0    0    0    0    0    0    0              combiner exynos-sysmmu.9
+   302:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.16
+   306:    0    0    0    0    0    0    0    0              combiner exynos-sysmmu.0
+   316:    0    0    0    0    0    0    0    0              combiner exynos-sysmmu.2
+   325:    0    0    0    0    0    0    0    0              combiner exynos-sysmmu.0
+   332:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.16
+   340:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.16
+   342:    0    0    0    0    0    0    0    0              combiner exynos-sysmmu.9
+   344:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.16
+   405:    5    0    0    0    0    0    0    0                       combiner s3c_fb
+   409:    0    0    0    0    0    0    0    0                       combiner mcuctl
+   414:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.28
+   434:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.22
+   436:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.23
+   438:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.26
+   443:    0    0    0    0    0    0    0    0                 combiner mct_comp_irq
+   446:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.21
+   449:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.13
+   453:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.15
+   474:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.24
+   512:    0    0    0    0    0    0    0    0      exynos-eint gpio-keys: KEY_POWER
+   518:    0    0    0    0    0    0    0    0           exynos-eint drd_switch_vbus
+   524:    0    0    0    0    0    0    0    0   exynos-eint gpio-keys: KEY_HOMEPAGE
+   526:    0    0    0    0    0    0    0    0               exynos-eint HOST_DETECT
+   527:    0    0    0    0    0    0    0    0             exynos-eint drd_switch_id
+   531:    0    0    0    0    0    0    0    0           exynos-eint drd_switch_vbus
+   532:    0    0    0    0    0    0    0    0             exynos-eint drd_switch_id
+   537:    0    0    0    0    0    0    0    0                exynos-eint mxt540e_ts
+   538:    0    0    0    0    0    0    0    0              exynos-eint sec-pmic-irq
+   543:    0    0    0    0    0    0    0    0                  exynos-eint hdmi-ext
+   544:    0    0    0    0    0    0    0    0 s5p_gpioint gpio-keys: KEY_VOLUMEDOWN
+   545:    0    0    0    0    0    0    0    0   s5p_gpioint gpio-keys: KEY_VOLUMEUP
+   546:    0    0    0    0    0    0    0    0       s5p_gpioint gpio-keys: KEY_MENU
+   547:    0    0    0    0    0    0    0    0       s5p_gpioint gpio-keys: KEY_BACK
+   655:    0    0    0    0    0    0    0    0                   sec-pmic rtc-alarm0
+  IPI0:    0    0    0    0    0    0    0    0            Timer broadcast interrupts
+  IPI1:   72   38   27   29   36    3    3    3               Rescheduling interrupts
+  IPI2:    1    1    1    0    1    1    1    1              Function call interrupts
+  IPI3:    0    0    0    0    0    0    0    0       Single function call interrupts
+  IPI4:    0    0    0    0    0    0    0    0                   CPU stop interrupts
+  IPI5:    0    0    0    0    0    0    0    0                         CPU backtrace
+   Err:    0                                                                         
diff --git a/wa/tests/data/logcat.2.log b/wa/tests/data/logcat.2.log
new file mode 100644
index 00000000..eafed2b8
--- /dev/null
+++ b/wa/tests/data/logcat.2.log
@@ -0,0 +1,14 @@
+--------- beginning of /dev/log/main
+D/TextView( 2468): 7:07
+D/TextView( 2468): 7:07
+D/TextView( 2468): Thu, June 27
+--------- beginning of /dev/log/system
+D/TextView( 3099): CaffeineMark results
+D/TextView( 3099): Overall score:
+D/TextView( 3099): Rating
+D/TextView( 3099): Rank
+D/TextView( 3099): 0
+D/TextView( 3099): Details
+D/TextView( 3099): Publish
+D/TextView( 3099): Top 10
+D/TextView( 3099): 3672
diff --git a/wa/tests/data/logcat.log b/wa/tests/data/logcat.log
new file mode 100644
index 00000000..48703402
--- /dev/null
+++ b/wa/tests/data/logcat.log
@@ -0,0 +1,10 @@
+--------- beginning of /dev/log/main
+--------- beginning of /dev/log/system
+D/TextView( 2462): 5:05
+D/TextView( 2462): 5:05
+D/TextView( 2462): Mon, June 24
+D/TextView( 3072): Stop Test
+D/TextView( 3072): Testing CPU and memory…
+D/TextView( 3072): 0%
+D/TextView( 3072): Testing CPU and memory…
+
diff --git a/wa/tests/data/test-agenda.yaml b/wa/tests/data/test-agenda.yaml
new file mode 100644
index 00000000..85163a40
--- /dev/null
+++ b/wa/tests/data/test-agenda.yaml
@@ -0,0 +1,25 @@
+global: 
+        iterations: 8
+        boot_parameters:
+                os_mode: mp_a15_bootcluster
+        runtime_parameters:
+                a7_governor: Interactive
+                a15_governor: Interactive2
+                a7_cores: 3
+                a15_cores: 2
+workloads:
+        - id: 1c
+          workload_name: bbench_with_audio
+        - id: 1d
+          workload_name: Bbench_with_audio
+          runtime_parameters:
+                os_mode: mp_a7_only
+                a7_cores: 0
+          iterations: 4
+        - id: 1e
+          workload_name: audio
+        - id: 1f
+          workload_name: antutu
+          runtime_parameters:
+                a7_cores: 1
+                a15_cores: 1
diff --git a/wa/tests/data/test-config.py b/wa/tests/data/test-config.py
new file mode 100644
index 00000000..56c3288b
--- /dev/null
+++ b/wa/tests/data/test-config.py
@@ -0,0 +1,17 @@
+#    Copyright 2013-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+device = 'TEST'
diff --git a/wa/tests/test_agenda.py b/wa/tests/test_agenda.py
new file mode 100644
index 00000000..38f2fca9
--- /dev/null
+++ b/wa/tests/test_agenda.py
@@ -0,0 +1,195 @@
+#    Copyright 2013-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+# pylint: disable=E0611
+# pylint: disable=R0201
+import os
+from StringIO import StringIO
+from unittest import TestCase
+
+from nose.tools import assert_equal, assert_in, raises
+
+from wa.framework.agenda import Agenda
+from wa.framework.exception import ConfigError
+
+
+YAML_TEST_FILE = os.path.join(os.path.dirname(__file__), 'data', 'test-agenda.yaml')
+
+invalid_agenda_text = """
+workloads:
+    - id: 1
+      workload_parameters:
+          test: 1
+"""
+invalid_agenda = StringIO(invalid_agenda_text)
+invalid_agenda.name = 'invalid1'
+
+duplicate_agenda_text = """
+global:
+    iterations: 1
+workloads:
+    - id: 1
+      workload_name: antutu
+      workload_parameters:
+          test: 1
+    - id: 1
+      workload_name: andebench
+"""
+duplicate_agenda = StringIO(duplicate_agenda_text)
+duplicate_agenda.name = 'invalid2'
+
+short_agenda_text = """
+workloads: [antutu, linpack, andebench]
+"""
+short_agenda = StringIO(short_agenda_text)
+short_agenda.name = 'short'
+
+default_ids_agenda_text = """
+workloads:
+    - antutu
+    - id: 1
+      name: linpack
+    - id: test
+      name: andebench
+      params:
+          number_of_threads: 1
+    - vellamo
+"""
+default_ids_agenda = StringIO(default_ids_agenda_text)
+default_ids_agenda.name = 'default_ids'
+
+sectioned_agenda_text = """
+sections:
+    - id: sec1
+      runtime_params:
+        dp: one
+      workloads:
+        - antutu
+        - andebench
+        - name: linpack
+          runtime_params:
+            dp: two
+    - id: sec2
+      runtime_params:
+        dp: three
+      workloads:
+        - antutu
+workloads:
+    - nenamark
+"""
+sectioned_agenda = StringIO(sectioned_agenda_text)
+sectioned_agenda.name = 'sectioned'
+
+dup_sectioned_agenda_text = """
+sections:
+    - id: sec1
+      workloads:
+        - antutu
+    - id: sec1
+      workloads:
+        - andebench
+workloads:
+    - nenamark
+"""
+dup_sectioned_agenda = StringIO(dup_sectioned_agenda_text)
+dup_sectioned_agenda.name = 'dup-sectioned'
+
+caps_agenda_text = """
+config:
+    device: TC2
+global:
+    runtime_parameters:
+        sysfile_values:
+            /sys/test/MyFile: 1
+            /sys/test/other file: 2
+workloads:
+    - id: 1
+      name: linpack
+"""
+caps_agenda = StringIO(caps_agenda_text)
+caps_agenda.name = 'caps'
+
+bad_syntax_agenda_text = """
+config:
+    # tab on the following line
+	reboot_policy: never
+workloads:
+    - antutu
+"""
+bad_syntax_agenda = StringIO(bad_syntax_agenda_text)
+bad_syntax_agenda.name = 'bad_syntax'
+
+section_ids_test_text = """
+config:
+    device: TC2
+    reboot_policy: never
+workloads:
+    - name: bbench
+      id: bbench
+    - name: audio
+sections:
+    - id: foo
+    - id: bar
+"""
+section_ids_agenda = StringIO(section_ids_test_text)
+section_ids_agenda.name = 'section_ids'
+
+
+class AgendaTest(TestCase):
+
+    def test_yaml_load(self):
+        agenda = Agenda(YAML_TEST_FILE)
+        assert_equal(len(agenda.workloads), 4)
+
+    def test_duplicate_id(self):
+        try:
+            Agenda(duplicate_agenda)
+        except ConfigError as e:
+            assert_in('duplicate', e.message.lower())  # pylint: disable=E1101
+        else:
+            raise Exception('ConfigError was not raised for an agenda with duplicate ids.')
+
+    def test_yaml_missing_field(self):
+        try:
+            Agenda(invalid_agenda)
+        except ConfigError as e:
+            assert_in('workload name', e.message)
+        else:
+            raise Exception('ConfigError was not raised for an invalid agenda.')
+
+    def test_defaults(self):
+        agenda = Agenda(short_agenda)
+        assert_equal(len(agenda.workloads), 3)
+        assert_equal(agenda.workloads[0].workload_name, 'antutu')
+        assert_equal(agenda.workloads[0].id, '1')
+
+    def test_default_id_assignment(self):
+        agenda = Agenda(default_ids_agenda)
+        assert_equal(agenda.workloads[0].id, '2')
+        assert_equal(agenda.workloads[3].id, '3')
+
+    def test_sections(self):
+        agenda = Agenda(sectioned_agenda)
+        assert_equal(agenda.sections[0].workloads[0].workload_name, 'antutu')
+        assert_equal(agenda.sections[1].runtime_parameters['dp'], 'three')
+
+    @raises(ConfigError)
+    def test_dup_sections(self):
+        Agenda(dup_sectioned_agenda)
+
+    @raises(ConfigError)
+    def test_bad_syntax(self):
+        Agenda(bad_syntax_agenda)
diff --git a/wa/tests/test_config.py b/wa/tests/test_config.py
new file mode 100644
index 00000000..ff0faec2
--- /dev/null
+++ b/wa/tests/test_config.py
@@ -0,0 +1,21 @@
+import unittest
+from nose.tools import assert_equal
+
+from wa.framework.configuration import merge_config_values
+
+
+class TestConfigUtils(unittest.TestCase):
+
+    def test_merge_values(self):
+        test_cases = [
+            ('a', 3, 3),
+            ('a', [1, 2], ['a', 1, 2]),
+            ({1: 2}, [3, 4], [{1: 2}, 3, 4]),
+            (set([2]), [1, 2, 3], [2, 1, 2, 3]),
+            ([1, 2, 3], set([2]), set([1, 2, 3])),
+            ([1, 2], None, [1, 2]),
+            (None, 'a', 'a'),
+        ]
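+        # A sketch of the contract exercised by these cases (not the
+        # implementation): the merged result generally takes the type of the
+        # second value, sequence-like values are combined, and None on either
+        # side yields the other value unchanged.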
+        for v1, v2, expected in test_cases:
+            assert_equal(merge_config_values(v1, v2), expected)
+
diff --git a/wa/tests/test_diff.py b/wa/tests/test_diff.py
new file mode 100644
index 00000000..cc1683cc
--- /dev/null
+++ b/wa/tests/test_diff.py
@@ -0,0 +1,44 @@
+#    Copyright 2013-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+# pylint: disable=E0611
+# pylint: disable=R0201
+import os
+import tempfile
+from unittest import TestCase
+
+from nose.tools import assert_equal
+
+from wa.utils.diff import diff_interrupt_files
+
+
+class InterruptDiffTest(TestCase):
+
+    def test_interrupt_diff(self):
+        file_dir = os.path.join(os.path.dirname(__file__), 'data', 'interrupts')
+        before_file = os.path.join(file_dir, 'before')
+        after_file = os.path.join(file_dir, 'after')
+        expected_result_file = os.path.join(file_dir, 'result')
+        output_file = tempfile.mktemp()
+
+        diff_interrupt_files(before_file, after_file, output_file)
+        with open(output_file) as fh:
+            output_diff = fh.read()
+        with open(expected_result_file) as fh:
+            expected_diff = fh.read()
+        assert_equal(output_diff, expected_diff)
+
+
diff --git a/wa/tests/test_execution.py b/wa/tests/test_execution.py
new file mode 100644
index 00000000..eb4cbfbb
--- /dev/null
+++ b/wa/tests/test_execution.py
@@ -0,0 +1,164 @@
+import os
+import sys
+import unittest
+from StringIO import StringIO
+from mock import Mock
+from nose.tools import assert_true, assert_false, assert_equal
+
+from wa.framework import signal
+from wa.framework.agenda import Agenda
+from wa.framework.run import RunnerJob
+from wa.framework.execution import agenda_iterator
+
+sys.path.insert(0, os.path.dirname(__file__))
+from testutils import SignalWatcher
+
+
+class TestAgendaIteration(unittest.TestCase):
+
+    def setUp(self):
+        agenda_text = """
+        global:
+            iterations: 2
+        sections:
+            - id: a
+            - id: b
+              workloads:
+                - id: 1
+                  name: bbench
+        workloads:
+            - id: 2
+              name: dhrystone
+            - id: 3
+              name: coremark
+              iterations: 1
+        """
+        agenda_file = StringIO(agenda_text)
+        agenda_file.name = 'agenda'
+        self.agenda = Agenda(agenda_file)
+
+    def test_iteration_by_iteration(self):
+        specs = ['{}-{}'.format(s.id, w.id) 
+                 for _, s, w, _  
+                 in agenda_iterator(self.agenda, 'by_iteration')]
+        assert_equal(specs,
+                     ['a-2', 'b-2', 'a-3', 'b-3', 'b-1', 'a-2', 'b-2', 'b-1'])
+
+    def test_iteration_by_section(self):
+        specs = ['{}-{}'.format(s.id, w.id) 
+                 for _, s, w, _  
+                 in agenda_iterator(self.agenda, 'by_section')]
+        assert_equal(specs,
+                     ['a-2', 'a-3', 'b-2', 'b-3', 'b-1', 'a-2', 'b-2', 'b-1'])
+
+    def test_iteration_by_spec(self):
+        specs = ['{}-{}'.format(s.id, w.id) 
+                 for _, s, w, _  in 
+                 agenda_iterator(self.agenda, 'by_spec')]
+        assert_equal(specs,
+                     ['a-2', 'a-2', 'a-3', 'b-2', 'b-2', 'b-3', 'b-1', 'b-1'])
+
+
+class FakeWorkloadLoader(object):
+
+    def get_workload(self, name, target, **params):
+        workload = Mock()
+        workload.name = name
+        workload.target = target
+        workload.parameters = params
+        return workload
+
+
+class WorkloadExecutionWatcher(SignalWatcher):
+
+    signals = [
+        signal.BEFORE_WORKLOAD_SETUP,
+        signal.SUCCESSFUL_WORKLOAD_SETUP,
+        signal.AFTER_WORKLOAD_SETUP,
+        signal.BEFORE_WORKLOAD_EXECUTION,
+        signal.SUCCESSFUL_WORKLOAD_EXECUTION,
+        signal.AFTER_WORKLOAD_EXECUTION,
+        signal.BEFORE_WORKLOAD_RESULT_UPDATE,
+        signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE,
+        signal.AFTER_WORKLOAD_RESULT_UPDATE,
+        signal.BEFORE_WORKLOAD_TEARDOWN,
+        signal.SUCCESSFUL_WORKLOAD_TEARDOWN,
+        signal.AFTER_WORKLOAD_TEARDOWN,
+    ]
+
+
+class TestWorkloadExecution(unittest.TestCase):
+
+    def setUp(self):
+        params = {
+            'target': Mock(),
+            'context': Mock(),
+            'loader': FakeWorkloadLoader(),
+        }
+        data = {
+            'id': 'test',
+            'workload': 'test',
+            'label': None,
+            'parameters': None,
+        }
+        self.job = RunnerJob('job1', 'execute-workload-job', params, data)
+        self.workload = self.job.actor.workload
+        self.watcher = WorkloadExecutionWatcher()
+
+    def test_normal_flow(self):
+        self.job.run()
+        assert_true(self.workload.setup.called)
+        assert_true(self.workload.run.called)
+        assert_true(self.workload.update_result.called)
+        assert_true(self.workload.teardown.called)
+        self.watcher.assert_all_called()
+
+    def test_failed_run(self):
+        def bad():
+            raise Exception()
+        self.workload.run = bad
+        try:
+            self.job.run()
+        except Exception:
+            pass
+        assert_true(self.workload.setup.called)
+        assert_false(self.workload.update_result.called)
+        assert_true(self.workload.teardown.called)
+
+        assert_true(self.watcher.before_workload_setup.called)
+        assert_true(self.watcher.successful_workload_setup.called)
+        assert_true(self.watcher.after_workload_setup.called)
+        assert_true(self.watcher.before_workload_execution.called)
+        assert_false(self.watcher.successful_workload_execution.called)
+        assert_true(self.watcher.after_workload_execution.called)
+        assert_true(self.watcher.before_workload_result_update.called)
+        assert_false(self.watcher.successful_workload_result_update.called)
+        assert_true(self.watcher.after_workload_result_update.called)
+        assert_true(self.watcher.before_workload_teardown.called)
+        assert_true(self.watcher.successful_workload_teardown.called)
+        assert_true(self.watcher.after_workload_teardown.called)
+
+    def test_failed_setup(self):
+        def bad():
+            raise Exception()
+        self.workload.setup = bad
+        try:
+            self.job.run()
+        except Exception:
+            pass
+        assert_false(self.workload.run.called)
+        assert_false(self.workload.update_result.called)
+        assert_false(self.workload.teardown.called)
+
+        assert_true(self.watcher.before_workload_setup.called)
+        assert_false(self.watcher.successful_workload_setup.called)
+        assert_true(self.watcher.after_workload_setup.called)
+        assert_false(self.watcher.before_workload_execution.called)
+        assert_false(self.watcher.successful_workload_execution.called)
+        assert_false(self.watcher.after_workload_execution.called)
+        assert_false(self.watcher.before_workload_result_update.called)
+        assert_false(self.watcher.successful_workload_result_update.called)
+        assert_false(self.watcher.after_workload_result_update.called)
+        assert_false(self.watcher.before_workload_teardown.called)
+        assert_false(self.watcher.successful_workload_teardown.called)
+        assert_false(self.watcher.after_workload_teardown.called)
diff --git a/wa/tests/test_plugin.py b/wa/tests/test_plugin.py
new file mode 100644
index 00000000..f47f488a
--- /dev/null
+++ b/wa/tests/test_plugin.py
@@ -0,0 +1,248 @@
+#    Copyright 2014-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+# pylint: disable=E0611,R0201,E1101
+import os
+from unittest import TestCase
+
+from nose.tools import assert_equal, raises, assert_true
+
+from wa.framework.plugin import Plugin, PluginMeta, PluginLoader, Parameter
+from wa.utils.types import list_of_ints
+from wa import ConfigError
+
+
+EXTDIR = os.path.join(os.path.dirname(__file__), 'data', 'extensions')
+
+
+class PluginLoaderTest(TestCase):
+
+    def setUp(self):
+        self.loader = PluginLoader(paths=[EXTDIR, ])
+
+    def test_load_device(self):
+        device = self.loader.get_device('test-device')
+        assert_equal(device.name, 'test-device')
+
+    def test_list_by_kind(self):
+        exts = self.loader.list_devices()
+        assert_equal(len(exts), 1)
+        assert_equal(exts[0].name, 'test-device')
+
+
+class MyMeta(PluginMeta):
+
+    virtual_methods = ['validate', 'virtual1', 'virtual2']
+
+
+class MyBasePlugin(Plugin):
+
+    __metaclass__ = MyMeta
+
+    name = 'base'
+    kind = 'test'
+
+    parameters = [
+        Parameter('base'),
+    ]
+
+    def __init__(self, **kwargs):
+        super(MyBasePlugin, self).__init__(**kwargs)
+        self.v1 = 0
+        self.v2 = 0
+        self.v3 = ''
+
+    def virtual1(self):
+        self.v1 += 1
+        self.v3 = 'base'
+
+    def virtual2(self):
+        self.v2 += 1
+
+
+class MyAcidPlugin(MyBasePlugin):
+
+    name = 'acid'
+
+    parameters = [
+        Parameter('hydrochloric', kind=list_of_ints, default=[1, 2]),
+        Parameter('citric'),
+        Parameter('carbonic', kind=int),
+    ]
+
+    def __init__(self, **kwargs):
+        super(MyAcidPlugin, self).__init__(**kwargs)
+        self.vv1 = 0
+        self.vv2 = 0
+
+    def virtual1(self):
+        self.vv1 += 1
+        self.v3 = 'acid'
+
+    def virtual2(self):
+        self.vv2 += 1
+
+
+class MyOtherPlugin(MyBasePlugin):
+
+    name = 'other'
+
+    parameters = [
+        Parameter('mandatory', mandatory=True),
+        Parameter('optional', allowed_values=['test', 'check']),
+    ]
+
+
+class MyOtherOtherPlugin(MyOtherPlugin):
+
+    name = 'otherother'
+
+    parameters = [
+        Parameter('mandatory', override=True),
+    ]
+
+
+class MyOverridingPlugin(MyAcidPlugin):
+
+    name = 'overriding'
+
+    parameters = [
+        Parameter('hydrochloric', override=True, default=[3, 4]),
+    ]
+
+
+class MyThirdTeerPlugin(MyOverridingPlugin):
+
+    name = 'thirdteer'
+
+
+class MultiValueParamExt(Plugin):
+
+    name = 'multivalue'
+    kind = 'test'
+
+    parameters = [
+        Parameter('test', kind=list_of_ints, allowed_values=[42, 7, 73]),
+    ]
+
+
+class PluginMetaTest(TestCase):
+
+    def test_propagation(self):
+        acid_params = [p.name for p in MyAcidPlugin.parameters]
+        assert_equal(acid_params, ['base', 'hydrochloric', 'citric', 'carbonic'])
+
+    @raises(ValueError)
+    def test_duplicate_param_spec(self):
+        class BadPlugin(MyBasePlugin):  # pylint: disable=W0612
+            parameters = [
+                Parameter('base'),
+            ]
+
+    def test_param_override(self):
+        class OverridingPlugin(MyBasePlugin):  # pylint: disable=W0612
+            parameters = [
+                Parameter('base', override=True, default='cheese'),
+            ]
+        assert_equal(OverridingPlugin.parameters['base'].default, 'cheese')
+
+    @raises(ValueError)
+    def test_invalid_param_spec(self):
+        class BadPlugin(MyBasePlugin):  # pylint: disable=W0612
+            parameters = [
+                7,
+            ]
+
+    def test_virtual_methods(self):
+        acid = MyAcidPlugin()
+        acid.virtual1()
+        assert_equal(acid.v1, 1)
+        assert_equal(acid.vv1, 1)
+        assert_equal(acid.v2, 0)
+        assert_equal(acid.vv2, 0)
+        assert_equal(acid.v3, 'acid')
+        acid.virtual2()
+        acid.virtual2()
+        assert_equal(acid.v1, 1)
+        assert_equal(acid.vv1, 1)
+        assert_equal(acid.v2, 2)
+        assert_equal(acid.vv2, 2)
+
+    def test_initialization(self):
+        class MyExt(Plugin):
+            name = 'myext'
+            kind = 'test'
+            values = {'a': 0}
+            def __init__(self, *args, **kwargs):
+                super(MyExt, self).__init__(*args, **kwargs)
+                self.instance_init = 0
+            def initialize(self, context):
+                self.values['a'] += 1
+
+        class MyChildExt(MyExt):
+            name = 'mychildext'
+            def initialize(self, context):
+                self.instance_init += 1
+
+        ext = MyChildExt()
+        ext.initialize(None)
+
+        assert_equal(MyExt.values['a'], 1)
+        assert_equal(ext.instance_init, 1)
+
+
+class ParametersTest(TestCase):
+
+    def test_setting(self):
+        myext = MyAcidPlugin(hydrochloric=[5, 6], citric=5, carbonic=42)
+        assert_equal(myext.hydrochloric, [5, 6])
+        assert_equal(myext.citric, '5')
+        assert_equal(myext.carbonic, 42)
+
+    def test_validation_ok(self):
+        myext = MyOtherPlugin(mandatory='check', optional='check')
+        myext.validate()
+
+    def test_default_override(self):
+        myext = MyOverridingPlugin()
+        assert_equal(myext.hydrochloric, [3, 4])
+        myotherext = MyThirdTeerPlugin()
+        assert_equal(myotherext.hydrochloric, [3, 4])
+
+    def test_multivalue_param(self):
+        myext = MultiValueParamExt(test=[7, 42])
+        myext.validate()
+        assert_equal(myext.test, [7, 42])
+
+    @raises(ConfigError)
+    def test_bad_multivalue_param(self):
+        myext = MultiValueParamExt(test=[5])
+        myext.validate()
+
+    @raises(ConfigError)
+    def test_validation_no_mandatory(self):
+        myext = MyOtherPlugin(optional='check')
+        myext.validate()
+
+    @raises(ConfigError)
+    def test_validation_no_mandatory_in_derived(self):
+        MyOtherOtherPlugin()
+
+    @raises(ConfigError)
+    def test_validation_bad_value(self):
+        myext = MyOtherPlugin(mandatory=1, optional='invalid')
+        myext.validate()
+
diff --git a/wa/tests/test_runner.py b/wa/tests/test_runner.py
new file mode 100644
index 00000000..e2e9acd8
--- /dev/null
+++ b/wa/tests/test_runner.py
@@ -0,0 +1,44 @@
+import os
+import sys
+import shutil
+import tempfile
+import unittest
+
+from mock import Mock
+from nose.tools import assert_true, assert_false, assert_equal, assert_almost_equal
+
+from wa.framework import pluginloader
+from wa.framework.output import RunOutput
+from wa.framework.run import Runner, RunnerJob, runmethod, reset_runmethods
+from wa.utils.serializer import json
+
+
+class RunnerTest(unittest.TestCase):
+
+    def setUp(self):
+        self.output = RunOutput(tempfile.mktemp())
+        self.output.initialize()
+
+    def tearDown(self):
+        shutil.rmtree(self.output.output_directory)
+
+    def test_run_init(self):
+        runner = Runner(self.output)
+        runner.initialize()
+        runner.finalize()
+        assert_true(runner.info.name)
+        assert_true(runner.info.start_time)
+        assert_true(runner.info.end_time)
+        assert_almost_equal(runner.info.duration,
+                            runner.info.end_time -
+                            runner.info.start_time)
+
+    def test_normal_run(self):
+        runner = Runner(self.output)
+        runner.add_job(1, Mock())
+        runner.add_job(2, Mock())
+        runner.initialize()
+        runner.run()
+        runner.finalize()
+        assert_equal(len(runner.completed_jobs), 2)
diff --git a/wa/tests/test_signal.py b/wa/tests/test_signal.py
new file mode 100644
index 00000000..fe90d0d4
--- /dev/null
+++ b/wa/tests/test_signal.py
@@ -0,0 +1,63 @@
+import unittest
+
+from nose.tools import assert_equal, assert_true, assert_false
+
+import wa.framework.signal as signal
+
+
+class Callable(object):
+
+    def __init__(self, val):
+        self.val = val
+
+    def __call__(self):
+        return self.val
+
+
+class TestPriorityDispatcher(unittest.TestCase):
+
+    def test_ConnectNotify(self):
+        one = Callable(1)
+        two = Callable(2)
+        three = Callable(3)
+        signal.connect(
+            two,
+            'test',
+            priority=200
+        )
+        signal.connect(
+            one,
+            'test',
+            priority=100
+        )
+        signal.connect(
+            three,
+            'test',
+            priority=300
+        )
+        result = [i[1] for i in signal.send('test')]
+        assert_equal(result, [3, 2, 1])
+
+    def test_wrap_propagate(self):
+        d = {'before': False, 'after': False, 'success': False}
+        def before():
+            d['before'] = True
+        def after():
+            d['after'] = True
+        def success():
+            d['success'] = True
+        signal.connect(before, signal.BEFORE_WORKLOAD_SETUP)
+        signal.connect(after, signal.AFTER_WORKLOAD_SETUP)
+        signal.connect(success, signal.SUCCESSFUL_WORKLOAD_SETUP)
+
+        caught = False
+        try:
+            with signal.wrap('WORKLOAD_SETUP'):
+                raise RuntimeError()
+        except RuntimeError:
+            caught = True
+
+        assert_true(d['before'])
+        assert_true(d['after'])
+        assert_true(caught)
+        assert_false(d['success'])
diff --git a/wa/tests/test_utils.py b/wa/tests/test_utils.py
new file mode 100644
index 00000000..cd6d4526
--- /dev/null
+++ b/wa/tests/test_utils.py
@@ -0,0 +1,171 @@
+#    Copyright 2013-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+# pylint: disable=R0201
+from unittest import TestCase
+
+from nose.tools import raises, assert_equal, assert_not_equal, assert_in, assert_not_in
+from nose.tools import assert_true, assert_false
+
+from wa.utils.types import list_or_integer, list_or_bool, caseless_string, arguments, prioritylist, TreeNode
+
+
+class TestPriorityList(TestCase):
+
+    def test_insert(self):
+        pl = prioritylist()
+        elements = {3: "element 3",
+                    2: "element 2",
+                    1: "element 1",
+                    5: "element 5",
+                    4: "element 4"
+                    }
+        for key in elements:
+            pl.add(elements[key], priority=key)
+
+        match = zip(sorted(elements.values()), pl[:])
+        for pair in match:
+            assert(pair[0] == pair[1])
+
+    def test_delete(self):
+        pl = prioritylist()
+        elements = {2: "element 3",
+                    1: "element 2",
+                    0: "element 1",
+                    4: "element 5",
+                    3: "element 4"
+                    }
+        for key in elements:
+            pl.add(elements[key], priority=key)
+        del elements[2]
+        del pl[2]
+        match = zip(sorted(elements.values()), pl[:])
+        for pair in match:
+            assert(pair[0] == pair[1])
+
+    def test_multiple(self):
+        pl = prioritylist()
+        pl.add('1', 1)
+        pl.add('2.1', 2)
+        pl.add('3', 3)
+        pl.add('2.2', 2)
+        it = iter(pl)
+        assert_equal(it.next(), '3')
+        assert_equal(it.next(), '2.1')
+        assert_equal(it.next(), '2.2')
+        assert_equal(it.next(), '1')
+
+    def test_iterator_break(self):
+        pl = prioritylist()
+        pl.add('1', 1)
+        pl.add('2.1', 2)
+        pl.add('3', 3)
+        pl.add('2.2', 2)
+        for i in pl:
+            if i == '2.1':
+                break
+        assert_equal(pl.index('3'), 3)
+
+    def test_add_before_after(self):
+        pl = prioritylist()
+        pl.add('m', 1)
+        pl.add('a', 2)
+        pl.add('n', 1)
+        pl.add('b', 2)
+        pl.add_before('x', 'm')
+        assert_equal(list(pl), ['a', 'b', 'x', 'm', 'n'])
+        pl.add_after('y', 'b')
+        assert_equal(list(pl), ['a', 'b','y', 'x', 'm', 'n'])
+        pl.add_after('z', 'm')
+        assert_equal(list(pl), ['a', 'b', 'y', 'x', 'm', 'z', 'n'])
+
+
+class TestTreeNode(TestCase):
+
+    def test_addremove(self):
+        n1, n2, n3 = TreeNode(), TreeNode(), TreeNode()
+        n1.add_child(n2)
+        n3.parent = n2
+        assert_equal(n2.parent, n1)
+        assert_in(n3, n2.children)
+        n2.remove_child(n3)
+        assert_equal(n3.parent, None)
+        assert_not_in(n3, n2.children)
+        n1.add_child(n2)  # duplicate add
+        assert_equal(n1.children, [n2])
+
+    def test_ancestor_descendant(self):
+        n1, n2a, n2b, n3 = TreeNode(), TreeNode(), TreeNode(), TreeNode()
+        n1.add_child(n2a)
+        n1.add_child(n2b)
+        n2a.add_child(n3)
+        assert_equal(list(n3.iter_ancestors()), [n3, n2a, n1])
+        assert_equal(list(n1.iter_descendants()), [n2a, n3, n2b])
+        assert_true(n1.has_descendant(n3))
+        assert_true(n3.has_ancestor(n1))
+        assert_false(n3.has_ancestor(n2b))
+
+    def test_root(self):
+        n1, n2, n3 = TreeNode(), TreeNode(), TreeNode()
+        n1.add_child(n2)
+        n2.add_child(n3)
+        assert_true(n1.is_root)
+        assert_false(n2.is_root)
+        assert_equal(n3.get_root(), n1)
+
+    def test_common_ancestor(self):
+        n1, n2, n3a, n3b, n4, n5 = TreeNode(), TreeNode(), TreeNode(), TreeNode(), TreeNode(), TreeNode()
+        n1.add_child(n2)
+        n2.add_child(n3a)
+        n2.add_child(n3b)
+        n3b.add_child(n4)
+        n3a.add_child(n5)
+        assert_equal(n4.get_common_ancestor(n3a), n2)
+        assert_equal(n3a.get_common_ancestor(n4), n2)
+        assert_equal(n3b.get_common_ancestor(n4), n3b)
+        assert_equal(n4.get_common_ancestor(n3b), n3b)
+        assert_equal(n4.get_common_ancestor(n5), n2)
+
+    def test_iteration(self):
+        n1, n2, n3, n4, n5 = TreeNode(), TreeNode(), TreeNode(), TreeNode(), TreeNode()
+        n1.add_child(n2)
+        n2.add_child(n3)
+        n3.add_child(n4)
+        n4.add_child(n5)
+        ancestors = [a for a in n5.iter_ancestors(upto=n2)]
+        assert_equal(ancestors, [n5, n4, n3])
+        ancestors = [a for a in n5.iter_ancestors(after=n2)]
+        assert_equal(ancestors, [n2, n1])
+
+    @raises(ValueError)
+    def test_trivial_loop(self):
+        n1, n2, n3 = TreeNode(), TreeNode(), TreeNode()
+        n1.add_child(n2)
+        n2.add_child(n3)
+        n3.add_child(n1)
+
+    @raises(ValueError)
+    def test_tree_violation(self):
+        n1, n2a, n2b, n3 = TreeNode(), TreeNode(), TreeNode(), TreeNode()
+        n1.add_child(n2a)
+        n1.add_child(n2b)
+        n2a.add_child(n3)
+        n2b.add_child(n3)
+
+    @raises(ValueError)
+    def test_self_parent(self):
+        n = TreeNode()
+        n.add_child(n)
diff --git a/wa/tests/testutils.py b/wa/tests/testutils.py
new file mode 100644
index 00000000..ed49a768
--- /dev/null
+++ b/wa/tests/testutils.py
@@ -0,0 +1,39 @@
+from mock import Mock
+from nose.tools import assert_true
+
+from wa.framework import signal
+from wa.framework.plugin import Plugin
+from wa.utils.types import identifier
+
+
+class SignalWatcher(object):
+
+    signals = []
+
+    def __init__(self):
+        for sig in self.signals:
+            name = identifier(sig.name)
+            callback = Mock()
+            callback.im_func.__name__ = name
+            setattr(self, name, callback)
+            signal.connect(getattr(self, name), sig)
+
+    def assert_all_called(self):
+        for m in self.__dict__.itervalues():
+            assert_true(m.called)
+
+
+class MockContainerActor(Plugin):
+
+    name = 'mock-container'
+    kind = 'container-actor'
+
+    def __init__(self, owner=None, *args, **kwargs):
+        super(MockContainerActor, self).__init__(*args, **kwargs)
+        self.owner = owner
+        self.initialize = Mock()
+        self.finalize = Mock()
+        self.enter = Mock()
+        self.exit = Mock()
+        self.job_started = Mock()
+        self.job_completed = Mock()
diff --git a/wa/utils/__init__.py b/wa/utils/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/wa/utils/counter.py b/wa/utils/counter.py
new file mode 100644
index 00000000..b2f2a1b8
--- /dev/null
+++ b/wa/utils/counter.py
@@ -0,0 +1,46 @@
+"""
+An auto-incrementing value (kind of like an AUTO INCREMENT field in SQL).
+Optionally, the name of the counter to be used is specified (each counter
+increments separately).
+
+Counts start at 1, not 0.
+
+"""
+from collections import defaultdict
+
+__all__ = [
+    'next',
+    'reset',
+    'reset_all',
+    'counter',
+]
+
+__counters = defaultdict(int)
+
+
+def next(name=None):
+    __counters[name] += 1
+    value = __counters[name]
+    return value
+
+
+def reset_all(value=0):
+    for k in __counters:
+        reset(k, value)
+
+
+def reset(name=None, value=0):
+    __counters[name] = value
+
+
+class counter(object):
+
+    def __init__(self, name):
+        self.name = name
+
+    def next(self):
+        return next(self.name)
+
+    def reset(self, value=0):
+        return reset(self.name, value)
+
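+
+# A minimal usage sketch (assuming the module is imported as
+# ``from wa.utils import counter``):
+#
+#   counter.next('iterations')   # -> 1
+#   counter.next('iterations')   # -> 2
+#   counter.next()               # -> 1 (the default, unnamed counter)
+#   counter.reset('iterations')  # next('iterations') starts from 1 again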
diff --git a/wa/utils/diff.py b/wa/utils/diff.py
new file mode 100644
index 00000000..9318f15c
--- /dev/null
+++ b/wa/utils/diff.py
@@ -0,0 +1,81 @@
+import os
+import re
+import logging
+from itertools import izip, izip_longest
+
+from wa.utils.misc import diff_tokens, write_table
+from wa.utils.misc import ensure_file_directory_exists as _f
+
+logger = logging.getLogger('diff')
+
+
+def diff_interrupt_files(before, after, result):  # pylint: disable=R0914
+    output_lines = []
+    with open(before) as bfh:
+        with open(after) as ofh:
+            for bline, aline in izip(bfh, ofh):
+                bchunks = bline.strip().split()
+                while True:
+                    achunks = aline.strip().split()
+                    if achunks[0] == bchunks[0]:
+                        diffchunks = ['']
+                        diffchunks.append(achunks[0])
+                        diffchunks.extend([diff_tokens(b, a) for b, a
+                                           in zip(bchunks[1:], achunks[1:])])
+                        output_lines.append(diffchunks)
+                        break
+                    else:  # new category appeared in the after file
+                        diffchunks = ['>'] + achunks
+                        output_lines.append(diffchunks)
+                        try:
+                            aline = ofh.next()
+                        except StopIteration:
+                            break
+
+    # Offset heading columns by one to allow for row labels on subsequent
+    # lines.
+    output_lines[0].insert(0, '')
+
+    # Any "columns" that do not have headings in the first row are not actually
+    # columns -- they are a single column where space-separated words got
+    # split. Merge them back together to prevent them from being
+    # column-aligned by write_table.
+    table_rows = [output_lines[0]]
+    num_cols = len(output_lines[0])
+    for row in output_lines[1:]:
+        table_row = row[:num_cols]
+        table_row.append(' '.join(row[num_cols:]))
+        table_rows.append(table_row)
+
+    with open(result, 'w') as wfh:
+        write_table(table_rows, wfh)
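+
+# Sketch of the intended use: ``before`` and ``after`` are two snapshots of
+# /proc/interrupts; each output row is the interrupt label followed by the
+# per-column count deltas, with rows that only appear in the after snapshot
+# prefixed by '>'.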
+
+
+def diff_sysfs_dirs(before, after, result):  # pylint: disable=R0914
+    before_files = []
+    os.path.walk(before,
+                 lambda arg, dirname, names: arg.extend([os.path.join(dirname, f) for f in names]),
+                 before_files
+                 )
+    before_files = filter(os.path.isfile, before_files)
+    files = [os.path.relpath(f, before) for f in before_files]
+    after_files = [os.path.join(after, f) for f in files]
+    diff_files = [os.path.join(result, f) for f in files]
+
+    for bfile, afile, dfile in zip(before_files, after_files, diff_files):
+        if not os.path.isfile(afile):
+            logger.debug('sysfs_diff: {} does not exist or is not a file'.format(afile))
+            continue
+
+        with open(bfile) as bfh, open(afile) as afh:  # pylint: disable=C0321
+            with open(_f(dfile), 'w') as dfh:
+                for i, (bline, aline) in enumerate(izip_longest(bfh, afh), 1):
+                    if aline is None:
+                        logger.debug('Lines missing from {}'.format(afile))
+                        break
+                    bchunks = re.split(r'(\W+)', bline)
+                    achunks = re.split(r'(\W+)', aline)
+                    if len(bchunks) != len(achunks):
+                        logger.debug('Token length mismatch in {} on line {}'.format(bfile, i))
+                        dfh.write('xxx ' + bline)
+                        continue
+                    if ((len([c for c in bchunks if c.strip()]) == len([c for c in achunks if c.strip()]) == 2) and
+                            (bchunks[0] == achunks[0])):
+                        # if there are only two columns and the first column is the
+                        # same, assume it's a "header" column and do not diff it.
+                        dchunks = [bchunks[0]] + [diff_tokens(b, a) for b, a in zip(bchunks[1:], achunks[1:])]
+                    else:
+                        dchunks = [diff_tokens(b, a) for b, a in zip(bchunks, achunks)]
+                    dfh.write(''.join(dchunks))
diff --git a/wa/utils/doc.py b/wa/utils/doc.py
new file mode 100644
index 00000000..c9ddd93e
--- /dev/null
+++ b/wa/utils/doc.py
@@ -0,0 +1,307 @@
+#    Copyright 2014-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+"""
+Utilities for working with and formatting documentation.
+
+"""
+import os
+import re
+import inspect
+from itertools import cycle
+
+USER_HOME = os.path.expanduser('~')
+
+BULLET_CHARS = '-*'
+
+
+def get_summary(aclass):
+    """
+    Returns the summary description for an extension class. The summary is the
+    first paragraph (separated by blank line) of the description taken either from
+    the ``description`` attribute of the class, or if that is not present, from the
+    class' docstring.
+
+    """
+    return get_description(aclass).split('\n\n')[0]
+
+
+def get_description(aclass):
+    """
+    Return the description of the specified extension class. The description is taken
+    either from ``description`` attribute of the class or its docstring.
+
+    """
+    if hasattr(aclass, 'description') and aclass.description:
+        return inspect.cleandoc(aclass.description)
+    if aclass.__doc__:
+        return inspect.getdoc(aclass)
+    else:
+        return 'no documentation found for {}'.format(aclass.__name__)
+
+
+def get_type_name(obj):
+    """Returns the name of the type object or function specified. In case of a lambda,
+    the definition is returned with the parameter replaced by "value"."""
+    match = re.search(r"<(type|class|function) '?(.*?)'?>", str(obj))
+    if isinstance(obj, tuple):
+        name = obj[1]
+    elif match.group(1) == 'function':
+        text = str(obj)
+        name = text.split()[1]
+        if name == '<lambda>':
+            source = inspect.getsource(obj).strip().replace('\n', ' ')
+            match = re.search(r'lambda\s+(\w+)\s*:\s*(.*?)\s*[\n,]', source)
+            if not match:
+                raise ValueError('could not get name for {}'.format(obj))
+            name = match.group(2).replace(match.group(1), 'value')
+    else:
+        name = match.group(2)
+        if '.' in name:
+            name = name.split('.')[-1]
+    return name
+
+
+def count_leading_spaces(text):
+    """
+    Counts the number of leading space characters in a string.
+
+    TODO: may need to update this to handle other whitespace characters
+          (e.g. tabs), though that shouldn't be necessary as there should
+          be no tabs in Python source.
+
+    """
+    nspaces = 0
+    for c in text:
+        if c == ' ':
+            nspaces += 1
+        else:
+            break
+    return nspaces
+
+
+def format_column(text, width):
+    """
+    Formats text into a column of specified width. If a line is too long,
+    it will be broken on a word boundary. The new lines will have the same
+    number of leading spaces as the original line.
+
+    Note: this will not attempt to join up lines that are too short.
+
+    """
+    formatted = []
+    for line in text.split('\n'):
+        line_len = len(line)
+        if line_len <= width:
+            formatted.append(line)
+        else:
+            words = line.split(' ')
+            new_line = words.pop(0)
+            while words:
+                next_word = words.pop(0)
+                if (len(new_line) + len(next_word) + 1) < width:
+                    new_line += ' ' + next_word
+                else:
+                    formatted.append(new_line)
+                    new_line = ' ' * count_leading_spaces(new_line) + next_word
+            formatted.append(new_line)
+    return '\n'.join(formatted)
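+
+# For illustration (an informal sketch, not part of the original docs):
+# format_column('    a fairly long line that needs wrapping', 24) breaks
+# the line at word boundaries and gives each continuation line the same
+# four leading spaces as the original line.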
+
+
+def format_bullets(text, width, char='-', shift=3, outchar=None):
+    """
+    Formats text into bulleted list. Assumes each line of input that starts with
+    ``char`` (possibly preceded by whitespace) is a new bullet point. Note: leading
+    whitespace in the input will *not* be preserved. Instead, it will be determined by
+    ``shift`` parameter.
+
+    :text: the text to be formatted
+    :width: format width (note: must be at least ``shift`` + 4).
+    :char: character that indicates a new bullet point in the input text.
+    :shift: How far bulleted entries will be indented. This indicates the indentation
+            level of the bullet point. Text indentation level will be ``shift`` + 3.
+    :outchar: character that will be used to mark bullet points in the output. If
+              left as ``None``, ``char`` will be used.
+
+    """
+    bullet_lines = []
+    output = ''
+
+    def __process_bullet(bullet_lines):
+        if bullet_lines:
+            bullet = format_paragraph(indent(' '.join(bullet_lines), shift + 2), width)
+            bullet = bullet[:3] + outchar + bullet[4:]
+            del bullet_lines[:]
+            return bullet + '\n'
+        else:
+            return ''
+
+    if outchar is None:
+        outchar = char
+    for line in text.split('\n'):
+        line = line.strip()
+        if line.startswith(char):  # new bullet
+            output += __process_bullet(bullet_lines)
+            line = line[1:].strip()
+        bullet_lines.append(line)
+    output += __process_bullet(bullet_lines)
+    return output
+
+
+def format_simple_table(rows, headers=None, align='>', show_borders=True, borderchar='='):  # pylint: disable=R0914
+    """Formats a simple table."""
+    if not rows:
+        return ''
+    rows = [map(str, r) for r in rows]
+    num_cols = len(rows[0])
+
+    # cycle specified alignments until we have num_cols of them. This is
+    # consistent with how such cases are handled in R, pandas, etc.
+    it = cycle(align)
+    align = [it.next() for _ in xrange(num_cols)]
+
+    cols = zip(*rows)
+    col_widths = [max(map(len, c)) for c in cols]
+    if headers:
+        col_widths = [max(len(h), cw) for h, cw in zip(headers, col_widths)]
+    row_format = ' '.join(['{:%s%s}' % (align[i], w) for i, w in enumerate(col_widths)])
+    row_format += '\n'
+
+    border = row_format.format(*[borderchar * cw for cw in col_widths])
+
+    result = border if show_borders else ''
+    if headers:
+        result += row_format.format(*headers)
+        result += border
+    for row in rows:
+        result += row_format.format(*row)
+    if show_borders:
+        result += border
+    return result
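+
+# A hedged usage sketch: format_simple_table([[1, 'a'], [22, 'bb']],
+# headers=['num', 'str']) yields right-aligned columns (the default '>')
+# framed by '=' border rows above the headers, below them, and after the
+# body.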
+
+
+def format_paragraph(text, width):
+    """
+    Format the specified text into a column of the specified width. The text is
+    assumed to be a single paragraph and existing line breaks will not be preserved.
+    Leading spaces (of the initial line), on the other hand, will be preserved.
+
+    """
+    text = re.sub('\n\n*\\s*', ' ', text.strip('\n'))
+    return format_column(text, width)
+
+
+def format_body(text, width):
+    """
+    Format the specified text into a column  of specified width. The text is
+    assumed to be a "body" of one or more paragraphs separated by one or more
+    blank lines. The initial indentation of the first line of each paragraph
+    will be preserved, but any other formatting may be clobbered.
+
+    """
+    text = re.sub('\n\\s*\n', '\n\n', text.strip('\n'))  # get rid of all-whitespace lines
+    paragraphs = re.split('\n\n+', text)
+    formatted_paragraphs = []
+    for p in paragraphs:
+        if p.strip() and p.strip()[0] in BULLET_CHARS:
+            formatted_paragraphs.append(format_bullets(p, width))
+        else:
+            formatted_paragraphs.append(format_paragraph(p, width))
+    return '\n\n'.join(formatted_paragraphs)
+
+
+def strip_inlined_text(text):
+    """
+    This function processes multiline inlined text (e.g. from docstrings)
+    to strip away leading spaces and leading and trailing new lines.
+
+    """
+    text = text.strip('\n')
+    lines = [ln.rstrip() for ln in text.split('\n')]
+
+    # first line is special as it may not have the indent that follows the
+    # others, e.g. if it starts on the same line as the multiline quote (""").
+    nspaces = count_leading_spaces(lines[0])
+
+    if len([ln for ln in lines if ln]) > 1:
+        to_strip = min(count_leading_spaces(ln) for ln in lines[1:] if ln)
+        if nspaces >= to_strip:
+            stripped = [lines[0][to_strip:]]
+        else:
+            stripped = [lines[0][nspaces:]]
+        stripped += [ln[to_strip:] for ln in lines[1:]]
+    else:
+        stripped = [lines[0][nspaces:]]
+    return '\n'.join(stripped).strip('\n')
+
+
+def indent(text, spaces=4):
+    """Indent the lines i the specified text by ``spaces`` spaces."""
+    indented = []
+    for line in text.split('\n'):
+        if line:
+            indented.append(' ' * spaces + line)
+        else:  # do not indent empty lines
+            indented.append(line)
+    return '\n'.join(indented)
+
+
+def format_literal(lit):
+    if isinstance(lit, basestring):
+        return '``\'{}\'``'.format(lit)
+    elif hasattr(lit, 'pattern'):  # regex
+        return '``r\'{}\'``'.format(lit.pattern)
+    else:
+        return '``{}``'.format(lit)
+
+
+def get_params_rst(ext):
+    text = ''
+    for param in ext.parameters:
+        text += '{} : {} {}\n'.format(param.name, get_type_name(param.kind),
+                                      param.mandatory and '(mandatory)' or ' ')
+        desc = strip_inlined_text(param.description or '')
+        text += indent('{}\n'.format(desc))
+        if param.allowed_values:
+            text += indent('\nallowed values: {}\n'.format(', '.join(map(format_literal, param.allowed_values))))
+        elif param.constraint:
+            text += indent('\nconstraint: ``{}``\n'.format(get_type_name(param.constraint)))
+        if param.default:
+            value = param.default
+            if isinstance(value, basestring) and value.startswith(USER_HOME):
+                value = value.replace(USER_HOME, '~')
+            text += indent('\ndefault: {}\n'.format(format_literal(value)))
+        text += '\n'
+    return text
+
+
+def underline(text, symbol='='):
+    return '{}\n{}\n\n'.format(text, symbol * len(text))
+
+
+def get_rst_from_extension(ext):
+    text = underline(ext.name, '-')
+    if hasattr(ext, 'description'):
+        desc = strip_inlined_text(ext.description or '')
+    elif ext.__doc__:
+        desc = strip_inlined_text(ext.__doc__)
+    else:
+        desc = ''
+    text += desc + '\n\n'
+    params_rst = get_params_rst(ext)
+    if params_rst:
+        text += underline('parameters', '~') + params_rst
+    return text + '\n'
+
diff --git a/wa/utils/misc.py b/wa/utils/misc.py
new file mode 100644
index 00000000..2067f792
--- /dev/null
+++ b/wa/utils/misc.py
@@ -0,0 +1,643 @@
+#    Copyright 2013-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+"""
+Miscellaneous functions that don't fit anywhere else.
+
+"""
+from __future__ import division
+import os
+import sys
+import re
+import math
+import imp
+import uuid
+import string
+import threading
+import signal
+import subprocess
+import pkgutil
+import traceback
+import logging
+import random
+from datetime import datetime, timedelta
+from operator import mul, itemgetter
+from StringIO import StringIO
+from itertools import cycle, groupby
+from distutils.spawn import find_executable
+
+import yaml
+from dateutil import tz
+
+from wa.framework.version import get_wa_version
+
+
+# ABI --> architectures list
+ABI_MAP = {
+    'armeabi': ['armeabi', 'armv7', 'armv7l', 'armv7el', 'armv7lh'],
+    'arm64': ['arm64', 'armv8', 'arm64-v8a'],
+}
+
+
+def preexec_function():
+    # Ignore the SIGINT signal by setting the handler to the standard
+    # signal handler SIG_IGN.
+    signal.signal(signal.SIGINT, signal.SIG_IGN)
+    # Change process group in case we have to kill the subprocess and all of
+    # its children later.
+    # TODO: this is Unix-specific; would be good to find an OS-agnostic way
+    #       to do this in case we wanna port WA to Windows.
+    os.setpgrp()
+
+
+check_output_logger = logging.getLogger('check_output')
+
+
+# Defined here rather than in wlauto.exceptions due to module load dependencies
+class TimeoutError(Exception):
+    """Raised when a subprocess command times out. This is basically a ``WAError``-derived version
+    of ``subprocess.CalledProcessError``, the thinking being that while a timeout could be due to
+    programming error (e.g. not setting long enough timers), it is often due to some failure in the
+    environment, and therefore should be classed as a "user error"."""
+
+    def __init__(self, command, output):
+        super(TimeoutError, self).__init__('Timed out: {}'.format(command))
+        self.command = command
+        self.output = output
+
+    def __str__(self):
+        return '\n'.join([self.message, 'OUTPUT:', self.output or ''])
+
+
+def check_output(command, timeout=None, ignore=None, **kwargs):
+    """This is a version of subprocess.check_output that adds a timeout parameter to kill
+    the subprocess if it does not return within the specified time."""
+    # pylint: disable=too-many-branches
+    if ignore is None:
+        ignore = []
+    elif isinstance(ignore, int):
+        ignore = [ignore]
+    elif not isinstance(ignore, list) and ignore != 'all':
+        message = 'Invalid value for ignore parameter: "{}"; must be an int, a list, or "all"'
+        raise ValueError(message.format(ignore))
+    if 'stdout' in kwargs:
+        raise ValueError('stdout argument not allowed, it will be overridden.')
+
+    def callback(pid):
+        try:
+            check_output_logger.debug('{} timed out; sending SIGKILL'.format(pid))
+            os.killpg(pid, signal.SIGKILL)
+        except OSError:
+            pass  # process may have already terminated.
+
+    process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+                               preexec_fn=preexec_function, **kwargs)
+
+    if timeout:
+        timer = threading.Timer(timeout, callback, [process.pid, ])
+        timer.start()
+
+    try:
+        output, error = process.communicate()
+    finally:
+        if timeout:
+            timer.cancel()
+
+    retcode = process.poll()
+    if retcode:
+        if retcode == -9:  # killed, assume due to timeout callback
+            raise TimeoutError(command, output='\n'.join([output, error]))
+        elif ignore != 'all' and retcode not in ignore:
+            raise subprocess.CalledProcessError(retcode, command, output='\n'.join([output, error]))
+    return output, error
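+
+# Usage sketch (Unix-only, since the timeout relies on os.killpg):
+# check_output(['sleep', '10'], timeout=1) SIGKILLs the process group once
+# the timer fires and raises TimeoutError; ignore='all' suppresses non-zero
+# exit codes instead of raising CalledProcessError.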
+
+
+def init_argument_parser(parser):
+    parser.add_argument('-c', '--config', help='specify an additional config.py')
+    parser.add_argument('-v', '--verbose', action='count',
+                        help='The scripts will produce verbose output.')
+    parser.add_argument('--debug', action='store_true',
+                        help='Enable debug mode. Note: this implies --verbose.')
+    parser.add_argument('--version', action='version', version='%(prog)s {}'.format(get_wa_version()))
+    return parser
+
+
+def walk_modules(path):
+    """
+    Given a path to a Python package, iterate over all the modules and
+    sub-packages in that package.
+
+    """
+    try:
+        root_mod = __import__(path, {}, {}, [''])
+        yield root_mod
+    except ImportError as e:
+        e.path = path
+        raise e
+    if not hasattr(root_mod, '__path__'):  # module, not package
+        return
+    for _, name, ispkg in pkgutil.iter_modules(root_mod.__path__):
+        try:
+            submod_path = '.'.join([path, name])
+            if ispkg:
+                for submod in walk_modules(submod_path):
+                    yield submod
+            else:
+                yield __import__(submod_path, {}, {}, [''])
+        except ImportError as e:
+            e.path = submod_path
+            raise e
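+
+# e.g. ``for mod in walk_modules('wa.utils'): print mod.__name__`` would
+# import and yield the package itself followed by each of its submodules
+# (a sketch; any ImportError is re-raised with its ``path`` attribute set).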
+
+
+def ensure_directory_exists(dirpath):
+    """A filter for directory paths to ensure they exist."""
+    if not os.path.isdir(dirpath):
+        os.makedirs(dirpath)
+    return dirpath
+
+
+def ensure_file_directory_exists(filepath):
+    """
+    A filter for file paths to ensure the directory of the
+    file exists and the file can be created there. The file
+    itself is *not* going to be created if it doesn't already
+    exist.
+
+    """
+    ensure_directory_exists(os.path.dirname(filepath))
+    return filepath
+
+
+def diff_tokens(before_token, after_token):
+    """
+    Creates a diff of two tokens.
+
+    If the two tokens are the same it just returns the token
+    (whitespace tokens are considered the same irrespective of type/number
+    of whitespace characters in the token).
+
+    If the tokens are numeric, the difference between the two values
+    is returned.
+
+    Otherwise, a string in the form [before -> after] is returned.
+
+    """
+    if before_token.isspace() and after_token.isspace():
+        return after_token
+    elif before_token.isdigit() and after_token.isdigit():
+        try:
+            diff = int(after_token) - int(before_token)
+            return str(diff)
+        except ValueError:
+            return "[%s -> %s]" % (before_token, after_token)
+    elif before_token == after_token:
+        return after_token
+    else:
+        return "[%s -> %s]" % (before_token, after_token)
+
+
+def prepare_table_rows(rows):
+    """Given a list of lists, make sure they are prepared to be formatted into a table
+    by making sure each row has the same number of columns and stringifying all values."""
+    rows = [map(str, r) for r in rows]
+    max_cols = max(map(len, rows))
+    for row in rows:
+        pad = max_cols - len(row)
+        for _ in xrange(pad):
+            row.append('')
+    return rows
+
+
+def write_table(rows, wfh, align='>', headers=None):  # pylint: disable=R0914
+    """Write a column-aligned table to the specified file object."""
+    if not rows:
+        return
+    rows = prepare_table_rows(rows)
+    num_cols = len(rows[0])
+
+    # cycle specified alignments until we have num_cols of them. This is
+    # consistent with how such cases are handled in R, pandas, etc.
+    it = cycle(align)
+    align = [it.next() for _ in xrange(num_cols)]
+
+    cols = zip(*rows)
+    col_widths = [max(map(len, c)) for c in cols]
+    row_format = ' '.join(['{:%s%s}' % (align[i], w) for i, w in enumerate(col_widths)])
+    row_format += '\n'
+
+    if headers:
+        wfh.write(row_format.format(*headers))
+        underlines = ['-' * len(h) for h in headers]
+        wfh.write(row_format.format(*underlines))
+
+    for row in rows:
+        wfh.write(row_format.format(*row))
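+
+# A minimal sketch: write_table([['a', 1], ['bbb', 22]], sys.stdout)
+# pads each column to its widest cell and right-aligns values; passing
+# headers also emits a '-' underline row beneath them.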
+
+
+def get_null():
+    """Returns the correct null sink based on the OS."""
+    return 'NUL' if os.name == 'nt' else '/dev/null'
+
+
+def get_traceback(exc=None):
+    """
+    Returns the string with the traceback for the specified exc
+    object, or for the current exception if exc is not specified.
+
+    """
+    if exc is None:
+        exc = sys.exc_info()
+    if not exc:
+        return None
+    tb = exc[2]
+    sio = StringIO()
+    traceback.print_tb(tb, file=sio)
+    del tb  # needs to be done explicitly see: http://docs.python.org/2/library/sys.html#sys.exc_info
+    return sio.getvalue()
+
+
+def normalize(value, dict_type=dict):
+    """Normalize values. Recursively normalizes dict keys to be lower case,
+    no surrounding whitespace, underscore-delimited strings."""
+    if isinstance(value, dict):
+        normalized = dict_type()
+        for k, v in value.iteritems():
+            if isinstance(k, basestring):
+                k = k.strip().lower().replace(' ', '_')
+            normalized[k] = normalize(v, dict_type)
+        return normalized
+    elif isinstance(value, list):
+        return [normalize(v, dict_type) for v in value]
+    elif isinstance(value, tuple):
+        return tuple([normalize(v, dict_type) for v in value])
+    else:
+        return value
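+
+# e.g. normalize({'Max Freq ': {'Big Cluster': 1}}) returns
+# {'max_freq': {'big_cluster': 1}} -- keys are lower-cased, stripped and
+# underscore-delimited at every level of nesting.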
+
+
+VALUE_REGEX = re.compile(r'(\d+(?:\.\d+)?)\s*(\w*)')
+
+UNITS_MAP = {
+    's': 'seconds',
+    'ms': 'milliseconds',
+    'us': 'microseconds',
+    'ns': 'nanoseconds',
+    'V': 'volts',
+    'A': 'amps',
+    'mA': 'milliamps',
+    'J': 'joules',
+}
+
+
+def parse_value(value_string):
+    """parses a string representing a numerical value and returns
+    a tuple (value, units), where value will be either int or float,
+    and units will be a string representing the units or None."""
+    match = VALUE_REGEX.search(value_string)
+    if match:
+        vs = match.group(1)
+        value = float(vs) if '.' in vs else int(vs)
+        us = match.group(2)
+        units = UNITS_MAP.get(us, us)
+        return (value, units)
+    else:
+        return (value_string, None)
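+
+# For illustration: parse_value('10.5 ms') -> (10.5, 'milliseconds'),
+# parse_value('3 V') -> (3, 'volts'), and unrecognised unit strings are
+# passed through as-is.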
+
+
+def get_meansd(values):
+    """Returns mean and standard deviation of the specified values."""
+    if not values:
+        return float('nan'), float('nan')
+    mean = sum(values) / len(values)
+    sd = math.sqrt(sum([(v - mean) ** 2 for v in values]) / len(values))
+    return mean, sd
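+
+# e.g. get_meansd([1, 2, 3]) returns (2.0, ~0.816) -- the second value is
+# the population (not sample) standard deviation.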
+
+
+def geomean(values):
+    """Returns the geometric mean of the values."""
+    return reduce(mul, values) ** (1.0 / len(values))
+
+
+def capitalize(text):
+    """Capitalises the specified text: first letter upper case,
+    all subsequent letters lower case."""
+    if not text:
+        return ''
+    return text[0].upper() + text[1:].lower()
+
+
+def convert_new_lines(text):
+    """ Convert new lines to a common format.  """
+    return text.replace('\r\n', '\n').replace('\r', '\n')
+
+
+def escape_quotes(text):
+    """Escape quotes, and escaped quotes, in the specified text."""
+    return re.sub(r'\\("|\')', r'\\\\\1', text).replace('\'', '\\\'').replace('\"', '\\\"')
+
+
+def escape_single_quotes(text):
+    """Escape single quotes, and escaped single quotes, in the specified text."""
+    return re.sub(r'\\("|\')', r'\\\\\1', text).replace('\'', '\'\\\'\'')
+
+
+def escape_double_quotes(text):
+    """Escape double quotes, and escaped double quotes, in the specified text."""
+    return re.sub(r'\\("|\')', r'\\\\\1', text).replace('\"', '\\\"')
+
+
+def getch(count=1):
+    """Read ``count`` characters from standard input."""
+    if os.name == 'nt':
+        import msvcrt  # pylint: disable=F0401
+        return ''.join([msvcrt.getch() for _ in xrange(count)])
+    else:  # assume Unix
+        import tty  # NOQA
+        import termios  # NOQA
+        fd = sys.stdin.fileno()
+        old_settings = termios.tcgetattr(fd)
+        try:
+            tty.setraw(sys.stdin.fileno())
+            ch = sys.stdin.read(count)
+        finally:
+            termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
+        return ch
+
+
+def isiterable(obj):
+    """Returns ``True`` if the specified object is iterable and
+    *is not a string type*, ``False`` otherwise."""
+    return hasattr(obj, '__iter__') and not isinstance(obj, basestring)
+
+
+def utc_to_local(dt):
+    """Convert naive datetime to local time zone, assuming UTC."""
+    return dt.replace(tzinfo=tz.tzutc()).astimezone(tz.tzlocal())
+
+
+def local_to_utc(dt):
+    """Convert naive datetime to UTC, assuming local time zone."""
+    return dt.replace(tzinfo=tz.tzlocal()).astimezone(tz.tzutc())
+
+
+def as_relative(path):
+    """Convert path to relative by stripping away the leading '/' on UNIX or
+    the equivalent on other platforms."""
+    path = os.path.splitdrive(path)[1]
+    return path.lstrip(os.sep)
+
+
+def get_cpu_mask(cores):
+    """Return a string with the hex for the cpu mask for the specified core numbers."""
+    mask = 0
+    for i in cores:
+        mask |= 1 << i
+    return '0x{0:x}'.format(mask)
+
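+# For example (illustrative only): pinning to cores 0, 1 and 4 gives
+#
+#   get_cpu_mask([0, 1, 4])  ==>  '0x13'   # 0b10011
+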
+
+def load_class(classpath):
+    """Loads the specified Python class. ``classpath`` must be a fully-qualified
+    class name (i.e. namespaced under module/package)."""
+    modname, clsname = classpath.rsplit('.', 1)
+    # __import__ returns the top-level package, so pass fromlist to get the
+    # module the class is actually defined in.
+    mod = __import__(modname, fromlist=[clsname])
+    return getattr(mod, clsname)
+
+
+def get_pager():
+    """Returns the name of the system pager program."""
+    pager = os.getenv('PAGER')
+    if pager is None:
+        pager = find_executable('less')
+    if pager is None:
+        pager = find_executable('more')
+    return pager
+
+
+def enum_metaclass(enum_param, return_name=False, start=0):
+    """
+    Returns a ``type`` subclass that may be used as a metaclass for
+    an enum.
+
+    Parameters:
+
+        :enum_param: the name of class attribute that defines enum values.
+                     The metaclass will add a class attribute for each value in
+                     ``enum_param``. The value of the attribute depends on the type
+                     of ``enum_param`` and on the values of ``return_name``. If
+                     ``return_name`` is ``True``, then the value of the new attribute is
+                     the name of that attribute; otherwise, if ``enum_param`` is a ``list``
+                     or a ``tuple``, the value will be the index of that param in
+                     ``enum_param``, optionally offset by ``start``, otherwise, it will
+                     be assumed that ``enum_param`` implements a dict-like interface and
+                     the value will be ``enum_param[attr_name]``.
+        :return_name: If ``True``, the enum values will be the names of enum attributes. If
+                      ``False``, the default, the values will depend on the type of
+                      ``enum_param`` (see above).
+        :start: If ``enum_param`` is a list or a tuple, and ``return_name`` is ``False``,
+                this specifies an "offset" that will be added to the index of the attribute
+                within ``enum_param`` to form the value.
+
+
+    """
+    class __EnumMeta(type):
+        def __new__(mcs, clsname, bases, attrs):
+            cls = type.__new__(mcs, clsname, bases, attrs)
+            values = getattr(cls, enum_param, [])
+            if return_name:
+                for name in values:
+                    setattr(cls, name, name)
+            else:
+                if isinstance(values, list) or isinstance(values, tuple):
+                    for i, name in enumerate(values):
+                        setattr(cls, name, i + start)
+                else:  # assume dict-like
+                    for name in values:
+                        setattr(cls, name, values[name])
+            return cls
+    return __EnumMeta
+
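+# A minimal sketch of how the metaclass might be used (illustrative;
+# ``Status`` is a hypothetical class):
+#
+#   class Status(object):
+#       __metaclass__ = enum_metaclass('values', return_name=True)
+#       values = ['NEW', 'RUNNING', 'OK']
+#
+#   Status.NEW == 'NEW'   # each value becomes a class attribute
+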
+
+def which(name):
+    """Platform-independent version of UNIX which utility."""
+    if os.name == 'nt':
+        paths = os.getenv('PATH').split(os.pathsep)
+        exts = os.getenv('PATHEXT').split(os.pathsep)
+        for path in paths:
+            testpath = os.path.join(path, name)
+            if os.path.isfile(testpath):
+                return testpath
+            for ext in exts:
+                testpathext = testpath + ext
+                if os.path.isfile(testpathext):
+                    return testpathext
+        return None
+    else:  # assume UNIX-like
+        try:
+            result = check_output(['which', name])[0]
+            return result.strip()  # pylint: disable=E1103
+        except subprocess.CalledProcessError:
+            return None
+
+
+_bash_color_regex = re.compile('\x1b\\[[0-9;]+m')
+
+
+def strip_bash_colors(text):
+    return _bash_color_regex.sub('', text)
+
+
+def format_duration(seconds, sep=' ', order=['day', 'hour', 'minute', 'second']):  # pylint: disable=dangerous-default-value
+    """
+    Formats the specified number of seconds into human-readable duration.
+
+    """
+    if isinstance(seconds, timedelta):
+        td = seconds
+    else:
+        td = timedelta(seconds=seconds)
+    dt = datetime(1, 1, 1) + td
+    result = []
+    for item in order:
+        value = getattr(dt, item, None)
+        if item == 'day':
+            value -= 1  # datetime days are 1-based
+        if not value:
+            continue
+        suffix = '' if value == 1 else 's'
+        result.append('{} {}{}'.format(value, item, suffix))
+    return sep.join(result)
+
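+# For example (illustrative only):
+#
+#   format_duration(3661)  ==>  '1 hour 1 minute 1 second'
+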
+
+def get_article(word):
+    """
+    Returns the appropriate indefinite article for the word (ish).
+
+    .. note:: Indefinite article assignment in English is based on
+              sound rather than spelling, so this will not work correctly
+              in all cases; e.g. this will return ``"a hour"``.
+
+    """
+    return 'an' if word[0] in 'aeiou' else 'a'
+
+
+def get_random_string(length):
+    """Returns a random ASCII string of the specified length)."""
+    return ''.join(random.choice(string.ascii_letters + string.digits) for _ in xrange(length))
+
+
+RAND_MOD_NAME_LEN = 30
+BAD_CHARS = string.punctuation + string.whitespace
+TRANS_TABLE = string.maketrans(BAD_CHARS, '_' * len(BAD_CHARS))
+
+
+def to_identifier(text):
+    """Converts text to a valid Python identifier by replacing all
+    whitespace and punctuation."""
+    result = re.sub('_+', '_', text.translate(TRANS_TABLE))
+    if result and result[0] in string.digits:
+        result = '_' + result
+    return result
+
+
+def unique(alist):
+    """
+    Returns a list containing only unique elements from the input list (but preserves
+    order, unlike sets).
+
+    """
+    result = []
+    for item in alist:
+        if item not in result:
+            result.append(item)
+    return result
+
+
+def open_file(filepath):
+    """
+    Open the specified file path with the associated launcher in an OS-agnostic way.
+
+    """
+    if os.name == 'nt':  # Windows
+        return os.startfile(filepath)  # pylint: disable=no-member
+    elif sys.platform == 'darwin':  # Mac OSX
+        return subprocess.call(['open', filepath])
+    else:  # assume Linux or similar running a freedesktop-compliant GUI
+        return subprocess.call(['xdg-open', filepath])
+
+
+def ranges_to_list(ranges_string):
+    """Converts a sysfs-style ranges string, e.g. ``"0,2-4"``, into a list ,e.g ``[0,2,3,4]``"""
+    values = []
+    for rg in ranges_string.split(','):
+        if '-' in rg:
+            first, last = map(int, rg.split('-'))
+            values.extend(xrange(first, last + 1))
+        else:
+            values.append(int(rg))
+    return values
+
+
+def list_to_ranges(values):
+    """Converts a list, e.g ``[0,2,3,4]``, into a sysfs-style ranges string, e.g. ``"0,2-4"``"""
+    range_groups = []
+    for _, g in groupby(enumerate(values), lambda (i, x): i - x):
+        range_groups.append(map(itemgetter(1), g))
+    range_strings = []
+    for group in range_groups:
+        if len(group) == 1:
+            range_strings.append(str(group[0]))
+        else:
+            range_strings.append('{}-{}'.format(group[0], group[-1]))
+    return ','.join(range_strings)
+
+
+def list_to_mask(values, base=0x0):
+    """Converts the specified list of integer values into
+    a bit mask for those values. Optionally, the list can be
+    applied to an existing mask."""
+    for v in values:
+        base |= (1 << v)
+    return base
+
+
+def mask_to_list(mask):
+    """Converts the specfied integer bitmask into a list of
+    indexes of bits that are set in the mask."""
+    size = len(bin(mask)) - 2  # because of "0b"
+    return [size - i - 1 for i in xrange(size)
+            if mask & (1 << size - i - 1)]
+
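+# The two conversions complement each other (illustrative only; note that
+# mask_to_list() returns indexes highest-bit first):
+#
+#   list_to_mask([0, 1, 4])  ==>  0x13
+#   mask_to_list(0x13)       ==>  [4, 1, 0]
+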
+
+class Namespace(dict):
+    """
+    A dict-like object that allows treating keys and attributes
+    interchangeably (this means that keys are restricted to strings
+    that are valid Python identifiers).
+
+    """
+
+    def __getattr__(self, name):
+        try:
+            return self[name]
+        except KeyError:
+            raise AttributeError(name)
+
+    def __setattr__(self, name, value):
+        self[name] = value
+
+    def __setitem__(self, name, value):
+        if to_identifier(name) != name:
+            message = 'Key must be a valid identifier; got "{}"'
+            raise ValueError(message.format(name))
+        dict.__setitem__(self, name, value)
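+
+# Example usage (illustrative only):
+#
+#   ns = Namespace()
+#   ns.duration = 30          # same as ns['duration'] = 30
+#   ns['duration']            # ==> 30
+#   ns['not valid'] = 1       # raises ValueError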
diff --git a/wa/utils/serializer.py b/wa/utils/serializer.py
new file mode 100644
index 00000000..40fa93c3
--- /dev/null
+++ b/wa/utils/serializer.py
@@ -0,0 +1,245 @@
+"""
+This module contains wrappers for Python serialization modules for
+common formats that make it easier to serialize/deserialize WA
+Plain Old Data structures (serializable WA classes implement
+``to_pod()``/``from_pod()`` methods for converting between POD
+structures and Python class instances).
+
+The modifications to standard serialization procedures are:
+
+    - mappings are deserialized as ``OrderedDict``\ 's rather than standard
+      Python ``dict``\ 's. This allows for cleaner syntax in certain parts
+      of WA configuration (e.g. values to be written to files can be specified
+      as a dict, and they will be written in the order specified in the config).
+    - regular expressions are automatically encoded/decoded. This allows for
+      configuration values to be transparently specified as strings or regexes
+      in the POD config.
+
+This module exports the "wrapped" versions of serialization libraries,
+and these should be imported and used instead of importing the libraries
+directly, i.e. ::
+
+    from wa.utils.serializer import yaml
+    pod = yaml.load(fh)
+
+instead of ::
+
+    import yaml
+    pod = yaml.load(fh)
+
+It's also possible to use the serializer directly::
+
+    from wa.utils import serializer
+    pod = serializer.load(fh)
+
+This can also be used to ``dump()`` POD structures. By default,
+``dump()`` will produce JSON, but ``fmt`` parameter may be used to
+specify an alternative format (``yaml`` or ``python``). ``load()`` will
+use the file extension to guess the format, but ``fmt`` may also be used
+to specify it explicitly.
+
+"""
+import os
+import re
+import sys
+import json as _json
+from collections import OrderedDict
+from datetime import datetime
+
+import yaml as _yaml
+import dateutil.parser
+
+from wa.framework.exception import SerializerSyntaxError
+from wa.utils.types import regex_type
+from wa.utils.misc import isiterable
+
+
+__all__ = [
+    'json',
+    'yaml',
+    'read_pod',
+    'dump',
+    'load',
+]
+
+
+
+class WAJSONEncoder(_json.JSONEncoder):
+
+    def default(self, obj):
+        if hasattr(obj, 'to_pod'):
+            return obj.to_pod()
+        elif isinstance(obj, regex_type):
+            return 'REGEX:{}:{}'.format(obj.flags, obj.pattern)
+        elif isinstance(obj, datetime):
+            return 'DATET:{}'.format(obj.isoformat())
+        else:
+            return _json.JSONEncoder.default(self, obj)
+
+
+class WAJSONDecoder(_json.JSONDecoder):
+
+    def decode(self, s):
+        d = _json.JSONDecoder.decode(self, s)
+
+        def try_parse_object(v):
+            if isinstance(v, basestring) and v.startswith('REGEX:'):
+                _, flags, pattern = v.split(':', 2)
+                return re.compile(pattern, int(flags or 0))
+            elif isinstance(v, basestring) and v.startswith('DATET:'):
+                _, pattern = v.split(':', 1)
+                return dateutil.parser.parse(pattern)
+            else:
+                return v
+
+        def load_objects(d):
+            pairs = []
+            for k, v in d.iteritems():
+                if hasattr(v, 'iteritems'):
+                    pairs.append((k, load_objects(v)))
+                elif isiterable(v):
+                    pairs.append((k, [try_parse_object(i) for i in v]))
+                else:
+                    pairs.append((k, try_parse_object(v)))
+            return OrderedDict(pairs)
+
+        return load_objects(d)
+
+
+class json(object):
+
+    @staticmethod
+    def dump(o, wfh, indent=4, *args, **kwargs):
+        return _json.dump(o, wfh, cls=WAJSONEncoder, indent=indent, *args, **kwargs)
+
+
+    @staticmethod
+    def load(fh, *args, **kwargs):
+        try:
+            return _json.load(fh, cls=WAJSONDecoder, object_pairs_hook=OrderedDict, *args, **kwargs)
+        except ValueError as e:
+            raise SerializerSyntaxError(e.message)
+
+    @staticmethod
+    def loads(s, *args, **kwargs):
+        try:
+            return _json.loads(s, cls=WAJSONDecoder, object_pairs_hook=OrderedDict, *args, **kwargs)
+        except ValueError as e:
+            raise SerializerSyntaxError(e.message)
+
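+# A quick sketch of the round trip (illustrative; uses StringIO as a
+# stand-in for a real file handle):
+#
+#   from StringIO import StringIO
+#   buf = StringIO()
+#   json.dump({'pattern': re.compile('mmc[0-9]+')}, buf)
+#   buf.seek(0)
+#   json.load(buf)['pattern'].pattern  ==>  'mmc[0-9]+'
+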
+
+_mapping_tag = _yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG
+_regex_tag = u'tag:wa:regex'
+
+
+def _wa_dict_representer(dumper, data):
+    return dumper.represent_mapping(_mapping_tag, data.iteritems())
+
+
+def _wa_regex_representer(dumper, data):
+    text = '{}:{}'.format(data.flags, data.pattern)
+    return dumper.represent_scalar(_regex_tag, text)
+
+
+def _wa_dict_constructor(loader, node):
+    pairs = loader.construct_pairs(node)
+    seen_keys = set()
+    for k, _ in pairs:
+        if k in seen_keys:
+            raise ValueError('Duplicate entry: {}'.format(k))
+        seen_keys.add(k)
+    return OrderedDict(pairs)
+
+
+def _wa_regex_constructor(loader, node):
+    value = loader.construct_scalar(node)
+    flags, pattern = value.split(':', 1)
+    return re.compile(pattern, int(flags or 0))
+
+
+_yaml.add_representer(OrderedDict, _wa_dict_representer)
+_yaml.add_representer(regex_type, _wa_regex_representer)
+_yaml.add_constructor(_mapping_tag, _wa_dict_constructor)
+_yaml.add_constructor(_regex_tag, _wa_regex_constructor)
+
+
+class yaml(object):
+
+    @staticmethod
+    def dump(o, wfh, *args, **kwargs):
+        return _yaml.dump(o, wfh, *args, **kwargs)
+
+    @staticmethod
+    def load(fh, *args, **kwargs):
+        try:
+            return _yaml.load(fh, *args, **kwargs)
+        except _yaml.YAMLError as e:
+            lineno = None
+            if hasattr(e, 'problem_mark'):
+                lineno = e.problem_mark.line
+            raise SerializerSyntaxError(e.message, lineno)
+
+    loads = load
+
+
+class python(object):
+
+    @staticmethod
+    def dump(o, wfh, *args, **kwargs):
+        raise NotImplementedError()
+
+    @classmethod
+    def load(cls, fh, *args, **kwargs):
+        return cls.loads(fh.read())
+
+    @staticmethod
+    def loads(s, *args, **kwargs):
+        pod = {}
+        try:
+            exec s in pod
+        except SyntaxError as e:
+            raise SerializerSyntaxError(e.message, e.lineno)
+        for k in pod.keys():
+            if k.startswith('__'):
+                del pod[k]
+        return pod
+
+
+def read_pod(source, fmt=None):
+    if isinstance(source, basestring):
+        with open(source) as fh:
+            return _read_pod(fh, fmt)
+    elif hasattr(source, 'read') and (hasattr(source, 'name') or fmt):
+        return _read_pod(source, fmt)
+    else:
+        message = 'source must be a path or an open file handle; got {}'
+        raise ValueError(message.format(type(source)))
+
+
+def dump(o, wfh, fmt='json', *args, **kwargs):
+    serializer = {'yaml': yaml,
+                  'json': json,
+                  'python': python,
+                  'py': python,
+                  }.get(fmt)
+    if serializer is None:
+        raise ValueError('Unknown serialization format: "{}"'.format(fmt))
+    serializer.dump(o, wfh, *args, **kwargs)
+
+
+def load(s, fmt=None, *args, **kwargs):
+    return read_pod(s, fmt=fmt)
+
+
+def _read_pod(fh, fmt=None):
+    if fmt is None:
+        fmt = os.path.splitext(fh.name)[1].lower().strip('.')
+    if fmt == 'yaml':
+        return yaml.load(fh)
+    elif fmt == 'json':
+        return json.load(fh)
+    elif fmt == 'py':
+        return python.load(fh)
+    else:
+        raise ValueError('Unknown format "{}": {}'.format(fmt, getattr(fh, 'name', fh)))
diff --git a/wa/utils/types.py b/wa/utils/types.py
new file mode 100644
index 00000000..7a2e47ea
--- /dev/null
+++ b/wa/utils/types.py
@@ -0,0 +1,497 @@
+#    Copyright 2014-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+"""
+Routines for doing various type conversions. These usually embody some higher-level
+semantics than are present in standard Python types (e.g. ``boolean`` will convert the
+string ``"false"`` to ``False``, whereas non-empty strings are usually considered to be
+``True``).
+
+A lot of these are intended to specify type conversions declaratively in places like
+``Parameter``'s ``kind`` argument. These are basically "hacks" around the fact that Python
+is not the best language to use for configuration.
+
+"""
+import os
+import re
+import math
+import shlex
+import numbers
+from bisect import insort
+from collections import defaultdict
+
+from wa.utils.misc import isiterable, to_identifier
+
+
+def identifier(text):
+    """Converts text to a valid Python identifier by replacing all
+    whitespace and punctuation."""
+    return to_identifier(text)
+
+
+def boolean(value):
+    """
+    Returns bool represented by the value. This is different from
+    calling the builtin bool() in that it will interpret string representations.
+    e.g. boolean('0') and boolean('false') will both yield False.
+
+    """
+    false_strings = ['', '0', 'n', 'no']
+    if isinstance(value, basestring):
+        value = value.lower()
+        if value in false_strings or 'false'.startswith(value):
+            return False
+    return bool(value)
+
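+# For example (illustrative only):
+#
+#   boolean('false')  ==>  False
+#   boolean('0')      ==>  False
+#   boolean('true')   ==>  True
+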
+
+def integer(value):
+    """Handles conversions for string respresentations of binary, octal and hex."""
+    if isinstance(value, basestring):
+        return int(value, 0)
+    else:
+        return int(value)
+
+
+def numeric(value):
+    """
+    Returns the value as number (int if possible, or float otherwise), or
+    raises ``ValueError`` if the specified ``value`` does not have a
+    straightforward numeric conversion.
+
+    """
+    if isinstance(value, int):
+        return value
+    try:
+        fvalue = float(value)
+    except ValueError:
+        raise ValueError('Not numeric: {}'.format(value))
+    if not math.isnan(fvalue) and not math.isinf(fvalue):
+        ivalue = int(fvalue)
+        # yeah, yeah, I know. Whatever. This is best-effort.
+        if ivalue == fvalue:
+            return ivalue
+    return fvalue
+
+
+def list_of_strs(value):
+    """
+    Value must be iterable. All elements will be converted to strings.
+
+    """
+    if not isiterable(value):
+        raise ValueError(value)
+    return map(str, value)
+
+list_of_strings = list_of_strs
+
+
+def list_of_ints(value):
+    """
+    Value must be iterable. All elements will be converted to ``int``\ s.
+
+    """
+    if not isiterable(value):
+        raise ValueError(value)
+    return map(int, value)
+
+list_of_integers = list_of_ints
+
+
+def list_of_numbers(value):
+    """
+    Value must be iterable. All elements will be converted to numbers (either ``int``\ s or
+    ``float``\ s depending on the elements).
+
+    """
+    if not isiterable(value):
+        raise ValueError(value)
+    return map(numeric, value)
+
+
+def list_of_bools(value, interpret_strings=True):
+    """
+    Value must be iterable. All elements will be converted to ``bool``\ s.
+
+    .. note:: By default, ``boolean()`` conversion function will be used, which means that
+              strings like ``"0"`` or ``"false"`` will be interpreted as ``False``. If this
+              is undesirable, set ``interpret_strings`` to ``False``.
+
+    """
+    if not isiterable(value):
+        raise ValueError(value)
+    if interpret_strings:
+        return map(boolean, value)
+    else:
+        return map(bool, value)
+
+
+def list_of(type_):
+    """Generates a "list of" callable for the specified type. The callable
+    attempts to convert all elements in the passed value to the specified
+    ``type_``, raising ``ValueError`` on error."""
+
+    def __init__(self, values):
+        list.__init__(self, map(type_, values))
+
+    def append(self, value):
+        list.append(self, type_(value))
+
+    def extend(self, other):
+        list.extend(self, map(type_, other))
+
+    def __setitem__(self, idx, value):
+        list.__setitem__(self, idx, type_(value))
+
+    return type('list_of_{}s'.format(type_.__name__),
+                (list, ), {
+                    "__init__": __init__,
+                    "__setitem__": __setitem__,
+                    "append": append,
+                    "extend": extend,
+    })
+
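+# A short sketch of the generated type (illustrative; ``int_list`` is a
+# hypothetical name):
+#
+#   int_list = list_of(int)
+#   xs = int_list(['1', '2'])   # ==> [1, 2]
+#   xs.append('3')              # ==> [1, 2, 3]; elements coerced to int
+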
+
+def list_or_string(value):
+    """
+    Converts the value into a list of strings. If the value is not iterable,
+    a one-element list with stringified value will be returned.
+
+    """
+    if isinstance(value, basestring):
+        return [value]
+    else:
+        try:
+            return list(value)
+        except TypeError:  # list() raises TypeError for non-iterables
+            return [str(value)]
+
+
+def list_or_caseless_string(value):
+    """
+    Converts the value into a list of ``caseless_string``'s. If the value is not iterable
+    a one-element list with stringified value will be returned.
+
+    """
+    if isinstance(value, basestring):
+        return [caseless_string(value)]
+    else:
+        try:
+            return map(caseless_string, value)
+        except TypeError:  # map() raises TypeError for non-iterables
+            return [caseless_string(value)]
+
+
+def list_or(type_):
+    """
+    Generator for "list or" types. These take either a single value or a list
+    of values and return a list of the specified ``type_``, performing the
+    conversion on the value (if a single value is specified) or on each
+    element of the specified list.
+
+    """
+    list_type = list_of(type_)
+
+    class list_or_type(list_type):
+
+        def __init__(self, value):
+            # pylint: disable=non-parent-init-called,super-init-not-called
+            if isiterable(value):
+                list_type.__init__(self, value)
+            else:
+                list_type.__init__(self, [value])
+    return list_or_type
+
+
+list_or_integer = list_or(integer)
+list_or_number = list_or(numeric)
+list_or_bool = list_or(boolean)
+
+
+regex_type = type(re.compile(''))
+
+
+def regex(value):
+    """
+    Regular expression. If value is a string, it will be compiled with no flags. If you
+    want to specify flags, value must be precompiled.
+
+    """
+    if isinstance(value, regex_type):
+        return value
+    else:
+        return re.compile(value)
+
+
+class caseless_string(str):
+    """
+    Just like built-in Python string except case-insensitive on comparisons. However, the
+    case is preserved otherwise.
+
+    """
+
+    def __eq__(self, other):
+        if isinstance(other, basestring):
+            other = other.lower()
+        return self.lower() == other
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __cmp__(self, other):
+        if isinstance(other, basestring):
+            other = other.lower()
+        return cmp(self.lower(), other)
+
+    def format(self, *args, **kwargs):
+        return caseless_string(super(caseless_string, self).format(*args, **kwargs))
+
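+# For example (illustrative only):
+#
+#   caseless_string('Dhrystone') == 'dhrystone'   # ==> True
+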
+
+class arguments(list):
+    """
+    Represents command line arguments to be passed to a program.
+
+    """
+
+    def __init__(self, value=None):
+        if isiterable(value):
+            super(arguments, self).__init__(map(str, value))
+        elif isinstance(value, basestring):
+            posix = os.name != 'nt'
+            super(arguments, self).__init__(shlex.split(value, posix=posix))
+        elif value is None:
+            super(arguments, self).__init__()
+        else:
+            super(arguments, self).__init__([str(value)])
+
+    def append(self, value):
+        return super(arguments, self).append(str(value))
+
+    def extend(self, values):
+        return super(arguments, self).extend(map(str, values))
+
+    def __str__(self):
+        return ' '.join(self)
+
+
+class prioritylist(object):
+
+    def __init__(self):
+        """
+        Returns an OrderedReceivers object that externaly behaves
+        like a list but it maintains the order of its elements
+        according to their priority.
+        """
+        self.elements = defaultdict(list)
+        self.is_ordered = True
+        self.priorities = []
+        self.size = 0
+        self._cached_elements = None
+
+    def add(self, new_element, priority=0):
+        """
+        Adds a new item to the list.
+
+        - ``new_element`` the element to be inserted into the prioritylist
+        - ``priority`` is the priority of the element, which specifies its
+          order within the list
+        """
+        self._add_element(new_element, priority)
+
+    def add_before(self, new_element, element):
+        priority, index = self._priority_index(element)
+        self._add_element(new_element, priority, index)
+
+    def add_after(self, new_element, element):
+        priority, index = self._priority_index(element)
+        self._add_element(new_element, priority, index + 1)
+
+    def index(self, element):
+        return self._to_list().index(element)
+
+    def remove(self, element):
+        index = self.index(element)
+        self.__delitem__(index)
+
+    def _priority_index(self, element):
+        for priority, elements in self.elements.iteritems():
+            if element in elements:
+                return (priority, elements.index(element))
+        raise IndexError(element)
+
+    def _to_list(self):
+        if self._cached_elements is None:
+            self._cached_elements = []
+            for priority in self.priorities:
+                self._cached_elements += self.elements[priority]
+        return self._cached_elements
+
+    def _add_element(self, element, priority, index=None):
+        if index is None:
+            self.elements[priority].append(element)
+        else:
+            self.elements[priority].insert(index, element)
+        self.size += 1
+        self._cached_elements = None
+        if priority not in self.priorities:
+            insort(self.priorities, priority)
+
+    def _delete(self, priority, priority_index):
+        del self.elements[priority][priority_index]
+        self.size -= 1
+        if len(self.elements[priority]) == 0:
+            self.priorities.remove(priority)
+        self._cached_elements = None
+
+    def __iter__(self):
+        for priority in reversed(self.priorities):  # highest priority first
+            for element in self.elements[priority]:
+                yield element
+
+    def __getitem__(self, index):
+        return self._to_list()[index]
+
+    def __delitem__(self, index):
+        if isinstance(index, numbers.Integral):
+            index = int(index)
+            if index < 0:
+                index_range = [len(self) + index]
+            else:
+                index_range = [index]
+        elif isinstance(index, slice):
+            # slice.indices() clamps the bounds and fills in defaults
+            index_range = range(*index.indices(self.size))
+        else:
+            raise ValueError('Invalid index {}'.format(index))
+        current_global_offset = 0
+        priority_counts = {priority: count for (priority, count) in
+                           zip(self.priorities, [len(self.elements[p]) for p in self.priorities])}
+        for priority in self.priorities:
+            if not index_range:
+                break
+            priority_offset = 0
+            while index_range:
+                del_index = index_range[0]
+                if priority_counts[priority] + current_global_offset <= del_index:
+                    current_global_offset += priority_counts[priority]
+                    break
+                within_priority_index = del_index - \
+                    (current_global_offset + priority_offset)
+                self._delete(priority, within_priority_index)
+                priority_offset += 1
+                index_range.pop(0)
+
+    def __len__(self):
+        return self.size
+
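+# A minimal sketch of prioritylist behaviour (illustrative only):
+#
+#   pl = prioritylist()
+#   pl.add('cleanup', priority=0)
+#   pl.add('setup', priority=10)
+#   list(pl)  ==>  ['setup', 'cleanup']   # iteration is highest priority first
+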
+
+class TreeNode(object):
+
+    @property
+    def is_root(self):
+        return self.parent is None
+
+    @property
+    def is_leaf(self):
+        return not self.children
+
+    @property
+    def parent(self):
+        return self._parent
+
+    @parent.setter
+    def parent(self, parent):
+        if self._parent:
+            self._parent.remove_child(self)
+        self._parent = parent
+        if self._parent:
+            self._parent.add_child(self)
+
+    @property
+    def children(self):
+        return [c for c in self._children]
+
+    def __init__(self):
+        self._parent = None
+        self._children = []
+
+    def add_child(self, node):
+        if node == self:
+            raise ValueError('A node cannot be its own child.')
+        if node in self._children:
+            return
+        for ancestor in self.iter_ancestors():
+            if ancestor == node:
+                raise ValueError('Cannot add {} as a child, as it is already an ancestor.'.format(node))
+        if node.parent and node.parent != self:
+            raise ValueError('Cannot add {}, as it already has a parent.'.format(node))
+        self._children.append(node)
+        node._parent = self
+
+    def remove_child(self, node):
+        if node not in self._children:
+            message = 'Cannot remove: {} is not a child of {}'
+            raise ValueError(message.format(node, self))
+        self._children.remove(node)
+        node._parent = None
+
+    def iter_ancestors(self, after=None, upto=None):
+        if upto == self:
+            return
+        ancestor = self
+        if after:
+            while ancestor != after:
+                ancestor = ancestor.parent
+        while ancestor and ancestor != upto:
+            yield ancestor
+            ancestor = ancestor.parent
+
+    def iter_descendants(self):
+        for child in self.children:
+            yield child
+            for grandchild in child.iter_descendants():
+                yield grandchild
+
+    def iter_leaves(self):
+        for descendant in self.iter_descendants():
+            if descendant.is_leaf:
+                yield descendant
+
+    def get_common_ancestor(self, other):
+        if self.has_ancestor(other):
+            return other
+        if other.has_ancestor(self):
+            return self
+        for my_ancestor in self.iter_ancestors():
+            for other_ancestor in other.iter_ancestors():
+                if my_ancestor == other_ancestor:
+                    return my_ancestor
+
+    def get_root(self):
+        node = self
+        while not node.is_root:
+            node = node.parent
+        return node
+
+    def has_ancestor(self, other):
+        for ancestor in self.iter_ancestors():
+            if other == ancestor:
+                return True
+        return False
+
+    def has_descendant(self, other):
+        for descendant in self.iter_descendants():
+            if other == descendant:
+                return True
+        return False
+
diff --git a/wa/workloads/__init__.py b/wa/workloads/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/wa/workloads/dhrystone/__init__.py b/wa/workloads/dhrystone/__init__.py
new file mode 100644
index 00000000..69f554a0
--- /dev/null
+++ b/wa/workloads/dhrystone/__init__.py
@@ -0,0 +1,130 @@
+#    Copyright 2013-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#pylint: disable=E1101,W0201
+
+import os
+import re
+
+from wa import Workload, Parameter, ConfigError, runmethod
+
+
+this_dir = os.path.dirname(__file__)
+
+
+class Dhrystone(Workload):
+
+    name = 'dhrystone'
+    description = """
+    Runs the Dhrystone benchmark.
+
+    Original source from::
+
+        http://classes.soe.ucsc.edu/cmpe202/benchmarks/standard/dhrystone.c
+
+    This version has been modified to configure duration and the number of
+    threads used.
+
+    """
+
+    bm_regex = re.compile(r'This machine benchmarks at (?P<score>\d+)')
+    dmips_regex = re.compile(r'(?P<score>\d+) DMIPS')
+    time_regex = re.compile(r'Total dhrystone run time: (?P<time>[0-9.]+)')
+
+    default_mloops = 100
+
+    parameters = [
+        Parameter('duration', kind=int, default=0,
+                  description='The duration, in seconds, for which dhrystone will be executed. '
+                              'Either this or ``mloops`` should be specified but not both.'),
+        Parameter('mloops', kind=int, default=0,
+                  description='Millions of loops to run. Either this or ``duration`` should be '
+                              'specified, but not both. If neither is specified, this will default '
+                              'to ``{}``'.format(default_mloops)),
+        Parameter('threads', kind=int, default=4,
+                  description='The number of separate dhrystone "threads" that will be forked.'),
+        Parameter('delay', kind=int, default=0,
+                  description=('The delay, in seconds, between kicking off of dhrystone '
+                               'threads (if ``threads`` > 1).')),
+        Parameter('taskset_mask', kind=int, default=0,
+                  description='The processes spawned by dhrystone will be pinned to cores '
+                              'as specified by this parameter.'),
+    ]
+
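+    # An agenda entry for this workload might look like this (a sketch,
+    # assuming the usual WA YAML agenda format):
+    #
+    #   workloads:
+    #       - name: dhrystone
+    #         params:
+    #             threads: 2
+    #             duration: 30
+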
+    @runmethod
+    def initialize(self, context):
+        host_exe = os.path.join(this_dir, 'dhrystone')
+        Dhrystone.target_exe = self.target.install(host_exe)
+
+    def setup(self, context):
+        execution_mode = '-l {}'.format(self.mloops) if self.mloops else '-r {}'.format(self.duration)
+        if self.taskset_mask:
+            taskset_string = 'busybox taskset 0x{:x} '.format(self.taskset_mask)
+        else:
+            taskset_string = ''
+        self.command = '{}{} {} -t {} -d {}'.format(taskset_string,
+                                                    self.target_exe,
+                                                    execution_mode,
+                                                    self.threads, self.delay)
+        self.timeout = self.duration and self.duration + self.delay * self.threads + 10 or 300
+        self.target.killall('dhrystone')
+
+    def run(self, context):
+        try:
+            self.output = self.target.execute(self.command, timeout=self.timeout, check_exit_code=False)
+        except KeyboardInterrupt:
+            self.target.killall('dhrystone')
+            raise
+
+    def update_result(self, context):
+        outfile = os.path.join(context.output_directory, 'dhrystone.output')
+        with open(outfile, 'w') as wfh:
+            wfh.write(self.output)
+        score_count = 0
+        dmips_count = 0
+        total_score = 0
+        total_dmips = 0
+        for line in self.output.split('\n'):
+            match = self.time_regex.search(line)
+            if match:
+                context.add_metric('time', float(match.group('time')), 'seconds', lower_is_better=True)
+            else:
+                match = self.bm_regex.search(line)
+                if match:
+                    metric = 'thread {} score'.format(score_count)
+                    value = int(match.group('score'))
+                    context.add_metric(metric, value)
+                    score_count += 1
+                    total_score += value
+                else:
+                    match = self.dmips_regex.search(line)
+                    if match:
+                        metric = 'thread {} DMIPS'.format(dmips_count)
+                        value = int(match.group('score'))
+                        context.add_metric(metric, value)
+                        dmips_count += 1
+                        total_dmips += value
+        context.add_metric('total DMIPS', total_dmips)
+        context.add_metric('total score', total_score)
+
+    @runmethod
+    def finalize(self, context):
+        self.target.uninstall('dhrystone')
+
+    def validate(self):
+        if self.mloops and self.duration:  # pylint: disable=E0203
+            raise ConfigError('mloops and duration cannot both be specified for dhrystone.')
+        if not self.mloops and not self.duration:  # pylint: disable=E0203
+            self.mloops = self.default_mloops
+
diff --git a/wa/workloads/dhrystone/dhrystone b/wa/workloads/dhrystone/dhrystone
new file mode 100755
index 00000000..68cd9b71
Binary files /dev/null and b/wa/workloads/dhrystone/dhrystone differ
diff --git a/wa/workloads/dhrystone/src/build.sh b/wa/workloads/dhrystone/src/build.sh
new file mode 100755
index 00000000..61fcce5d
--- /dev/null
+++ b/wa/workloads/dhrystone/src/build.sh
@@ -0,0 +1,23 @@
+#    Copyright 2013-2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+ndk-build
+if [[ -f libs/armeabi/dhrystone ]]; then
+	echo "Dhrystone binary updated."
+	cp libs/armeabi/dhrystone ..
+	rm -rf libs
+	rm -rf obj
+fi
diff --git a/wa/workloads/dhrystone/src/jni/Android.mk b/wa/workloads/dhrystone/src/jni/Android.mk
new file mode 100644
index 00000000..2f974319
--- /dev/null
+++ b/wa/workloads/dhrystone/src/jni/Android.mk
@@ -0,0 +1,11 @@
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_SRC_FILES:= dhrystone.c 
+LOCAL_MODULE := dhrystone
+LOCAL_MODULE_TAGS := optional
+LOCAL_STATIC_LIBRARIES := libc
+LOCAL_SHARED_LIBRARIES := liblog
+LOCAL_LDLIBS := -llog
+LOCAL_CFLAGS := -O2
+include $(BUILD_EXECUTABLE)
diff --git a/wa/workloads/dhrystone/src/jni/dhrystone.c b/wa/workloads/dhrystone/src/jni/dhrystone.c
new file mode 100644
index 00000000..9f16003e
--- /dev/null
+++ b/wa/workloads/dhrystone/src/jni/dhrystone.c
@@ -0,0 +1,959 @@
+/* ARM modifications to the original Dhrystone are */
+/*    Copyright 2013-2015 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+
+/***** hpda:net.sources / homxb!gemini /  1:58 am  Apr  1, 1986*/
+/*	EVERBODY:	Please read "APOLOGY" below. -rick 01/06/85
+ *			See introduction in net.arch, or net.micro
+ *
+ *	"DHRYSTONE" Benchmark Program
+ *
+ *	Version:	C/1.1, 12/01/84
+ *
+ *	Date:		PROGRAM updated 01/06/86, RESULTS updated 03/31/86
+ *
+ *	Author:		Reinhold P. Weicker,  CACM Vol 27, No 10, 10/84 pg. 1013
+ *			Translated from ADA by Rick Richardson
+ *			Every method to preserve ADA-likeness has been used,
+ *			at the expense of C-ness.
+ *
+ *	Compile:	cc -O dry.c -o drynr			: No registers
+ *			cc -O -DREG=register dry.c -o dryr	: Registers
+ *
+ *	Defines:	Defines are provided for old C compiler's
+ *			which don't have enums, and can't assign structures.
+ *			The time(2) function is library dependant; Most
+ *			return the time in seconds, but beware of some, like
+ *			Aztec C, which return other units.
+ *			The LOOPS define is initially set for 50000 loops.
+ *			If you have a machine with large integers and is
+ *			very fast, please change this number to 500000 to
+ *			get better accuracy.  Please select the way to
+ *			measure the execution time using the TIME define.
+ *			For single user machines, time(2) is adequate. For
+ *			multi-user machines where you cannot get single-user
+ *			access, use the times(2) function.  If you have
+ *			neither, use a stopwatch in the dead of night.
+ *			Use a "printf" at the point marked "start timer"
+ *			to begin your timings. DO NOT use the UNIX "time(1)"
+ *			command, as this will measure the total time to
+ *			run this program, which will (erroneously) include
+ *			the time to malloc(3) storage and to compute the
+ *			time it takes to do nothing.
+ *
+ *	Run:		drynr; dryr
+ *
+ *	Results:	If you get any new machine/OS results, please send to:
+ *
+ *				ihnp4!castor!pcrat!rick
+ *
+ *			and thanks to all that do.  Space prevents listing
+ *			the names of those who have provided some of these
+ *			results.  I'll be forwarding these results to
+ *			Rheinhold Weicker.
+ *
+ *	Note:		I order the list in increasing performance of the
+ *			"with registers" benchmark.  If the compiler doesn't
+ *			provide register variables, then the benchmark
+ *			is the same for both REG and NOREG.
+ *
+ *	PLEASE:		Send complete information about the machine type,
+ *			clock speed, OS and C manufacturer/version.  If
+ *			the machine is modified, tell me what was done.
+ *			On UNIX, execute uname -a and cc -V to get this info.
+ *
+ *	80x8x NOTE:	80x8x benchers: please try to do all memory models
+ *			for a particular compiler.
+ *
+ *	APOLOGY (1/30/86):
+ *		Well, I goofed things up!  As pointed out by Haakon Bugge,
+ *		the line of code marked "GOOF" below was missing from the
+ *		Dhrystone distribution for the last several months.  It
+ *		*WAS* in a backup copy I made last winter, so no doubt it
+ *		was victimized by sleepy fingers operating vi!
+ *
+ *		The effect of the line missing is that the reported benchmarks
+ *		are 15% too fast (at least on a 80286).  Now, this creates
+ *		a dilema - do I throw out ALL the data so far collected
+ *		and use only results from this (corrected) version, or
+ *		do I just keep collecting data for the old version?
+ *
+ *		Since the data collected so far *is* valid as long as it
+ *		is compared with like data, I have decided to keep
+ *		TWO lists- one for the old benchmark, and one for the
+ *		new.  This also gives me an opportunity to correct one
+ *		other error I made in the instructions for this benchmark.
+ *		My experience with C compilers has been mostly with
+ *		UNIX 'pcc' derived compilers, where the 'optimizer' simply
+ *		fixes sloppy code generation (peephole optimization).
+ *		But today, there exist C compiler optimizers that will actually
+ *		perform optimization in the Computer Science sense of the word,
+ *		by removing, for example, assignments to a variable whose
+ *		value is never used.  Dhrystone, unfortunately, provides
+ *		lots of opportunities for this sort of optimization.
+ *
+ *		I request that benchmarkers re-run this new, corrected
+ *		version of Dhrystone, turning off or bypassing optimizers
+ *		which perform more than peephole optimization.  Please
+ *		indicate the version of Dhrystone used when reporting the
+ *		results to me.
+ *		
+ * RESULTS BEGIN HERE
+ *
+ *----------------DHRYSTONE VERSION 1.1 RESULTS BEGIN--------------------------
+ *
+ * MACHINE	MICROPROCESSOR	OPERATING	COMPILER	DHRYSTONES/SEC.
+ * TYPE				SYSTEM				NO REG	REGS
+ * --------------------------	------------	-----------	---------------
+ * Apple IIe	65C02-1.02Mhz	DOS 3.3		Aztec CII v1.05i  37	  37
+ * -		Z80-2.5Mhz	CPM-80 v2.2	Aztec CII v1.05g  91	  91
+ * -		8086-8Mhz	RMX86 V6	Intel C-86 V2.0	 197	 203LM??
+ * IBM PC/XT	8088-4.77Mhz	COHERENT 2.3.43	Mark Wiiliams	 259	 275
+ * -		8086-8Mhz	RMX86 V6	Intel C-86 V2.0	 287	 304 ??
+ * Fortune 32:16 68000-6Mhz	V7+sys3+4.1BSD  cc		 360	 346
+ * PDP-11/34A	w/FP-11C	UNIX V7m	cc		 406	 449
+ * Macintosh512	68000-7.7Mhz	Mac ROM O/S	DeSmet(C ware)	 625	 625
+ * VAX-11/750	w/FPA		UNIX 4.2BSD	cc		 831	 852
+ * DataMedia 932 68000-10Mhz	UNIX sysV	cc		 837	 888
+ * Plexus P35	68000-12.5Mhz	UNIX sysIII	cc		 835	 894
+ * ATT PC7300	68010-10Mhz	UNIX 5.0.3	cc		 973	1034
+ * Compaq II	80286-8Mhz	MSDOS 3.1	MS C 3.0 	1086	1140 LM
+ * IBM PC/AT    80286-7.5Mhz    Venix/286 SVR2  cc              1159    1254 *15
+ * Compaq II	80286-8Mhz	MSDOS 3.1	MS C 3.0 	1190	1282 MM
+ * MicroVAX II	-		Mach/4.3	cc		1361	1385
+ * DEC uVAX II	-		Ultrix-32m v1.1	cc		1385	1399
+ * Compaq II	80286-8Mhz	MSDOS 3.1	MS C 3.0 	1351	1428
+ * VAX 11/780	-		UNIX 4.2BSD	cc		1417	1441
+ * VAX-780/MA780		Mach/4.3	cc		1428	1470
+ * VAX 11/780	-		UNIX 5.0.1	cc 4.1.1.31	1650	1640
+ * Ridge 32C V1	-		ROS 3.3		Ridge C (older)	1628	1695
+ * Gould PN6005	-		UTX 1.1c+ (4.2)	cc		1732	1884
+ * Gould PN9080	custom ECL	UTX-32 1.1C	cc		4745	4992
+ * VAX-784	-		Mach/4.3	cc		5263	5555 &4
+ * VAX 8600	-		4.3 BSD		cc		6329	6423
+ * Amdahl 5860	-		UTS sysV	cc 1.22	       28735   28846
+ * IBM3090/200	-		?		?	       31250   31250
+ *
+ *
+ *----------------DHRYSTONE VERSION 1.0 RESULTS BEGIN--------------------------
+ *
+ * MACHINE	MICROPROCESSOR	OPERATING	COMPILER	DHRYSTONES/SEC.
+ * TYPE				SYSTEM				NO REG	REGS
+ * --------------------------	------------	-----------	---------------
+ * Commodore 64	6510-1MHz	C64 ROM		C Power 2.8	  36	  36
+ * HP-110	8086-5.33Mhz	MSDOS 2.11	Lattice 2.14	 284	 284
+ * IBM PC/XT	8088-4.77Mhz	PC/IX		cc		 271	 294
+ * CCC 3205	-		Xelos(SVR2) 	cc		 558	 592
+ * Perq-II	2901 bitslice	Accent S5c 	cc (CMU)	 301	 301
+ * IBM PC/XT	8088-4.77Mhz	COHERENT 2.3.43	MarkWilliams cc  296	 317
+ * Cosmos	68000-8Mhz	UniSoft		cc		 305	 322
+ * IBM PC/XT	8088-4.77Mhz	Venix/86 2.0	cc		 297	 324
+ * DEC PRO 350  11/23           Venix/PRO SVR2  cc               299     325
+ * IBM PC	8088-4.77Mhz	MSDOS 2.0	b16cc 2.0	 310	 340
+ * PDP11/23	11/23           Venix (V7)      cc               320     358
+ * Commodore Amiga		?		Lattice 3.02	 368	 371
+ * PC/XT        8088-4.77Mhz    Venix/86 SYS V  cc               339     377
+ * IBM PC	8088-4.77Mhz	MSDOS 2.0	CI-C86 2.20M	 390	 390
+ * IBM PC/XT	8088-4.77Mhz	PCDOS 2.1	Wizard 2.1	 367	 403
+ * IBM PC/XT	8088-4.77Mhz	PCDOS 3.1	Lattice 2.15	 403	 403 @
+ * Colex DM-6	68010-8Mhz	Unisoft SYSV	cc		 378	 410
+ * IBM PC	8088-4.77Mhz	PCDOS 3.1	Datalight 1.10	 416	 416
+ * IBM PC	NEC V20-4.77Mhz	MSDOS 3.1	MS 3.1 		 387	 420
+ * IBM PC/XT	8088-4.77Mhz	PCDOS 2.1	Microsoft 3.0	 390	 427
+ * IBM PC	NEC V20-4.77Mhz	MSDOS 3.1	MS 3.1 (186) 	 393	 427
+ * PDP-11/34	-		UNIX V7M	cc		 387	 438
+ * IBM PC	8088, 4.77mhz	PC-DOS 2.1	Aztec C v3.2d	 423	 454
+ * Tandy 1000	V20, 4.77mhz	MS-DOS 2.11	Aztec C v3.2d	 423	 458
+ * Tandy TRS-16B 68000-6Mhz	Xenix 1.3.5	cc		 438	 458
+ * PDP-11/34	-		RSTS/E		decus c		 438	 495
+ * Onyx C8002	Z8000-4Mhz	IS/1 1.1 (V7)	cc		 476	 511
+ * Tandy TRS-16B 68000-6Mhz	Xenix 1.3.5	Green Hills	 609	 617
+ * DEC PRO 380  11/73           Venix/PRO SVR2  cc               577     628
+ * FHL QT+	68000-10Mhz	Os9/68000	version 1.3	 603	 649 FH
+ * Apollo DN550	68010-?Mhz	AegisSR9/IX	cc 3.12		 666	 666
+ * HP-110	8086-5.33Mhz	MSDOS 2.11	Aztec-C		 641	 676 
+ * ATT PC6300	8086-8Mhz	MSDOS 2.11	b16cc 2.0	 632	 684
+ * IBM PC/AT	80286-6Mhz	PCDOS 3.0	CI-C86 2.1	 666	 684
+ * Tandy 6000	68000-8Mhz	Xenix 3.0	cc		 694	 694
+ * IBM PC/AT	80286-6Mhz	Xenix 3.0	cc		 684	 704 MM
+ * Macintosh	68000-7.8Mhz 2M	Mac Rom		Mac C 32 bit int 694	 704
+ * Macintosh	68000-7.7Mhz	-		MegaMax C 2.0	 661	 709
+ * Macintosh512	68000-7.7Mhz	Mac ROM O/S	DeSmet(C ware)	 714	 714
+ * IBM PC/AT	80286-6Mhz	Xenix 3.0	cc		 704	 714 LM
+ * Codata 3300	68000-8Mhz	UniPlus+ (v7)	cc		 678	 725
+ * WICAT MB	68000-8Mhz	System V	WICAT C 4.1	 585	 731 ~
+ * Cadmus 9000	68010-10Mhz	UNIX		cc		 714	 735
+ * AT&T 6300    8086-8Mhz       Venix/86 SVR2   cc               668     743
+ * Cadmus 9790	68010-10Mhz 1MB	SVR0,Cadmus3.7	cc		 720	 747
+ * NEC PC9801F	8086-8Mhz	PCDOS 2.11	Lattice 2.15	 768	  -  @
+ * ATT PC6300	8086-8Mhz	MSDOS 2.11	CI-C86 2.20M	 769	 769
+ * Burroughs XE550 68010-10Mhz	Centix 2.10	cc		 769	 769 CT1
+ * EAGLE/TURBO  8086-8Mhz       Venix/86 SVR2   cc               696     779
+ * ALTOS 586	8086-10Mhz	Xenix 3.0b	cc 		 724	 793
+ * DEC 11/73	J-11 micro	Ultrix-11 V3.0	cc		 735	 793
+ * ATT 3B2/300	WE32000-?Mhz	UNIX 5.0.2	cc		 735	 806
+ * Apollo DN320	68010-?Mhz	AegisSR9/IX	cc 3.12		 806	 806
+ * IRIS-2400	68010-10Mhz	UNIX System V	cc		 772	 829
+ * Atari 520ST  68000-8Mhz      TOS             DigResearch      839     846
+ * IBM PC/AT	80286-6Mhz	PCDOS 3.0	MS 3.0(large)	 833	 847 LM
+ * WICAT MB	68000-8Mhz	System V	WICAT C 4.1	 675	 853 S~
+ * VAX 11/750	-		Ultrix 1.1	4.2BSD cc	 781	 862
+ * CCC  7350A	68000-8MHz	UniSoft V.2	cc		 821	 875
+ * VAX 11/750	-		UNIX 4.2bsd	cc		 862	 877
+ * Fast Mac	68000-7.7Mhz	-		MegaMax C 2.0	 839	 904 +
+ * IBM PC/XT	8086-9.54Mhz	PCDOS 3.1	Microsoft 3.0	 833	 909 C1
+ * DEC 11/44			Ultrix-11 V3.0	cc		 862	 909
+ * Macintosh	68000-7.8Mhz 2M	Mac Rom		Mac C 16 bit int 877	 909 S
+ * CCC 3210	-		Xelos R01(SVR2)	cc		 849	 924
+ * CCC 3220	-               Ed. 7 v2.3      cc		 892	 925
+ * IBM PC/AT	80286-6Mhz	Xenix 3.0	cc -i		 909	 925
+ * AT&T 6300	8086, 8mhz	MS-DOS 2.11	Aztec C v3.2d	 862	 943
+ * IBM PC/AT	80286-6Mhz	Xenix 3.0	cc		 892	 961
+ * VAX 11/750	w/FPA		Eunice 3.2	cc		 914	 976
+ * IBM PC/XT	8086-9.54Mhz	PCDOS 3.1	Wizard 2.1	 892	 980 C1
+ * IBM PC/XT	8086-9.54Mhz	PCDOS 3.1	Lattice 2.15	 980	 980 C1
+ * Plexus P35	68000-10Mhz	UNIX System III cc		 984	 980
+ * PDP-11/73	KDJ11-AA 15Mhz	UNIX V7M 2.1	cc		 862     981
+ * VAX 11/750	w/FPA		UNIX 4.3bsd	cc		 994	 997
+ * IRIS-1400	68010-10Mhz	UNIX System V	cc		 909	1000
+ * IBM PC/AT	80286-6Mhz	Venix/86 2.1	cc		 961	1000
+ * IBM PC/AT	80286-6Mhz	PCDOS 3.0	b16cc 2.0	 943	1063
+ * Zilog S8000/11 Z8001-5.5Mhz	Zeus 3.2	cc		1011	1084
+ * NSC ICM-3216 NSC 32016-10Mhz	UNIX SVR2	cc		1041	1084
+ * IBM PC/AT	80286-6Mhz	PCDOS 3.0	MS 3.0(small)	1063	1086
+ * VAX 11/750	w/FPA		VMS		VAX-11 C 2.0	 958	1091
+ * Stride	68000-10Mhz	System-V/68	cc		1041	1111
+ * Plexus P/60  MC68000-12.5Mhz	UNIX SYSIII	Plexus		1111	1111
+ * ATT PC7300	68010-10Mhz	UNIX 5.0.2	cc		1041	1111
+ * CCC 3230	-		Xelos R01(SVR2)	cc		1040	1126
+ * Stride	68000-12Mhz	System-V/68	cc		1063	1136
+ * IBM PC/AT    80286-6Mhz      Venix/286 SVR2  cc              1056    1149
+ * Plexus P/60  MC68000-12.5Mhz	UNIX SYSIII	Plexus		1111	1163 T
+ * IBM PC/AT	80286-6Mhz	PCDOS 3.0	Datalight 1.10	1190	1190
+ * ATT PC6300+	80286-6Mhz	MSDOS 3.1	b16cc 2.0	1111	1219
+ * IBM PC/AT	80286-6Mhz	PCDOS 3.1	Wizard 2.1	1136	1219
+ * Sun2/120	68010-10Mhz	Sun 4.2BSD	cc		1136	1219
+ * IBM PC/AT	80286-6Mhz	PCDOS 3.0	CI-C86 2.20M	1219	1219
+ * WICAT PB	68000-8Mhz	System V	WICAT C 4.1	 998	1226 ~
+ * MASSCOMP 500	68010-10MHz	RTU V3.0	cc (V3.2)	1156	1238
+ * Alliant FX/8 IP (68012-12Mhz) Concentrix	cc -ip;exec -i 	1170	1243 FX
+ * Cyb DataMate	68010-12.5Mhz	Uniplus 5.0	Unisoft cc	1162	1250
+ * PDP 11/70	-		UNIX 5.2	cc		1162	1250
+ * IBM PC/AT	80286-6Mhz	PCDOS 3.1	Lattice 2.15	1250	1250
+ * IBM PC/AT	80286-7.5Mhz	Venix/86 2.1	cc		1190	1315 *15
+ * Sun2/120	68010-10Mhz	Standalone	cc		1219	1315
+ * Intel 380	80286-8Mhz	Xenix R3.0up1	cc		1250	1315 *16
+ * Sequent Balance 8000	NS32032-10MHz	Dynix 2.0	cc	1250	1315 N12
+ * IBM PC/DSI-32 32032-10Mhz	MSDOS 3.1	GreenHills 2.14	1282	1315 C3
+ * ATT 3B2/400	WE32100-?Mhz	UNIX 5.2	cc		1315	1315
+ * CCC 3250XP	-		Xelos R01(SVR2)	cc		1215	1318
+ * IBM PC/RT 032 RISC(801?)?Mhz BSD 4.2         cc              1248    1333 RT
+ * DG MV4000	-		AOS/VS 5.00	cc		1333	1333
+ * IBM PC/AT	80286-8Mhz	Venix/86 2.1	cc		1275	1380 *16
+ * IBM PC/AT	80286-6Mhz	MSDOS 3.0	Microsoft 3.0	1250	1388
+ * ATT PC6300+	80286-6Mhz	MSDOS 3.1	CI-C86 2.20M	1428	1428
+ * COMPAQ/286   80286-8Mhz      Venix/286 SVR2  cc              1326    1443
+ * IBM PC/AT    80286-7.5Mhz    Venix/286 SVR2  cc              1333    1449 *15
+ * WICAT PB	68000-8Mhz	System V	WICAT C 4.1	1169	1464 S~
+ * Tandy II/6000 68000-8Mhz	Xenix 3.0	cc      	1384	1477
+ * MicroVAX II	-		Mach/4.3	cc		1513	1536
+ * WICAT MB	68000-12.5Mhz	System V	WICAT C 4.1	1246	1537 ~
+ * IBM PC/AT    80286-9Mhz      SCO Xenix V     cc              1540    1556 *18
+ * Cyb DataMate	68010-12.5Mhz	Uniplus 5.0	Unisoft cc	1470	1562 S
+ * VAX 11/780	-		UNIX 5.2	cc		1515	1562
+ * MicroVAX-II	-		-		-		1562	1612
+ * VAX-780/MA780		Mach/4.3	cc		1587	1612
+ * VAX 11/780	-		UNIX 4.3bsd	cc		1646	1662
+ * Apollo DN660	-		AegisSR9/IX	cc 3.12		1666	1666
+ * ATT 3B20	-		UNIX 5.2	cc		1515	1724
+ * NEC PC-98XA	80286-8Mhz	PCDOS 3.1	Lattice 2.15	1724	1724 @
+ * HP9000-500	B series CPU	HP-UX 4.02	cc		1724	-
+ * Ridge 32C V1	-		ROS 3.3		Ridge C (older)	1776	-
+ * IBM PC/STD	80286-8Mhz	MSDOS 3.0 	Microsoft 3.0	1724	1785 C2
+ * WICAT MB	68000-12.5Mhz	System V	WICAT C 4.1	1450	1814 S~
+ * WICAT PB	68000-12.5Mhz	System V	WICAT C 4.1	1530	1898 ~
+ * DEC-2065	KL10-Model B	TOPS-20 6.1FT5	Port. C Comp.	1937	1946
+ * Gould PN6005	-		UTX 1.1(4.2BSD)	cc		1675	1964
+ * DEC2060	KL-10		TOPS-20		cc		2000	2000 NM
+ * Intel 310AP	80286-8Mhz	Xenix 3.0	cc		1893	2009
+ * VAX 11/785	-		UNIX 5.2	cc		2083	2083
+ * VAX 11/785	-		VMS		VAX-11 C 2.0	2083	2083
+ * VAX 11/785	-		UNIX SVR2	cc		2123	2083
+ * VAX 11/785   -               ULTRIX-32 1.1   cc		2083    2091 
+ * VAX 11/785	-		UNIX 4.3bsd	cc		2135	2136
+ * WICAT PB	68000-12.5Mhz	System V	WICAT C 4.1	1780	2233 S~
+ * Pyramid 90x	-		OSx 2.3		cc		2272	2272
+ * Pyramid 90x	FPA,cache,4Mb	OSx 2.5		cc no -O	2777	2777
+ * Pyramid 90x	w/cache		OSx 2.5		cc w/-O		3333	3333
+ * IBM-4341-II	-		VM/SP3		Waterloo C 1.2  3333	3333
+ * IRIS-2400T	68020-16.67Mhz	UNIX System V	cc		3105	3401
+ * Celerity C-1200 ?		UNIX 4.2BSD	cc		3485	3468
+ * SUN 3/75	68020-16.67Mhz	SUN 4.2 V3	cc		3333	3571
+ * IBM-4341	Model 12	UTS 5.0		?		3685	3685
+ * SUN-3/160    68020-16.67Mhz  Sun 4.2 V3.0A   cc		3381    3764
+ * Sun 3/180	68020-16.67Mhz	Sun 4.2		cc		3333	3846
+ * IBM-4341	Model 12	UTS 5.0		?		3910	3910 MN
+ * MC 5400	68020-16.67MHz	RTU V3.0	cc (V4.0)	3952	4054
+ * Intel 386/20	80386-12.5Mhz	PMON debugger	Intel C386v0.2	4149	4386
+ * NCR Tower32  68020-16.67Mhz  SYS 5.0 Rel 2.0 cc              3846	4545
+ * MC 5600/5700	68020-16.67MHz	RTU V3.0	cc (V4.0)	4504	4746 %
+ * Intel 386/20	80386-12.5Mhz	PMON debugger	Intel C386v0.2	4534	4794 i1
+ * Intel 386/20	80386-16Mhz	PMON debugger	Intel C386v0.2	5304	5607
+ * Gould PN9080	custom ECL	UTX-32 1.1C	cc		5369	5676
+ * Gould 1460-342 ECL proc      UTX/32 1.1/c    cc              5342    5677 G1
+ * VAX-784	-		Mach/4.3	cc		5882	5882 &4
+ * Intel 386/20	80386-16Mhz	PMON debugger	Intel C386v0.2	5801	6133 i1
+ * VAX 8600	-		UNIX 4.3bsd	cc		7024	7088
+ * VAX 8600	-		VMS		VAX-11 C 2.0	7142	7142
+ * Alliant FX/8 CE		Concentrix	cc -ce;exec -c 	6952	7655 FX
+ * CCI POWER 6/32		COS(SV+4.2)	cc		7500	7800
+ * CCI POWER 6/32		POWER 6 UNIX/V	cc		8236	8498
+ * CCI POWER 6/32		4.2 Rel. 1.2b	cc		8963	9544
+ * Sperry (CCI Power 6)		4.2BSD		cc		9345   10000
+ * CRAY-X-MP/12	   105Mhz	COS 1.14	Cray C         10204   10204
+ * IBM-3083	-		UTS 5.0 Rel 1	cc	       16666   12500
+ * CRAY-1A	    80Mhz	CTSS		Cray C 2.0     12100   13888
+ * IBM-3083	-		VM/CMS HPO 3.4	Waterloo C 1.2 13889   13889
+ * Amdahl 470 V/8 		UTS/V 5.2       cc v1.23       15560   15560
+ * CRAY-X-MP/48	   105Mhz	CTSS		Cray C 2.0     15625   17857
+ * Amdahl 580	-		UTS 5.0 Rel 1.2	cc v1.5        23076   23076
+ * Amdahl 5860	 		UTS/V 5.2       cc v1.23       28970   28970
+ *
+ * NOTE
+ *   *   Crystal changed from 'stock' to listed value.
+ *   +   This Macintosh was upgraded from 128K to 512K in such a way that
+ *       the new 384K of memory is not slowed down by video generator accesses.
+ *   %   Single processor; MC == MASSCOMP
+ *   NM  A version 7 C compiler written at New Mexico Tech.
+ *   @   vanilla Lattice compiler used with MicroPro standard library
+ *   S   Shorts used instead of ints
+ *   T	 with Chris Torek's patches (whatever they are).
+ *   ~   For WICAT Systems: MB=MultiBus, PB=Proprietary Bus
+ *   LM  Large Memory Model. (Otherwise, all 80x8x results are small model)
+ *   MM  Medium Memory Model. (Otherwise, all 80x8x results are small model)
+ *   C1  Univation PC TURBO Co-processor; 9.54Mhz 8086, 640K RAM
+ *   C2  Seattle Telecom STD-286 board
+ *   C3  Definicon DSI-32 coprocessor
+ *   C?  Unknown co-processor board?
+ *   CT1 Convergent Technologies MegaFrame, 1 processor.
+ *   MN  Using Mike Newton's 'optimizer' (see net.sources).
+ *   G1  This Gould machine has 2 processors and was able to run 2 dhrystone
+ *       Benchmarks in parallel with no slowdown.
+ *   FH  FHC == Frank Hogg Labs (Hazelwood Uniquad 2 in an FHL box).
+ *   FX  The Alliant FX/8 is a system consisting of 1-8 CEs (computation
+ *	 engines) and 1-12 IPs (interactive processors). Note N8 applies.
+ *   RT  This is one of the RT's that CMU has been using for awhile.  I'm
+ *	 not sure that this is identical to the machine that IBM is selling
+ *	 to the public.
+ *   i1  Normally, the 386/20 starter kit has a 16k direct mapped cache
+ *	 which inserts 2 or 3 wait states on a write thru.  These results
+ *	 were obtained by disabling the write-thru, or essentially turning
+ *	 the cache into 0 wait state memory.
+ *   Nnn This machine has multiple processors, allowing "nn" copies of the
+ *	 benchmark to run in the same time as 1 copy.
+ *   &nn This machine has "nn" processors, and the benchmark results were
+ *	 obtained by having all "nn" processors working on 1 copy of dhrystone.
+ *	 (Note, this is different than Nnn. Salesmen like this measure).
+ *   ?   I don't trust results marked with '?'.  These were sent to me with
+ *       either incomplete info, or with times that just don't make sense.
+ *	 ?? means I think the performance is too poor, ?! means too good.
+ *       If anybody can confirm these figures, please respond.
+ *
+ *  ABBREVIATIONS
+ *	CCC	Concurrent Computer Corp. (was Perkin-Elmer)
+ *	MC	Masscomp
+ *
+ *--------------------------------RESULTS END----------------------------------
+ *
+ *	The following program contains statements of a high-level programming
+ *	language (C) in a distribution considered representative:
+ *
+ *	assignments			53%
+ *	control statements		32%
+ *	procedure, function calls	15%
+ *
+ *	100 statements are dynamically executed.  The program is balanced with
+ *	respect to the three aspects:
+ *		- statement type
+ *		- operand type (for simple data types)
+ *		- operand access
+ *			operand global, local, parameter, or constant.
+ *
+ *	The combination of these three aspects is balanced only approximately.
+ *
+ *	The program does not compute anything meaningful, but it is
+ *	syntactically and semantically correct.
+ *
+ */
+
+/* Accuracy of timings and human fatigue controlled by next two lines */
+/*#define LOOPS	5000		/* Use this for slow or 16 bit machines */
+/*#define LOOPS	50000		/* Use this for slow or 16 bit machines */
+#define LOOPS	500000		/* Use this for faster machines */
+
+/* Compiler dependent options */
+#undef	NOENUM			/* Define if compiler has no enum's */
+#undef	NOSTRUCTASSIGN		/* Define if compiler can't assign structures */
+
+/* define only one of the next three defines */
+#define GETRUSAGE		/* Use getrusage(2) time function */
+/*#define TIMES			/* Use times(2) time function */
+/*#define TIME			/* Use time(2) time function */
+
+/* define the granularity of your times(2) function (when used) */
+/*#define HZ	60		/* times(2) returns 1/60 second (most) */
+/*#define HZ	100		/* times(2) returns 1/100 second (WECo) */
+
+/* for compatibility with goofed up version */
+/*#define GOOF			/* Define if you want the goofed up version */
+
+/* default number of threads that will be spawned */
+#define DEFAULT_THREADS 1
+
+/* Dhrystones per second obtained on VAX11/780 -- a notional 1MIPS machine. */
+/* Used in DMIPS calculation. */
+#define ONE_MIPS 1757
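+/* For example, a machine scoring 17570 dhrystones/second would report */
+/* 17570 / 1757 = 10 DMIPS. */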
+
+#ifdef GOOF
+char	Version[] = "1.0";
+#else
+char	Version[] = "1.1";
+#endif
+
+#ifdef	NOSTRUCTASSIGN
+#define	structassign(d, s)	memcpy(&(d), &(s), sizeof(d))
+#else
+#define	structassign(d, s)	d = s
+#endif
+
+#ifdef	NOENUM
+#define	Ident1	1
+#define	Ident2	2
+#define	Ident3	3
+#define	Ident4	4
+#define	Ident5	5
+typedef int	Enumeration;
+#else
+typedef enum	{Ident1, Ident2, Ident3, Ident4, Ident5} Enumeration;
+#endif
+
+typedef int	OneToThirty;
+typedef int	OneToFifty;
+typedef char	CapitalLetter;
+typedef char	String30[31];
+typedef int	Array1Dim[51];
+typedef int	Array2Dim[51][51];
+
+struct	Record
+{
+	struct Record		*PtrComp;
+	Enumeration		Discr;
+	Enumeration		EnumComp;
+	OneToFifty		IntComp;
+	String30		StringComp;
+};
+
+typedef struct Record 	RecordType;
+typedef RecordType *	RecordPtr;
+typedef int		boolean;
+
+//#define	NULL		0
+#define	TRUE		1
+#define	FALSE		0
+
+#ifndef REG
+#define	REG
+#endif
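+/* REG expands to nothing by default; it can be defined as 'register' at */
+/* build time (e.g. -DREG=register) to compare register-hinted builds.   */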
+
+extern Enumeration	Func1();
+extern boolean		Func2();
+
+#ifdef TIMES
+#include <sys/param.h>
+#include <sys/types.h>
+#endif
+#ifdef GETRUSAGE
+#include <sys/resource.h>
+#endif
+#include <time.h>
+#include <unistd.h>
+#include <sys/wait.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#include <sys/time.h>
+
+
+/* Forward declarations for the driver routines defined below. */
+void run_dhrystone(int duration, int num_threads, long num_loops, int delay);
+void run_for_duration(int duration, long num_loops);
+void printhelp(void);
+int Proc0(long numloops, boolean print_result);
+
+int main(int argc, char** argv)
+{
+	int num_threads = DEFAULT_THREADS;
+	int runtime = 0;
+	int delay = 0;
+	long mloops = 0;
+
+	int opt;
+	while ((opt = getopt(argc, argv, "ht:r:d:l:")) != -1) {
+		switch (opt) {
+			case 'h':
+				printhelp();
+				exit(0);
+				break;
+			case 't':
+				num_threads = atoi(optarg);
+				break;
+			case 'r':
+				runtime = atoi(optarg);
+				break;
+			case 'd':
+				delay = atoi(optarg);
+				break;
+			case 'l':
+				mloops = atoll(optarg);
+				break;
+			default:	/* unrecognised option; getopt has already printed an error */
+				exit(1);
+		}
+	}
+
+	if (runtime && mloops) {
+		fprintf(stderr, "-r and -l options cannot be specified at the same time.\n");
+		exit(1);
+	} else if (!runtime && !mloops) {
+		fprintf(stderr, "Must specify either -r or -l option; use -h to see help.\n");
+		exit(1);
+	}
+
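+	/* -l gives the total loop count in millions; otherwise each thread
+	 * gets the compile-time LOOPS default, and the -r path repeats
+	 * batches of that size until the requested duration elapses. */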
+	long num_loops = mloops ? mloops * 1000000L : LOOPS * num_threads;
+	run_dhrystone(runtime, num_threads, num_loops, delay);
+	return 0;
+}
+
+void run_dhrystone(int duration, int num_threads, long num_loops, int delay) {
+	printf("duration: %d seconds\n", duration);
+	printf("number of threads: %d\n", num_threads);
+	printf("number of loops: %ld\n", num_loops);
+	printf("delay between starting threads: %d seconds\n", delay);
+	printf("\n");
+
+	pid_t *children = malloc(num_threads * sizeof(pid_t));
+	long loops_per_thread = num_loops / num_threads;
+
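+	/* Note: clock() returns CPU time consumed by the calling process,
+	 * not wall-clock time. Since the parent busy-loops for the whole
+	 * run, the difference is a reasonable approximation here. */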
+	clock_t run_start = clock();
+
+	long i;
+	int actual_duration;
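+	/*
+	 * Children are started delay seconds apart, so the i-th child runs
+	 * for (duration - i * delay) seconds; this way all threads finish
+	 * at roughly the same time. The parent runs the final batch itself.
+	 */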
+	for (i = 0; i < (num_threads - 1); i++) {
+		pid_t c = fork();
+		if (c == 0) {
+			// child
+			actual_duration = duration - i * delay;
+			if (actual_duration < 0)
+				actual_duration = 0;
+			run_for_duration(actual_duration, loops_per_thread);
+			exit(0);
+		}
+
+		children[i] = c;
+		sleep(delay);
+	}
+
+	run_for_duration(duration - delay * (num_threads - 1), loops_per_thread);
+
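+	/* Reap each forked child, retrying wait() until the child has
+	 * actually terminated (exited or was killed by a signal). */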
+	for (i = 0; i < num_threads - 1; i++) {
+		int status, w;
+		do {
+			w = wait(&status);
+		} while (w != -1 && (!WIFEXITED(status) && !WIFSIGNALED(status)));
+	}
+
+	clock_t run_end = clock();
+	printf("\nTotal dhrystone run time: %f seconds.\n", (double)(run_end - run_start) / CLOCKS_PER_SEC);
+
+	exit(0);
+}
+
+void run_for_duration(int duration, long num_loops) {
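+	/* duration == 0 selects the fixed-iteration (-l) path: clock() is
+	 * already >= end after a single pass, and Proc0 prints the result. */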
+	clock_t end = clock() + duration * CLOCKS_PER_SEC;
+	do {
+		Proc0(num_loops, duration == 0);
+	} while (clock() < end);
+}
+
+void printhelp(void) {
+	printf("Usage: dhrystone (-h | -l MLOOPS | -r DURATION) [-t THREADS [-d DELAY]]\n");
+	printf("\n");
+	printf("Runs the dhrystone benchmark either for a specified duration or for a specified\n");
+	printf("number of iterations.\n");
+	printf("\n");
+	printf("Options:\n");
+	printf("    -h          Print this message and exit.\n");
+	printf("    -l MLOOPS   Run dhrystone for the specified number of millions\n");
+	printf("                of iterations (i.e. the actual number of iterations is\n");
+	printf("                MLOOPS * 1e6).\n");
+	printf("    -r DURATION Run dhrystone for the specified duration (in seconds).\n");
+	printf("                dhrystone will be run in batches of 500000 iterations,\n");
+	printf("                looping until the specified time period has passed.\n");
+	printf("\n");
+	printf("    Note: -r and -l options may not be specified at the same time.\n");
+	printf("\n");
+	printf("    -t THREADS  Specifies the number of concurrent threads (processes,\n");
+	printf("                actually) that will be spawned. Defaults to 1.\n");
+	printf("    -d DELAY    If THREADS is > 1, this specifies the delay (in seconds)\n");
+	printf("                between spawning successive threads.\n");
+	printf("\n");
+}
+
+
+/*
+ * Package 1
+ */
+int		IntGlob;
+boolean		BoolGlob;
+char		Char1Glob;
+char		Char2Glob;
+Array1Dim	Array1Glob;
+Array2Dim	Array2Glob;
+RecordPtr	PtrGlb;
+RecordPtr	PtrGlbNext;
+
+int Proc0(long numloops, boolean print_result)
+{
+	OneToFifty		IntLoc1;
+	REG OneToFifty		IntLoc2;
+	OneToFifty		IntLoc3;
+	REG char		CharLoc;
+	REG char		CharIndex;
+	Enumeration	 	EnumLoc;
+	String30		String1Loc;
+	String30		String2Loc;
+	//	extern char		*malloc();
+
+	register unsigned int	i;
+#ifdef TIME
+	long			time();
+	long			starttime;
+	long			benchtime;
+	long			nulltime;
+
+	starttime = time( (long *) 0);
+	for (i = 0; i < numloops; ++i);
+	nulltime = time( (long *) 0) - starttime; /* Computes o'head of loop */
+#endif
+#ifdef TIMES
+	time_t			starttime;
+	time_t			benchtime;
+	time_t			nulltime;
+	struct tms		tms;
+
+	times(&tms); starttime = tms.tms_utime;
+	for (i = 0; i < numloops; ++i);
+	times(&tms);
+	nulltime = tms.tms_utime - starttime; /* Computes overhead of looping */
+#endif
+#ifdef GETRUSAGE
+	struct rusage starttime;
+	struct rusage endtime;
+	struct timeval nulltime;
+
+	getrusage(RUSAGE_SELF, &starttime);
+	for (i = 0; i < numloops; ++i);
+	getrusage(RUSAGE_SELF, &endtime);
+	nulltime.tv_sec  = endtime.ru_utime.tv_sec  - starttime.ru_utime.tv_sec;
+	nulltime.tv_usec = endtime.ru_utime.tv_usec - starttime.ru_utime.tv_usec;
+#endif
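+	/* Caveat: an optimizing compiler may elide the empty calibration
+	 * loops above entirely, making the measured nulltime (near) zero. */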
+
+	PtrGlbNext = (RecordPtr) malloc(sizeof(RecordType));
+	PtrGlb = (RecordPtr) malloc(sizeof(RecordType));
+	PtrGlb->PtrComp = PtrGlbNext;
+	PtrGlb->Discr = Ident1;
+	PtrGlb->EnumComp = Ident3;
+	PtrGlb->IntComp = 40;
+	strcpy(PtrGlb->StringComp, "DHRYSTONE PROGRAM, SOME STRING");
+#ifndef	GOOF
+	strcpy(String1Loc, "DHRYSTONE PROGRAM, 1'ST STRING");	/*GOOF*/
+#endif
+	Array2Glob[8][7] = 10;	/* Was missing in published program */
+
+/*****************
+-- Start Timer --
+*****************/
+#ifdef TIME
+	starttime = time( (long *) 0);
+#endif
+#ifdef TIMES
+	times(&tms); starttime = tms.tms_utime;
+#endif
+#ifdef GETRUSAGE
+	getrusage (RUSAGE_SELF, &starttime);
+#endif
+	for (i = 0; i < numloops; ++i)
+	{
+
+		Proc5();
+		Proc4();
+		IntLoc1 = 2;
+		IntLoc2 = 3;
+		strcpy(String2Loc, "DHRYSTONE PROGRAM, 2'ND STRING");
+		EnumLoc = Ident2;
+		BoolGlob = ! Func2(String1Loc, String2Loc);
+		while (IntLoc1 < IntLoc2)
+		{
+			IntLoc3 = 5 * IntLoc1 - IntLoc2;
+			Proc7(IntLoc1, IntLoc2, &IntLoc3);
+			++IntLoc1;
+		}
+		Proc8(Array1Glob, Array2Glob, IntLoc1, IntLoc3);
+		Proc1(PtrGlb);
+		for (CharIndex = 'A'; CharIndex <= Char2Glob; ++CharIndex)
+			if (EnumLoc == Func1(CharIndex, 'C'))
+				Proc6(Ident1, &EnumLoc);
+		IntLoc3 = IntLoc2 * IntLoc1;
+		IntLoc2 = IntLoc3 / IntLoc1;
+		IntLoc2 = 7 * (IntLoc3 - IntLoc2) - IntLoc1;
+		Proc2(&IntLoc1);
+	}
+
+/*****************
+-- Stop Timer --
+*****************/
+
+	if (print_result) {
+#ifdef TIME
+		benchtime = time( (long *) 0) - starttime - nulltime;
+		printf("Dhrystone(%s) time for %ld passes = %ld\n",
+			Version,
+			(long) numloops, benchtime);
+		printf("This machine benchmarks at %ld dhrystones/second\n",
+			((long) numloops) / benchtime);
+		printf("                           %ld DMIPS\n",
+			((long) numloops) / benchtime / ONE_MIPS);
+#endif
+#ifdef TIMES
+		times(&tms);
+		benchtime = tms.tms_utime - starttime - nulltime;
+		printf("Dhrystone(%s) time for %ld passes = %ld\n",
+			Version,
+			(long) numloops, benchtime/HZ);
+		printf("This machine benchmarks at %ld dhrystones/second\n",
+			((long) numloops) * HZ / benchtime);
+		printf("                           %ld DMIPS\n",
+			((long) numloops) * HZ / benchtime / ONE_MIPS);
+#endif
+#ifdef GETRUSAGE
+		getrusage(RUSAGE_SELF, &endtime);
+		{
+		    double t = (double)(endtime.ru_utime.tv_sec
+					- starttime.ru_utime.tv_sec
+					- nulltime.tv_sec)
+			     + (double)(endtime.ru_utime.tv_usec
+					- starttime.ru_utime.tv_usec
+					- nulltime.tv_usec) * 1e-6;
+		    printf("Dhrystone(%s) time for %ld passes = %.1f\n",
+			   Version,
+			   (long)numloops,
+			   t);
+		    printf("This machine benchmarks at %.0f dhrystones/second\n",
+			   (double)numloops / t);
+		    printf("                           %.0f DMIPS\n",
+			   (double)numloops / t / ONE_MIPS);
+		}
+#endif
+	}
+
+}
+
+Proc1(PtrParIn)
+REG RecordPtr	PtrParIn;
+{
+#define	NextRecord	(*(PtrParIn->PtrComp))
+
+	structassign(NextRecord, *PtrGlb);
+	PtrParIn->IntComp = 5;
+	NextRecord.IntComp = PtrParIn->IntComp;
+	NextRecord.PtrComp = PtrParIn->PtrComp;
+	Proc3(NextRecord.PtrComp);
+	if (NextRecord.Discr == Ident1)
+	{
+		NextRecord.IntComp = 6;
+		Proc6(PtrParIn->EnumComp, &NextRecord.EnumComp);
+		NextRecord.PtrComp = PtrGlb->PtrComp;
+		Proc7(NextRecord.IntComp, 10, &NextRecord.IntComp);
+	}
+	else
+		structassign(*PtrParIn, NextRecord);
+
+#undef	NextRecord
+}
+
+Proc2(IntParIO)
+OneToFifty	*IntParIO;
+{
+	REG OneToFifty		IntLoc;
+	REG Enumeration		EnumLoc;
+
+	IntLoc = *IntParIO + 10;
+	for(;;)
+	{
+		if (Char1Glob == 'A')
+		{
+			--IntLoc;
+			*IntParIO = IntLoc - IntGlob;
+			EnumLoc = Ident1;
+		}
+		if (EnumLoc == Ident1)
+			break;
+	}
+}
+
+Proc3(PtrParOut)
+RecordPtr	*PtrParOut;
+{
+	if (PtrGlb != NULL)
+		*PtrParOut = PtrGlb->PtrComp;
+	else
+		IntGlob = 100;
+	Proc7(10, IntGlob, &PtrGlb->IntComp);
+}
+
+Proc4()
+{
+	REG boolean	BoolLoc;
+
+	BoolLoc = Char1Glob == 'A';
+	BoolLoc |= BoolGlob;
+	Char2Glob = 'B';
+}
+
+Proc5()
+{
+	Char1Glob = 'A';
+	BoolGlob = FALSE;
+}
+
+extern boolean Func3();
+
+Proc6(EnumParIn, EnumParOut)
+REG Enumeration	EnumParIn;
+REG Enumeration	*EnumParOut;
+{
+	*EnumParOut = EnumParIn;
+	if (! Func3(EnumParIn) )
+		*EnumParOut = Ident4;
+	switch (EnumParIn)
+	{
+	case Ident1:	*EnumParOut = Ident1; break;
+	case Ident2:	if (IntGlob > 100) *EnumParOut = Ident1;
+			else *EnumParOut = Ident4;
+			break;
+	case Ident3:	*EnumParOut = Ident2; break;
+	case Ident4:	break;
+	case Ident5:	*EnumParOut = Ident3;
+	}
+}
+
+Proc7(IntParI1, IntParI2, IntParOut)
+OneToFifty	IntParI1;
+OneToFifty	IntParI2;
+OneToFifty	*IntParOut;
+{
+	REG OneToFifty	IntLoc;
+
+	IntLoc = IntParI1 + 2;
+	*IntParOut = IntParI2 + IntLoc;
+}
+
+Proc8(Array1Par, Array2Par, IntParI1, IntParI2)
+Array1Dim	Array1Par;
+Array2Dim	Array2Par;
+OneToFifty	IntParI1;
+OneToFifty	IntParI2;
+{
+	REG OneToFifty	IntLoc;
+	REG OneToFifty	IntIndex;
+
+	IntLoc = IntParI1 + 5;
+	Array1Par[IntLoc] = IntParI2;
+	Array1Par[IntLoc+1] = Array1Par[IntLoc];
+	Array1Par[IntLoc+30] = IntLoc;
+	for (IntIndex = IntLoc; IntIndex <= (IntLoc+1); ++IntIndex)
+		Array2Par[IntLoc][IntIndex] = IntLoc;
+	++Array2Par[IntLoc][IntLoc-1];
+	Array2Par[IntLoc+20][IntLoc] = Array1Par[IntLoc];
+	IntGlob = 5;
+}
+
+Enumeration Func1(CharPar1, CharPar2)
+CapitalLetter	CharPar1;
+CapitalLetter	CharPar2;
+{
+	REG CapitalLetter	CharLoc1;
+	REG CapitalLetter	CharLoc2;
+
+	CharLoc1 = CharPar1;
+	CharLoc2 = CharLoc1;
+	if (CharLoc2 != CharPar2)
+		return (Ident1);
+	else
+		return (Ident2);
+}
+
+boolean Func2(StrParI1, StrParI2)
+String30	StrParI1;
+String30	StrParI2;
+{
+	REG OneToThirty		IntLoc;
+	REG CapitalLetter	CharLoc;
+
+	IntLoc = 1;
+	while (IntLoc <= 1)
+		if (Func1(StrParI1[IntLoc], StrParI2[IntLoc+1]) == Ident1)
+		{
+			CharLoc = 'A';
+			++IntLoc;
+		}
+	if (CharLoc >= 'W' && CharLoc <= 'Z')
+		IntLoc = 7;
+	if (CharLoc == 'X')
+		return(TRUE);
+	else
+	{
+		if (strcmp(StrParI1, StrParI2) > 0)
+		{
+			IntLoc += 7;
+			return (TRUE);
+		}
+		else
+			return (FALSE);
+	}
+}
+
+boolean Func3(EnumParIn)
+REG Enumeration	EnumParIn;
+{
+	REG Enumeration	EnumLoc;
+
+	EnumLoc = EnumParIn;
+	if (EnumLoc == Ident3) return (TRUE);
+	return (FALSE);
+}
+
+#ifdef	NOSTRUCTASSIGN
+memcpy(d, s, l)
+register char	*d;
+register char	*s;
+register int	l;
+{
+	while (l--) *d++ = *s++;
+}
+#endif
+/* ---------- */