mirror of
https://github.com/ARM-software/workload-automation.git
synced 2025-07-15 19:43:28 +01:00
Compare commits
106 Commits
Author | SHA1 | Date | |
---|---|---|---|
050329a5ee | |||
d9e7aa9af0 | |||
125cd3bb41 | |||
75ea78ea4f | |||
12bb21045e | |||
4bb1f4988f | |||
0ff6b4842a | |||
98b787e326 | |||
e915436661 | |||
68e1806c07 | |||
f19ebb79ee | |||
c950f5ec8f | |||
6aaa28781b | |||
d87025ad3a | |||
ac5819da8e | |||
31e08a6477 | |||
47769cf28d | |||
d8601880ac | |||
0efc9b9ccd | |||
501d3048a5 | |||
c4daccd800 | |||
db944629f3 | |||
564738a2ad | |||
c092128e94 | |||
463840d2b7 | |||
43633ab362 | |||
a6f0ab31e4 | |||
72fd5b5139 | |||
766bb4da1a | |||
a5f0521353 | |||
3435c36b98 | |||
bd252a6471 | |||
f46851a3b4 | |||
8910234448 | |||
1108c5701e | |||
f5d1a9e94a | |||
959106d61b | |||
0aea3abcaf | |||
24ccc024f8 | |||
42ab811032 | |||
832ed797e1 | |||
31b44e447e | |||
179b2e2264 | |||
22437359b6 | |||
2347c8c007 | |||
52a0a79012 | |||
60693e1b65 | |||
8ddf16dfea | |||
9aec4850c2 | |||
bdaa26d772 | |||
d7aedae69c | |||
45af8c69b8 | |||
e398083f6e | |||
4ce41407e9 | |||
aa0564e8f3 | |||
83f826d6fe | |||
1599b59770 | |||
8cd9862e32 | |||
b4ea2798dd | |||
76e6f14212 | |||
ce59318e66 | |||
5652057adb | |||
e9f5577237 | |||
ec3d928b3b | |||
ee8bab365b | |||
e3406bdb74 | |||
55d983ecaf | |||
f8908e8194 | |||
dd44d6fa16 | |||
753786a45c | |||
8647ceafd8 | |||
2c2118ad23 | |||
0ec8427d05 | |||
cf5c3a2723 | |||
8ddc1c1eba | |||
b5db4afc05 | |||
f977c3dfc8 | |||
769aae3047 | |||
a1ba3c6f69 | |||
536fc7eb92 | |||
de36dacb82 | |||
637bf57cbc | |||
60ffd27bba | |||
984a74a6ca | |||
5b8dc1779c | |||
ba0cd7f842 | |||
adb3ffa6aa | |||
bedd3bf062 | |||
03e463ad4a | |||
2ce8d6fc95 | |||
1415f61e36 | |||
6ab1ae74a6 | |||
a1cecc0002 | |||
0cba3c68dc | |||
f267fc9277 | |||
462a5b651a | |||
7cd7b73f58 | |||
4a9a2ad105 | |||
9f88459f56 | |||
a2087ea467 | |||
31a5a95803 | |||
3f202205a5 | |||
ce7720b26d | |||
766b96e2ad | |||
3c9de98a4b | |||
5263cfd6f8 |
README.rst
doc
build_instrument_method_map.py
build_plugin_docs.py
source
extras
requirements.txt
setup.py
tests
wa
__init__.py
commands
framework
instruments
output_processors
utils
workloads
aitutu
__init__.py
com.arm.wa.uiauto.aitutu.apk
uiauto
chrome
dhrystone
exoplayer
geekbench
glbenchmark
hackbench
hwuitest
lmbench
manual
meabo
memcpy
mongoperf
motionmark
__init__.py
com.arm.wa.uiauto.motionmark.apk
uiauto
openssl
pcmark
rt_app
shellscript
speedometer
stress_ng
sysbench
uibench
uibenchjanktests
vellamo
@ -30,7 +30,11 @@ Installation
To install::

    git clone git@github.com:ARM-software/workload-automation.git workload-automation
    sudo -H pip install ./workload-automation
    sudo -H python setup [install|develop]

Note: A `requirements.txt` is included however this is designed to be used as a
reference for known working versions rather than as part of a standard
installation.

Please refer to the `installation section <http://workload-automation.readthedocs.io/en/latest/user_information.html#install>`_
in the documentation for more details.
@ -1,5 +1,5 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright 2015-2015 ARM Limited
|
||||
# Copyright 2015-2019 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@ -26,7 +26,7 @@ OUTPUT_TEMPLATE_FILE = os.path.join(os.path.dirname(__file__), 'source', 'instr
|
||||
|
||||
|
||||
def generate_instrument_method_map(outfile):
|
||||
signal_table = format_simple_table([(k, v) for k, v in SIGNAL_MAP.iteritems()],
|
||||
signal_table = format_simple_table([(k, v) for k, v in SIGNAL_MAP.items()],
|
||||
headers=['method name', 'signal'], align='<<')
|
||||
priority_table = format_simple_table(zip(CallbackPriority.names, CallbackPriority.values),
|
||||
headers=['decorator', 'priority'], align='<>')
|
||||
@ -37,4 +37,4 @@ def generate_instrument_method_map(outfile):
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
generate_instrumentation_method_map(sys.argv[1])
|
||||
generate_instrument_method_map(sys.argv[1])
|
||||
|
@ -1,5 +1,5 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright 2014-2015 ARM Limited
|
||||
# Copyright 2014-2019 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@ -25,7 +25,12 @@ from wa.utils.doc import (strip_inlined_text, get_rst_from_plugin,
|
||||
get_params_rst, underline, line_break)
|
||||
from wa.utils.misc import capitalize
|
||||
|
||||
GENERATE_FOR_PACKAGES = ['wa.workloads', 'wa.instruments', 'wa.output_processors']
|
||||
GENERATE_FOR_PACKAGES = [
|
||||
'wa.workloads',
|
||||
'wa.instruments',
|
||||
'wa.output_processors',
|
||||
]
|
||||
|
||||
|
||||
def insert_contents_table(title='', depth=1):
|
||||
"""
|
||||
@ -41,6 +46,7 @@ def insert_contents_table(title='', depth=1):
|
||||
|
||||
|
||||
def generate_plugin_documentation(source_dir, outdir, ignore_paths):
|
||||
# pylint: disable=unused-argument
|
||||
pluginloader.clear()
|
||||
pluginloader.update(packages=GENERATE_FOR_PACKAGES)
|
||||
if not os.path.exists(outdir):
|
||||
@ -57,7 +63,7 @@ def generate_plugin_documentation(source_dir, outdir, ignore_paths):
|
||||
exts = pluginloader.list_plugins(ext_type)
|
||||
sorted_exts = iter(sorted(exts, key=lambda x: x.name))
|
||||
try:
|
||||
wfh.write(get_rst_from_plugin(sorted_exts.next()))
|
||||
wfh.write(get_rst_from_plugin(next(sorted_exts)))
|
||||
except StopIteration:
|
||||
return
|
||||
for ext in sorted_exts:
|
||||
@ -73,9 +79,11 @@ def generate_target_documentation(outdir):
|
||||
'juno_linux',
|
||||
'juno_android']
|
||||
|
||||
intro = '\nThis is a list of commonly used targets and their device '\
|
||||
'parameters, to see a complete for a complete reference please use the '\
|
||||
'WA :ref:`list command <list-command>`.\n\n\n'
|
||||
intro = (
|
||||
'\nThis is a list of commonly used targets and their device '
|
||||
'parameters, to see a complete for a complete reference please use the'
|
||||
' WA :ref:`list command <list-command>`.\n\n\n'
|
||||
)
|
||||
|
||||
pluginloader.clear()
|
||||
pluginloader.update(packages=['wa.framework.target.descriptor'])
|
||||
@ -112,7 +120,8 @@ def generate_config_documentation(config, outdir):
|
||||
if not os.path.exists(outdir):
|
||||
os.mkdir(outdir)
|
||||
|
||||
outfile = os.path.join(outdir, '{}.rst'.format('_'.join(config.name.split())))
|
||||
config_name = '_'.join(config.name.split())
|
||||
outfile = os.path.join(outdir, '{}.rst'.format(config_name))
|
||||
with open(outfile, 'w') as wfh:
|
||||
wfh.write(get_params_rst(config.config_points))
|
||||
|
||||
|
@ -315,9 +315,12 @@ methods

.. method:: RunDatabaseOutput.get_artifact_path(name)

    Returns a `StringIO` object containing the contents of the artifact
    specified by ``name``. This will only look at the run artifacts; this will
    not search the artifacts of the individual jobs.
    If the artifact is a file this method returns a `StringIO` object containing
    the contents of the artifact specified by ``name``. If the artifact is a
    directory, the method returns a path to a locally extracted version of the
    directory which is left to the user to remove after use. This will only look
    at the run artifacts; this will not search the artifacts of the individual
    jobs.

    :param name: The name of the artifact whose path to retrieve.
    :return: A `StringIO` object with the contents of the artifact
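A minimal usage sketch for the new dual behaviour (the constructor arguments and artifact
name below are illustrative, not taken from a real run): because the return value may be
either an in-memory buffer or a temporary directory, callers should check which case they
received and remove the extracted directory themselves.

.. code-block:: python

    import os
    import shutil

    from wa.framework.output import RunDatabaseOutput  # import path as defined in output.py

    ro = RunDatabaseOutput(password='postgres', run_uuid='...')  # placeholder credentials
    artifact = ro.get_artifact_path('sysfs_extract')             # hypothetical artifact name

    if isinstance(artifact, str) and os.path.isdir(artifact):
        # Directory artifact: extracted to a temporary location; clean up when done.
        print(os.listdir(artifact))
        shutil.rmtree(artifact)
    else:
        # File artifact: a StringIO-like object holding the file contents.
        print(artifact.read())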
@ -452,8 +455,11 @@ methods

.. method:: JobDatabaseOutput.get_artifact_path(name)

    Returns a ``StringIO`` object containing the contents of the artifact
    specified by ``name`` associated with this job.
    If the artifact is a file this method returns a `StringIO` object containing
    the contents of the artifact specified by ``name`` associated with this job.
    If the artifact is a directory, the method returns a path to a locally
    extracted version of the directory which is left to the user to remove after
    use.

    :param name: The name of the artifact whose path to retrieve.
    :return: A `StringIO` object with the contents of the artifact
@ -497,6 +503,11 @@ A :class:`Metric` has the following attributes:
    or they may have been added by the workload to help distinguish between
    otherwise identical metrics.

``label``
    This is a string constructed from the name and classifiers, to provide a
    more unique identifier, e.g. for grouping values across iterations. The
    format is in the form ``name/classifier1=value1/classifier2=value2/...``.


:class:`Artifact`
-----------------
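The documented label format mirrors the ``label`` property added to ``Metric`` later in
this change set. A small illustrative sketch (the metric name and classifiers are made up):

.. code-block:: python

    def metric_label(name, classifiers):
        """Illustrative only: build a label in the documented
        ``name/classifier1=value1/classifier2=value2/...`` form."""
        parts = ['{}={}'.format(k, v) for k, v in classifiers.items()]
        parts.insert(0, name)
        return '/'.join(parts)

    # Prints 'frame_time/test=list_view/run=2'
    print(metric_label('frame_time', {'test': 'list_view', 'run': 2}))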
@ -178,6 +178,16 @@ methods.
    locations) and device will be searched for an application with a matching
    package name.

``supported_versions``
    This attribute should be a list of apk versions that are suitable for this
    workload; if a specific apk version is not specified then any available
    supported version may be chosen.

``activity``
    This attribute can be optionally set to override the default activity that
    will be extracted from the selected APK file which will be used when
    launching the APK.

``view``
    This is the "view" associated with the application. This is used by
    instruments like ``fps`` to monitor the current framerate being generated by
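A minimal sketch of how the attributes described above might appear on a workload class
(the class name, package name, versions and view string below are hypothetical, not taken
from an actual WA workload):

.. code-block:: python

    from wa import ApkWorkload

    class MyBenchmark(ApkWorkload):

        name = 'mybenchmark'
        description = 'Illustrative APK-based workload.'
        package_names = ['com.example.mybenchmark']   # hypothetical package

        # Any of these APK versions may be picked if none is requested explicitly.
        supported_versions = ['4.2', '4.3', '4.3.2']

        # Override the activity auto-detected from the APK, if required.
        activity = '.MainActivity'

        # Used by instruments such as ``fps`` to locate the surface to monitor.
        view = 'SurfaceView - com.example.mybenchmark/com.example.mybenchmark.MainActivity'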
@ -2,6 +2,121 @@
What's New in Workload Automation
=================================

*************
Version 3.1.4
*************

.. warning:: This is the last release that supports Python 2. Subsequent versions
   will support Python 3.5+ only.

New Features:
==============

Framework:
----------
- ``ApkWorkload``: Allow specifying a maximum and minimum version of an APK
  instead of requiring a specific version.
- ``TestPackageHandler``: Added to support running android applications that
  are invoked via ``am instrument``.
- Directories can now be added as ``Artifacts``.

Workloads:
----------
- ``aitutu``: Executes the Aitutu Image Speed/Accuracy and Object
  Speed/Accuracy tests.
- ``uibench``: Run a configurable activity of the UIBench workload suite.
- ``uibenchjanktests``: Run an automated and instrumented version of the
  UIBench JankTests.
- ``motionmark``: Run a browser graphical benchmark.

Other:
------
- Added ``requirements.txt`` as a reference for known working package versions.

Fixes/Improvements
==================

Framework:
----------
- ``JobOutput``: Added an ``augmentation`` attribute to allow listing of
  enabled augmentations for individual jobs.
- Better error handling for misconfigured job selection.
- All ``Workload`` classes now have an ``uninstall`` parameter to control whether
  any binaries installed to the target should be uninstalled again once the
  run has completed.
- The ``cleanup_assets`` parameter is now more consistently utilized across
  workloads.
- ``ApkWorkload``: Added an ``activity`` attribute to allow for overriding the
  automatically detected activity from the APK.
- ``ApkWorkload``: Added support for providing an implicit activity path.
- Fixed retrieving job level artifacts from a database backend.

Output Processors:
------------------
- ``SysfsExtractor``: Ensure that the extracted directories are added as
  ``Artifacts``.
- ``InterruptStatsInstrument``: Ensure that the output files are added as
  ``Artifacts``.
- ``Postgres``: Fix missing ``system_id`` field from ``TargetInfo``.
- ``Postgres``: Support uploading directory ``Artifacts``.
- ``Postgres``: Bump the schema version to v1.3.

Workloads:
----------
- ``geekbench``: Improved apk version handling.
- ``geekbench``: Now supports apk version 4.3.2.

Other:
------
- ``Dockerfile``: Now installs all optional extras for use with WA.
- Fixed support for YAML anchors.
- Fixed building of documentation with Python 3.
- Changed shorthand of installing all of WA extras to `all` as per
  the documentation.
- Upgraded the Dockerfile to use Ubuntu 18.10 and Python 3.
- Restricted maximum versions of ``numpy`` and ``pandas`` for Python 2.7.


*************
Version 3.1.3
*************

Fixes/Improvements
==================

Other:
------
- Security update for PyYAML to attempt prevention of arbitrary code execution
  during parsing.

*************
Version 3.1.2
*************

Fixes/Improvements
==================

Framework:
----------
- Implement an explicit check for Devlib versions to ensure that versions
  are kept in sync with each other.
- Added a ``View`` parameter to ApkWorkloads for use with certain instruments,
  for example ``fps``.
- Added ``"supported_versions"`` attribute to workloads to allow specifying a
  list of supported versions for a particular workload.
- Change default behaviour to run any available version of a workload if a
  specific version is not specified.

Output Processors:
------------------
- ``Postgres``: Fix handling of ``screen_resolution`` during processing.

Other
-----
- Added additional information to documentation
- Added fix for Devlib's ``KernelConfig`` refactor
- Added a ``"label"`` property to ``Metrics``

*************
Version 3.1.1
*************
@ -5,10 +5,12 @@ Convention for Naming revent Files for Revent Workloads
-------------------------------------------------------------------------------

There is a convention for naming revent files which you should follow if you
want to record your own revent files. Each revent file must start with the
device name(case sensitive) then followed by a dot '.' then the stage name
then '.revent'. All your custom revent files should reside at
``'~/.workload_automation/dependencies/WORKLOAD NAME/'``. These are the current
want to record your own revent files. Each revent file must be called (case sensitive)
``<device name>.<stage>.revent``,
where ``<device name>`` is the name of your device (as defined by the model
name of your device which can be retrieved with
``adb shell getprop ro.product.model`` or by the ``name`` attribute of your
customized device class), and ``<stage>`` is one of the following currently
supported stages:

:setup: This stage is where the application is loaded (if present). It is
@ -26,10 +28,12 @@ Only the run stage is mandatory, the remaining stages will be replayed if a
recording is present otherwise no actions will be performed for that particular
stage.

For instance, to add a custom revent files for a device named "mydevice" and
a workload name "myworkload", you need to add the revent files to the directory
``/home/$WA_USER_HOME/dependencies/myworkload/revent_files`` creating it if
necessary. ::
All your custom revent files should reside at
``'$WA_USER_DIRECTORY/dependencies/WORKLOAD NAME/'``. So
typically to add custom revent files for a device named "mydevice" and a
workload name "myworkload", you would need to add the revent files to the
directory ``~/.workload_automation/dependencies/myworkload/revent_files``
creating the directory structure if necessary. ::

    mydevice.setup.revent
    mydevice.run.revent
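A short sketch of the naming convention described above (the helper function is purely
illustrative; WA resolves these paths internally):

.. code-block:: python

    import os

    def expected_revent_path(workload, device_model, stage,
                             user_directory='~/.workload_automation'):
        """Illustrative helper: build the path WA would look for, following the
        ``<device name>.<stage>.revent`` convention."""
        assert stage in ('setup', 'run', 'extract_results', 'teardown')
        filename = '{}.{}.revent'.format(device_model, stage)
        return os.path.expanduser(os.path.join(
            user_directory, 'dependencies', workload, 'revent_files', filename))

    # e.g. ~/.workload_automation/dependencies/myworkload/revent_files/mydevice.run.revent
    print(expected_revent_path('myworkload', 'mydevice', 'run'))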
@ -69,7 +69,55 @@ WA3 config file.

**Q:** My Juno board keeps resetting upon starting WA even if it hasn't crashed.
--------------------------------------------------------------------------------
Please ensure that you do not have any other terminals (e.g. ``screen``
**A:** Please ensure that you do not have any other terminals (e.g. ``screen``
sessions) connected to the board's UART. When WA attempts to open the connection
for its own use this can cause the board to reset if a connection is already
present.


**Q:** I'm using the FPS instrument but I do not get any/correct results for my workload
-----------------------------------------------------------------------------------------

**A:** If your device is running with Android 6.0+ then the default utility for
collecting fps metrics will be ``gfxinfo``, however this does not seem to be able
to extract any meaningful information for some workloads. In this case please
try setting the ``force_surfaceflinger`` parameter for the ``fps`` augmentation
to ``True``. This will attempt to guess the "View" for the workload
automatically, however this is device specific and therefore may need
customizing. If this is required please open the application and execute
``dumpsys SurfaceFlinger --list`` on the device via adb. This will provide a
list of all views available for measuring.

As an example, when trying to find the view for the AngryBirds Rio workload you
may get something like:

.. code-block:: none

    ...
    AppWindowToken{41dfe54 token=Token{77819a7 ActivityRecord{a151266 u0 com.rovio.angrybirdsrio/com.rovio.fusion.App t506}}}#0
    a3d001c com.rovio.angrybirdsrio/com.rovio.fusion.App#0
    Background for -SurfaceView - com.rovio.angrybirdsrio/com.rovio.fusion.App#0
    SurfaceView - com.rovio.angrybirdsrio/com.rovio.fusion.App#0
    com.rovio.angrybirdsrio/com.rovio.fusion.App#0
    boostedAnimationLayer#0
    mAboveAppWindowsContainers#0
    ...

From these, ``"SurfaceView - com.rovio.angrybirdsrio/com.rovio.fusion.App#0"`` is
most likely the View that needs to be set as the ``view`` workload
parameter and will be picked up by the ``fps`` augmentation.


**Q:** I am getting an error which looks similar to ``'CONFIG_SND_BT87X is not exposed in kernel config'...``
-------------------------------------------------------------------------------------------------------------
**A:** If you are receiving this under normal operation this can be caused by a
mismatch of your WA and devlib versions. Please update both to their latest
versions and delete your ``$USER_HOME/.workload_automation/cache/targets.json``
(or equivalent) file.

**Q:** I get an error which looks similar to ``UnicodeDecodeError('ascii' codec can't decode byte...``
------------------------------------------------------------------------------------------------------
**A:** If you receive this error or a similar warning about your environment,
please ensure that you configure your environment to use a locale which supports
UTF-8. Otherwise this can cause issues when attempting to parse files containing
non-ASCII characters.
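A quick way to confirm which encoding WA will pick up, mirroring the encoding check added
to WA's entry point in this release (the snippet itself is only an illustration):

.. code-block:: python

    import locale

    encoding = locale.getpreferredencoding()
    if 'UTF-8' not in encoding:
        print('Warning: preferred encoding is {}; '
              'switch to a UTF-8 locale to avoid parsing issues.'.format(encoding))
    else:
        print('Preferred encoding: {}'.format(encoding))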
@ -16,7 +16,7 @@ Configuration
Default configuration file change
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Instead of the standard ``config.py`` file located at
``$WA_USER_HOME/config.py`` WA now uses a ``config.yaml`` file (at the same
``$WA_USER_DIRECTORY/config.py`` WA now uses a ``config.yaml`` file (at the same
location) which is written in the YAML format instead of python. Additionally
upon first invocation WA3 will automatically try and detect whether a WA2 config
file is present and convert it to use the new WA3 format. During this process
@ -14,9 +14,9 @@ Using revent with workloads
^^^^^^^^^^^^^^^^^^^^^^^^^^^

Some workloads (pretty much all games) rely on recorded revents for their
execution. ReventWorkloads will require between 1 and 4 revent files be be ran.
There is one mandatory recording ``run`` for performing the actual execution of
the workload and the remaining are optional. ``setup`` can be used to perform
execution. ReventWorkloads require between 1 and 4 revent files to be run.
There is one mandatory recording, ``run``, for performing the actual execution of
the workload and the remaining stages are optional. ``setup`` can be used to perform
the initial setup (navigating menus, selecting game modes, etc).
``extract_results`` can be used to perform any actions after the main stage of
the workload for example to navigate a results or summary screen of the app. And
@ -26,17 +26,21 @@ exiting the app.
Because revents are very device-specific\ [*]_, these files would need to
be recorded for each device.

The files must be called ``<device name>.(setup|run|extract_results|teardown).revent``
, where ``<device name>`` is the name of your device (as defined by the ``name``
attribute of your device's class). WA will look for these files in two
places: ``<install dir>/wa/workloads/<workload name>/revent_files``
and ``~/.workload_automation/dependencies/<workload name>``. The first
location is primarily intended for revent files that come with WA (and if
The files must be called ``<device name>.(setup|run|extract_results|teardown).revent``,
where ``<device name>`` is the name of your device (as defined by the model
name of your device which can be retrieved with
``adb shell getprop ro.product.model`` or by the ``name`` attribute of your
customized device class).

WA will look for these files in two places:
``<installdir>/wa/workloads/<workload name>/revent_files`` and
``$WA_USER_DIRECTORY/dependencies/<workload name>``. The
first location is primarily intended for revent files that come with WA (and if
you did a system-wide install, you'll need sudo to add files there), so it's
probably easier to use the second location for the files you record. Also,
if revent files for a workload exist in both locations, the files under
``~/.workload_automation/dependencies`` will be used in favour of those
installed with WA.
probably easier to use the second location for the files you record. Also, if
revent files for a workload exist in both locations, the files under
``$WA_USER_DIRECTORY/dependencies`` will be used in favour
of those installed with WA.

.. [*] It's not just about screen resolution -- the event codes may be different
   even if devices use the same screen.
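The search order described above can be illustrated with a small sketch (the paths and
workload name are written out explicitly here only for clarity; WA resolves them
internally, preferring the user directory):

.. code-block:: python

    import os

    def candidate_revent_paths(workload, device_model, stage, install_dir, user_dir):
        """Return the two locations WA checks, user directory first."""
        filename = '{}.{}.revent'.format(device_model, stage)
        return [
            os.path.join(user_dir, 'dependencies', workload, filename),
            os.path.join(install_dir, 'wa', 'workloads', workload, 'revent_files', filename),
        ]

    for path in candidate_revent_paths('myworkload', 'mydevice', 'run',
                                       '/usr/local/lib/python3/dist-packages',
                                       os.path.expanduser('~/.workload_automation')):
        print(path)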
@ -12,8 +12,9 @@ Installation

.. module:: wa

This page describes the 3 methods of installing Workload Automation 3. The first
option is to use :ref:`pip` which
will install the latest release of WA, the latest development version from :ref:`github <github>` or via a :ref:`dockerfile`.
option is to use :ref:`pip` which will install the latest release of WA, the
latest development version from :ref:`github <github>` or via a
:ref:`dockerfile`.


Prerequisites
@ -97,8 +98,8 @@ similar distributions, this may be done with APT::
If you do run into this issue after already installing some packages,
you can resolve it by running ::

    sudo chmod -R a+r /usr/local/lib/python2.7/dist-packagessudo
    find /usr/local/lib/python2.7/dist-packages -type d -exec chmod a+x {} \;
    sudo chmod -R a+r /usr/local/lib/python2.7/dist-packages
    sudo find /usr/local/lib/python2.7/dist-packages -type d -exec chmod a+x {} \;

(The paths above will work for Ubuntu; they may need to be adjusted
for other distros).
@ -171,9 +172,11 @@ install them upfront (e.g. if you're planning to use WA to an environment that
may not always have Internet access).

* nose
* PyDAQmx
* pymongo
* jinja2
* mock
* daqpower
* sphinx
* sphinx_rtd_theme
* psycopg2-binary


@ -199,6 +202,18 @@ Alternatively, you can also install the latest development version from GitHub
    cd workload-automation
    sudo -H python setup.py install

.. note:: Please note that if using pip to install from github this will most
    likely result in an older and incompatible version of devlib being
    installed alongside WA. If you wish to use pip please also manually
    install the latest version of
    `devlib <https://github.com/ARM-software/devlib>`_.

.. note:: Please note that while a `requirements.txt` is included, this is
    designed to be a reference of known working packages rather than to
    be used as part of a standard installation. The version restrictions
    in place as part of `setup.py` should automatically ensure the correct
    packages are installed, however if encountering issues please try
    updating/downgrading to the package versions listed within.


If the above succeeds, try ::
@ -222,7 +237,7 @@ image in a container.

The Dockerfile can be found in the "extras" directory or online at
`<https://github.com/ARM-software/workload-automation/blob/next/extras/Dockerfile>`_
which contains addional information about how to build and to use the file.
which contains additional information about how to build and to use the file.


(Optional) Post Installation
@ -125,7 +125,7 @@ There are multiple options for configuring your device depending on your
particular use case.

You can either add your configuration to the default configuration file
``config.yaml``, under the ``$WA_USER_HOME/`` directory or you can specify it in
``config.yaml``, under the ``$WA_USER_DIRECTORY/`` directory or you can specify it in
the ``config`` section of your agenda directly.

Alternatively if you are using multiple devices, you may want to create separate
@ -6,7 +6,7 @@
#
# docker build -t wa .
#
# This will create an image called wadocker, which is preconfigured to
# This will create an image called wa, which is preconfigured to
# run WA and devlib. Please note that the build process automatically
# accepts the licenses for the Android SDK, so please be sure that you
# are willing to accept these prior to building and running the image
@ -38,21 +38,27 @@
#

# We want to make sure to base this on a recent ubuntu release
FROM ubuntu:17.10
FROM ubuntu:18.10

# Please update the references below to use different versions of
# devlib, WA or the Android SDK
ARG DEVLIB_REF=v1.1.0
ARG WA_REF=v3.1.1
ARG DEVLIB_REF=v1.1.2
ARG WA_REF=v3.1.4
ARG ANDROID_SDK_URL=https://dl.google.com/android/repository/sdk-tools-linux-3859397.zip

RUN apt-get update
RUN apt-get install -y python-pip git wget zip openjdk-8-jre-headless vim emacs nano curl sshpass ssh usbutils
RUN pip install pandas
RUN apt-get install -y python3 python3-pip git wget zip openjdk-8-jre-headless vim emacs nano curl sshpass ssh usbutils locales
RUN pip3 install pandas

# Ensure we're using utf-8 as our default encoding
RUN locale-gen en_US.UTF-8
ENV LANG en_US.UTF-8
ENV LANGUAGE en_US:en
ENV LC_ALL en_US.UTF-8

# Let's get the two repos we need, and install them
RUN git clone -v https://github.com/ARM-software/devlib.git /tmp/devlib && cd /tmp/devlib && git checkout $DEVLIB_REF && python setup.py install
RUN git clone -v https://github.com/ARM-software/workload-automation.git /tmp/wa && cd /tmp/wa && git checkout $WA_REF && python setup.py install
RUN git clone -v https://github.com/ARM-software/devlib.git /tmp/devlib && cd /tmp/devlib && git checkout $DEVLIB_REF && python3 setup.py install && pip3 install .[full]
RUN git clone -v https://github.com/ARM-software/workload-automation.git /tmp/wa && cd /tmp/wa && git checkout $WA_REF && python3 setup.py install && pip3 install .[all]

# Clean-up
RUN rm -R /tmp/devlib /tmp/wa
22
requirements.txt
Normal file
@ -0,0 +1,22 @@
certifi==2019.6.16
chardet==3.0.4
colorama==0.4.1
enum34==1.1.6; python_version<"3"
devlib==1.1.2
future==0.17.1
idna==2.8
Louie-latest==1.3.1
nose==1.3.7
numpy==1.16.4
pandas==0.24.2
pexpect==4.7.0
ptyprocess==0.6.0
pyserial==3.4
python-dateutil==2.8.0
pytz==2019.1
PyYAML==5.1.1
requests==2.22.0
six==1.12.0
urllib3==1.25.3
wlauto==3.1.4
wrapt==1.11.2
30
setup.py
@ -29,7 +29,8 @@ except ImportError:
|
||||
wa_dir = os.path.join(os.path.dirname(__file__), 'wa')
|
||||
|
||||
sys.path.insert(0, os.path.join(wa_dir, 'framework'))
|
||||
from version import get_wa_version, get_wa_version_with_commit
|
||||
from version import (get_wa_version, get_wa_version_with_commit,
|
||||
format_version, required_devlib_version)
|
||||
|
||||
# happens if falling back to distutils
|
||||
warnings.filterwarnings('ignore', "Unknown distribution option: 'install_requires'")
|
||||
@ -61,9 +62,14 @@ for root, dirs, files in os.walk(wa_dir):
|
||||
|
||||
scripts = [os.path.join('scripts', s) for s in os.listdir('scripts')]
|
||||
|
||||
with open("README.rst", "r") as fh:
|
||||
long_description = fh.read()
|
||||
|
||||
devlib_version = format_version(required_devlib_version)
|
||||
params = dict(
|
||||
name='wlauto',
|
||||
description='A framework for automating workload execution and measurement collection on ARM devices.',
|
||||
long_description=long_description,
|
||||
version=get_wa_version_with_commit(),
|
||||
packages=packages,
|
||||
package_data=data_files,
|
||||
@ -74,31 +80,34 @@ params = dict(
|
||||
maintainer='ARM Architecture & Technology Device Lab',
|
||||
maintainer_email='workload-automation@arm.com',
|
||||
setup_requires=[
|
||||
'numpy'
|
||||
'numpy<=1.16.4; python_version<"3"',
|
||||
'numpy; python_version>="3"',
|
||||
],
|
||||
install_requires=[
|
||||
'python-dateutil', # converting between UTC and local time.
|
||||
'pexpect>=3.3', # Send/receive to/from device
|
||||
'pyserial', # Serial port interface
|
||||
'colorama', # Printing with colors
|
||||
'pyYAML', # YAML-formatted agenda parsing
|
||||
'pyYAML>=5.1b3', # YAML-formatted agenda parsing
|
||||
'requests', # Fetch assets over HTTP
|
||||
'devlib>=1.1.0', # Interacting with devices
|
||||
'devlib>={}'.format(devlib_version), # Interacting with devices
|
||||
'louie-latest', # callbacks dispatch
|
||||
'wrapt', # better decorators
|
||||
'pandas>=0.23.0', # Data analysis and manipulation
|
||||
'pandas>=0.23.0,<=0.24.2; python_version<"3"', # Data analysis and manipulation
|
||||
'pandas>=0.23.0; python_version>"3"', # Data analysis and manipulation
|
||||
'future', # Python 2-3 compatiblity
|
||||
],
|
||||
dependency_links=['https://github.com/ARM-software/devlib/tarball/master#egg=devlib-{}'.format(devlib_version)],
|
||||
extras_require={
|
||||
'other': ['jinja2'],
|
||||
'test': ['nose', 'mock'],
|
||||
'mongodb': ['pymongo'],
|
||||
'notify': ['notify2'],
|
||||
'doc': ['sphinx'],
|
||||
'doc': ['sphinx', 'sphinx_rtd_theme'],
|
||||
'postgres': ['psycopg2-binary'],
|
||||
'daq': ['daqpower'],
|
||||
},
|
||||
# https://pypi.python.org/pypi?%3Aaction=list_classifiers
|
||||
classifiers=[
|
||||
'Development Status :: 4 - Beta',
|
||||
'Development Status :: 5 - Production/Stable',
|
||||
'Environment :: Console',
|
||||
'License :: OSI Approved :: Apache Software License',
|
||||
'Operating System :: POSIX :: Linux',
|
||||
@ -108,7 +117,7 @@ params = dict(
|
||||
)
|
||||
|
||||
all_extras = list(chain(iter(params['extras_require'].values())))
|
||||
params['extras_require']['everything'] = all_extras
|
||||
params['extras_require']['all'] = all_extras
|
||||
|
||||
|
||||
class sdist(orig_sdist):
|
||||
@ -122,7 +131,6 @@ class sdist(orig_sdist):
|
||||
orig_sdist.initialize_options(self)
|
||||
self.strip_commit = False
|
||||
|
||||
|
||||
def run(self):
|
||||
if self.strip_commit:
|
||||
self.distribution.get_version = get_wa_version
|
||||
|
@ -18,7 +18,6 @@
|
||||
# pylint: disable=R0201
|
||||
import os
|
||||
import sys
|
||||
import yaml
|
||||
from collections import defaultdict
|
||||
from unittest import TestCase
|
||||
|
||||
@ -31,6 +30,7 @@ os.environ['WA_USER_DIRECTORY'] = os.path.join(DATA_DIR, 'includes')
|
||||
from wa.framework.configuration.execution import ConfigManager
|
||||
from wa.framework.configuration.parsers import AgendaParser
|
||||
from wa.framework.exception import ConfigError
|
||||
from wa.utils.serializer import yaml
|
||||
from wa.utils.types import reset_all_counters
|
||||
|
||||
|
||||
@ -44,8 +44,6 @@ workloads:
|
||||
workload_parameters:
|
||||
test: 1
|
||||
"""
|
||||
invalid_agenda = yaml.load(invalid_agenda_text)
|
||||
invalid_agenda.name = 'invalid1'
|
||||
|
||||
duplicate_agenda_text = """
|
||||
global:
|
||||
@ -58,14 +56,10 @@ workloads:
|
||||
- id: "1"
|
||||
workload_name: benchmarkpi
|
||||
"""
|
||||
duplicate_agenda = yaml.load(duplicate_agenda_text)
|
||||
duplicate_agenda.name = 'invalid2'
|
||||
|
||||
short_agenda_text = """
|
||||
workloads: [antutu, dhrystone, benchmarkpi]
|
||||
"""
|
||||
short_agenda = yaml.load(short_agenda_text)
|
||||
short_agenda.name = 'short'
|
||||
|
||||
default_ids_agenda_text = """
|
||||
workloads:
|
||||
@ -78,8 +72,6 @@ workloads:
|
||||
cpus: 1
|
||||
- vellamo
|
||||
"""
|
||||
default_ids_agenda = yaml.load(default_ids_agenda_text)
|
||||
default_ids_agenda.name = 'default_ids'
|
||||
|
||||
sectioned_agenda_text = """
|
||||
sections:
|
||||
@ -102,8 +94,6 @@ sections:
|
||||
workloads:
|
||||
- memcpy
|
||||
"""
|
||||
sectioned_agenda = yaml.load(sectioned_agenda_text)
|
||||
sectioned_agenda.name = 'sectioned'
|
||||
|
||||
dup_sectioned_agenda_text = """
|
||||
sections:
|
||||
@ -116,8 +106,22 @@ sections:
|
||||
workloads:
|
||||
- memcpy
|
||||
"""
|
||||
dup_sectioned_agenda = yaml.load(dup_sectioned_agenda_text)
|
||||
dup_sectioned_agenda.name = 'dup-sectioned'
|
||||
|
||||
yaml_anchors_agenda_text = """
|
||||
workloads:
|
||||
- name: dhrystone
|
||||
params: &dhrystone_single_params
|
||||
cleanup_assets: true
|
||||
cpus: 0
|
||||
delay: 3
|
||||
duration: 0
|
||||
mloops: 10
|
||||
threads: 1
|
||||
- name: dhrystone
|
||||
params:
|
||||
<<: *dhrystone_single_params
|
||||
threads: 4
|
||||
"""
|
||||
|
||||
|
||||
class AgendaTest(TestCase):
|
||||
@ -132,6 +136,8 @@ class AgendaTest(TestCase):
|
||||
assert_equal(len(self.config.jobs_config.root_node.workload_entries), 4)
|
||||
|
||||
def test_duplicate_id(self):
|
||||
duplicate_agenda = yaml.load(duplicate_agenda_text)
|
||||
|
||||
try:
|
||||
self.parser.load(self.config, duplicate_agenda, 'test')
|
||||
except ConfigError as e:
|
||||
@ -140,6 +146,8 @@ class AgendaTest(TestCase):
|
||||
raise Exception('ConfigError was not raised for an agenda with duplicate ids.')
|
||||
|
||||
def test_yaml_missing_field(self):
|
||||
invalid_agenda = yaml.load(invalid_agenda_text)
|
||||
|
||||
try:
|
||||
self.parser.load(self.config, invalid_agenda, 'test')
|
||||
except ConfigError as e:
|
||||
@ -148,20 +156,26 @@ class AgendaTest(TestCase):
|
||||
raise Exception('ConfigError was not raised for an invalid agenda.')
|
||||
|
||||
def test_defaults(self):
|
||||
short_agenda = yaml.load(short_agenda_text)
|
||||
self.parser.load(self.config, short_agenda, 'test')
|
||||
|
||||
workload_entries = self.config.jobs_config.root_node.workload_entries
|
||||
assert_equal(len(workload_entries), 3)
|
||||
assert_equal(workload_entries[0].config['workload_name'], 'antutu')
|
||||
assert_equal(workload_entries[0].id, 'wk1')
|
||||
|
||||
def test_default_id_assignment(self):
|
||||
default_ids_agenda = yaml.load(default_ids_agenda_text)
|
||||
|
||||
self.parser.load(self.config, default_ids_agenda, 'test2')
|
||||
workload_entries = self.config.jobs_config.root_node.workload_entries
|
||||
assert_equal(workload_entries[0].id, 'wk2')
|
||||
assert_equal(workload_entries[3].id, 'wk3')
|
||||
|
||||
def test_sections(self):
|
||||
sectioned_agenda = yaml.load(sectioned_agenda_text)
|
||||
self.parser.load(self.config, sectioned_agenda, 'test')
|
||||
|
||||
root_node_workload_entries = self.config.jobs_config.root_node.workload_entries
|
||||
leaves = list(self.config.jobs_config.root_node.leaves())
|
||||
section1_workload_entries = leaves[0].workload_entries
|
||||
@ -171,8 +185,22 @@ class AgendaTest(TestCase):
|
||||
assert_true(section1_workload_entries[0].config['workload_parameters']['markers_enabled'])
|
||||
assert_equal(section2_workload_entries[0].config['workload_name'], 'antutu')
|
||||
|
||||
def test_yaml_anchors(self):
|
||||
yaml_anchors_agenda = yaml.load(yaml_anchors_agenda_text)
|
||||
self.parser.load(self.config, yaml_anchors_agenda, 'test')
|
||||
|
||||
workload_entries = self.config.jobs_config.root_node.workload_entries
|
||||
assert_equal(len(workload_entries), 2)
|
||||
assert_equal(workload_entries[0].config['workload_name'], 'dhrystone')
|
||||
assert_equal(workload_entries[0].config['workload_parameters']['threads'], 1)
|
||||
assert_equal(workload_entries[0].config['workload_parameters']['delay'], 3)
|
||||
assert_equal(workload_entries[1].config['workload_name'], 'dhrystone')
|
||||
assert_equal(workload_entries[1].config['workload_parameters']['threads'], 4)
|
||||
assert_equal(workload_entries[1].config['workload_parameters']['delay'], 3)
|
||||
|
||||
@raises(ConfigError)
|
||||
def test_dup_sections(self):
|
||||
dup_sectioned_agenda = yaml.load(dup_sectioned_agenda_text)
|
||||
self.parser.load(self.config, dup_sectioned_agenda, 'test')
|
||||
|
||||
@raises(ConfigError)
|
||||
|
@ -33,7 +33,7 @@ from wa.framework.target.descriptor import (TargetDescriptor, TargetDescription,
|
||||
create_target_description, add_description_for_target)
|
||||
from wa.framework.workload import (Workload, ApkWorkload, ApkUiautoWorkload,
|
||||
ApkReventWorkload, UIWorkload, UiautoWorkload,
|
||||
ReventWorkload)
|
||||
PackageHandler, ReventWorkload, TestPackageHandler)
|
||||
|
||||
|
||||
from wa.framework.version import get_wa_version, get_wa_version_with_commit
|
||||
|
@ -1,4 +1,4 @@
|
||||
--!VERSION!1.2!ENDVERSION!
|
||||
--!VERSION!1.3!ENDVERSION!
|
||||
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
|
||||
CREATE EXTENSION IF NOT EXISTS "lo";
|
||||
|
||||
@ -96,6 +96,7 @@ CREATE TABLE Targets (
|
||||
android_id text,
|
||||
_pod_version int,
|
||||
_pod_serialization_version int,
|
||||
system_id text,
|
||||
PRIMARY KEY (oid)
|
||||
);
|
||||
|
||||
@ -164,6 +165,7 @@ CREATE TABLE Artifacts (
|
||||
kind text,
|
||||
_pod_version int,
|
||||
_pod_serialization_version int,
|
||||
is_dir boolean,
|
||||
PRIMARY KEY (oid)
|
||||
);
|
||||
|
||||
|
@ -0,0 +1,3 @@
ALTER TABLE targets ADD COLUMN system_id text;

ALTER TABLE artifacts ADD COLUMN is_dir boolean;
@ -7,3 +7,13 @@
was done following an extended discussion and tests that verified
that the savings in processing power were not enough to warrant
the creation of a dedicated server or file handler.

## 1.2
- Rename the `resourcegetters` table to `resource_getters` for consistency.
- Add Job and Run level classifiers.
- Add missing android specific properties to targets.
- Add new POD meta data to relevant tables.
- Correct job column name from `retires` to `retry`.
- Add missing run information.

## 1.3
- Add missing "system_id" field from TargetInfo.
- Enable support for uploading Artifacts that represent directories.
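If an existing WA results database needs to be brought up to schema v1.3 by hand, the two
statements above can be applied with a short psycopg2 session like the one below
(connection details are placeholders; WA's own database creation/upgrade tooling is
normally expected to manage the schema):

.. code-block:: python

    import psycopg2

    MIGRATION_STATEMENTS = [
        "ALTER TABLE targets ADD COLUMN system_id text;",
        "ALTER TABLE artifacts ADD COLUMN is_dir boolean;",
    ]

    # Placeholder connection details.
    conn = psycopg2.connect(host='localhost', dbname='wa',
                            user='postgres', password='postgres')
    try:
        with conn.cursor() as cursor:
            for statement in MIGRATION_STATEMENTS:
                cursor.execute(statement)
        conn.commit()
    finally:
        conn.close()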
@ -24,7 +24,7 @@ from wa.framework.configuration.core import (MetaConfiguration, RunConfiguration
|
||||
JobGenerator, settings)
|
||||
from wa.framework.configuration.parsers import ConfigParser
|
||||
from wa.framework.configuration.plugin_cache import PluginCache
|
||||
from wa.framework.exception import NotFoundError
|
||||
from wa.framework.exception import NotFoundError, ConfigError
|
||||
from wa.framework.job import Job
|
||||
from wa.utils import log
|
||||
from wa.utils.serializer import Podable
|
||||
@ -148,6 +148,9 @@ class ConfigManager(object):
|
||||
|
||||
def generate_jobs(self, context):
|
||||
job_specs = self.jobs_config.generate_job_specs(context.tm)
|
||||
if not job_specs:
|
||||
msg = 'No jobs available for running.'
|
||||
raise ConfigError(msg)
|
||||
exec_order = self.run_config.execution_order
|
||||
log.indent()
|
||||
for spec, i in permute_iterations(job_specs, exec_order):
|
||||
|
@ -16,19 +16,25 @@
|
||||
|
||||
import sys
|
||||
import argparse
|
||||
import locale
|
||||
import logging
|
||||
import os
|
||||
import warnings
|
||||
|
||||
import devlib
|
||||
try:
|
||||
from devlib.utils.version import version as installed_devlib_version
|
||||
except ImportError:
|
||||
installed_devlib_version = None
|
||||
|
||||
from wa.framework import pluginloader
|
||||
from wa.framework.command import init_argument_parser
|
||||
from wa.framework.configuration import settings
|
||||
from wa.framework.configuration.execution import ConfigManager
|
||||
from wa.framework.host import init_user_directory, init_config
|
||||
from wa.framework.exception import ConfigError
|
||||
from wa.framework.version import get_wa_version_with_commit
|
||||
from wa.framework.exception import ConfigError, HostError
|
||||
from wa.framework.version import (get_wa_version_with_commit, format_version,
|
||||
required_devlib_version)
|
||||
from wa.utils import log
|
||||
from wa.utils.doc import format_body
|
||||
|
||||
@ -64,6 +70,25 @@ def split_joined_options(argv):
|
||||
return output
|
||||
|
||||
|
||||
# Instead of presenting an obscure error due to a version mismatch explicitly warn the user.
|
||||
def check_devlib_version():
|
||||
if not installed_devlib_version or installed_devlib_version < required_devlib_version:
|
||||
msg = 'WA requires Devlib version >={}. Please update the currently installed version {}'
|
||||
raise HostError(msg.format(format_version(required_devlib_version), devlib.__version__))
|
||||
|
||||
|
||||
# If the default encoding is not UTF-8 warn the user as this may cause compatibility issues
|
||||
# when parsing files.
|
||||
def check_system_encoding():
|
||||
system_encoding = locale.getpreferredencoding()
|
||||
msg = 'System Encoding: {}'.format(system_encoding)
|
||||
if 'UTF-8' not in system_encoding:
|
||||
logger.warning(msg)
|
||||
logger.warning('To prevent encoding issues please use a locale setting which supports UTF-8')
|
||||
else:
|
||||
logger.debug(msg)
|
||||
|
||||
|
||||
def main():
|
||||
if not os.path.exists(settings.user_directory):
|
||||
init_user_directory()
|
||||
@ -102,6 +127,8 @@ def main():
|
||||
logger.debug('Version: {}'.format(get_wa_version_with_commit()))
|
||||
logger.debug('devlib version: {}'.format(devlib.__full_version__))
|
||||
logger.debug('Command Line: {}'.format(' '.join(sys.argv)))
|
||||
check_devlib_version()
|
||||
check_system_encoding()
|
||||
|
||||
# each command will add its own subparser
|
||||
subparsers = parser.add_subparsers(dest='command')
|
||||
|
@ -23,6 +23,8 @@ except ImportError:
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import tarfile
|
||||
import tempfile
|
||||
from collections import OrderedDict, defaultdict
|
||||
from copy import copy, deepcopy
|
||||
from datetime import datetime
|
||||
@ -145,9 +147,10 @@ class Output(object):
|
||||
if not os.path.exists(path):
|
||||
msg = 'Attempting to add non-existing artifact: {}'
|
||||
raise HostError(msg.format(path))
|
||||
is_dir = os.path.isdir(path)
|
||||
path = os.path.relpath(path, self.basepath)
|
||||
|
||||
self.result.add_artifact(name, path, kind, description, classifiers)
|
||||
self.result.add_artifact(name, path, kind, description, classifiers, is_dir)
|
||||
|
||||
def add_event(self, message):
|
||||
self.result.add_event(message)
|
||||
@ -346,6 +349,13 @@ class JobOutput(Output):
|
||||
self.spec = None
|
||||
self.reload()
|
||||
|
||||
@property
|
||||
def augmentations(self):
|
||||
job_augs = set([])
|
||||
for aug in self.spec.augmentations:
|
||||
job_augs.add(aug)
|
||||
return list(job_augs)
|
||||
|
||||
|
||||
class Result(Podable):
|
||||
|
||||
@ -378,9 +388,10 @@ class Result(Podable):
|
||||
logger.debug('Adding metric: {}'.format(metric))
|
||||
self.metrics.append(metric)
|
||||
|
||||
def add_artifact(self, name, path, kind, description=None, classifiers=None):
|
||||
def add_artifact(self, name, path, kind, description=None, classifiers=None,
|
||||
is_dir=False):
|
||||
artifact = Artifact(name, path, kind, description=description,
|
||||
classifiers=classifiers)
|
||||
classifiers=classifiers, is_dir=is_dir)
|
||||
logger.debug('Adding artifact: {}'.format(artifact))
|
||||
self.artifacts.append(artifact)
|
||||
|
||||
@ -516,7 +527,7 @@ class Artifact(Podable):
|
||||
|
||||
"""
|
||||
|
||||
_pod_serialization_version = 1
|
||||
_pod_serialization_version = 2
|
||||
|
||||
@staticmethod
|
||||
def from_pod(pod):
|
||||
@ -525,9 +536,11 @@ class Artifact(Podable):
|
||||
pod['kind'] = ArtifactType(pod['kind'])
|
||||
instance = Artifact(**pod)
|
||||
instance._pod_version = pod_version # pylint: disable =protected-access
|
||||
instance.is_dir = pod.pop('is_dir')
|
||||
return instance
|
||||
|
||||
def __init__(self, name, path, kind, description=None, classifiers=None):
|
||||
def __init__(self, name, path, kind, description=None, classifiers=None,
|
||||
is_dir=False):
|
||||
""""
|
||||
:param name: Name that uniquely identifies this artifact.
|
||||
:param path: The *relative* path of the artifact. Depending on the
|
||||
@ -543,7 +556,6 @@ class Artifact(Podable):
|
||||
:param classifiers: A set of key-value pairs to further classify this
|
||||
metric beyond current iteration (e.g. this can be
|
||||
used to identify sub-tests).
|
||||
|
||||
"""
|
||||
super(Artifact, self).__init__()
|
||||
self.name = name
|
||||
@ -555,11 +567,13 @@ class Artifact(Podable):
|
||||
raise ValueError(msg.format(kind, ARTIFACT_TYPES))
|
||||
self.description = description
|
||||
self.classifiers = classifiers or {}
|
||||
self.is_dir = is_dir
|
||||
|
||||
def to_pod(self):
|
||||
pod = super(Artifact, self).to_pod()
|
||||
pod.update(self.__dict__)
|
||||
pod['kind'] = str(self.kind)
|
||||
pod['is_dir'] = self.is_dir
|
||||
return pod
|
||||
|
||||
@staticmethod
|
||||
@ -567,11 +581,17 @@ class Artifact(Podable):
|
||||
pod['_pod_version'] = pod.get('_pod_version', 1)
|
||||
return pod
|
||||
|
||||
@staticmethod
|
||||
def _pod_upgrade_v2(pod):
|
||||
pod['is_dir'] = pod.get('is_dir', False)
|
||||
return pod
|
||||
|
||||
def __str__(self):
|
||||
return self.path
|
||||
|
||||
def __repr__(self):
|
||||
return '{} ({}): {}'.format(self.name, self.kind, self.path)
|
||||
ft = 'dir' if self.is_dir else 'file'
|
||||
return '{} ({}) ({}): {}'.format(self.name, ft, self.kind, self.path)
|
||||
|
||||
|
||||
class Metric(Podable):
|
||||
@ -602,6 +622,12 @@ class Metric(Podable):
|
||||
instance._pod_version = pod_version # pylint: disable =protected-access
|
||||
return instance
|
||||
|
||||
@property
|
||||
def label(self):
|
||||
parts = ['{}={}'.format(n, v) for n, v in self.classifiers.items()]
|
||||
parts.insert(0, self.name)
|
||||
return '/'.join(parts)
|
||||
|
||||
def __init__(self, name, value, units=None, lower_is_better=False,
|
||||
classifiers=None):
|
||||
super(Metric, self).__init__()
|
||||
@ -798,6 +824,19 @@ class DatabaseOutput(Output):
|
||||
|
||||
def get_artifact_path(self, name):
|
||||
artifact = self.get_artifact(name)
|
||||
if artifact.is_dir:
|
||||
return self._read_dir_artifact(artifact)
|
||||
else:
|
||||
return self._read_file_artifact(artifact)
|
||||
|
||||
def _read_dir_artifact(self, artifact):
|
||||
artifact_path = tempfile.mkdtemp(prefix='wa_')
|
||||
with tarfile.open(fileobj=self.conn.lobject(int(artifact.path), mode='b'), mode='r|gz') as tar_file:
|
||||
tar_file.extractall(artifact_path)
|
||||
self.conn.commit()
|
||||
return artifact_path
|
||||
|
||||
def _read_file_artifact(self, artifact):
|
||||
artifact = StringIO(self.conn.lobject(int(artifact.path)).read())
|
||||
self.conn.commit()
|
||||
return artifact
|
||||
@ -886,13 +925,15 @@ class DatabaseOutput(Output):
|
||||
|
||||
def _get_artifacts(self):
|
||||
columns = ['artifacts.name', 'artifacts.description', 'artifacts.kind',
|
||||
('largeobjects.lo_oid', 'path'), 'artifacts.oid',
|
||||
('largeobjects.lo_oid', 'path'), 'artifacts.oid', 'artifacts.is_dir',
|
||||
'artifacts._pod_version', 'artifacts._pod_serialization_version']
|
||||
tables = ['largeobjects', 'artifacts']
|
||||
joins = [('classifiers', 'classifiers.artifact_oid = artifacts.oid')]
|
||||
conditions = ['artifacts.{}_oid = \'{}\''.format(self.kind, self.oid),
|
||||
'artifacts.large_object_uuid = largeobjects.oid',
|
||||
'artifacts.job_oid IS NULL']
|
||||
'artifacts.large_object_uuid = largeobjects.oid']
|
||||
# If retrieving run level artifacts we want those that don't also belong to a job
|
||||
if self.kind == 'run':
|
||||
conditions.append('artifacts.job_oid IS NULL')
|
||||
pod = self._read_db(columns, tables, conditions, joins)
|
||||
for artifact in pod:
|
||||
artifact['path'] = str(artifact['path'])
|
||||
@ -907,8 +948,9 @@ class DatabaseOutput(Output):
|
||||
|
||||
def kernel_config_from_db(raw):
|
||||
kernel_config = {}
|
||||
for k, v in zip(raw[0], raw[1]):
|
||||
kernel_config[k] = v
|
||||
if raw:
|
||||
for k, v in zip(raw[0], raw[1]):
|
||||
kernel_config[k] = v
|
||||
return kernel_config
|
||||
|
||||
|
||||
@ -944,7 +986,8 @@ class RunDatabaseOutput(DatabaseOutput, RunOutputCommon):
|
||||
def _db_targetfile(self):
|
||||
columns = ['os', 'is_rooted', 'target', 'abi', 'cpus', 'os_version',
|
||||
'hostid', 'hostname', 'kernel_version', 'kernel_release',
|
||||
'kernel_sha1', 'kernel_config', 'sched_features',
|
||||
'kernel_sha1', 'kernel_config', 'sched_features', 'page_size_kb',
|
||||
'system_id', 'screen_resolution', 'prop', 'android_id',
|
||||
'_pod_version', '_pod_serialization_version']
|
||||
tables = ['targets']
|
||||
conditions = ['targets.run_oid = \'{}\''.format(self.oid)]
|
||||
@ -997,6 +1040,7 @@ class RunDatabaseOutput(DatabaseOutput, RunOutputCommon):
|
||||
jobs = self._read_db(columns, tables, conditions)
|
||||
|
||||
for job in jobs:
|
||||
job['augmentations'] = self._get_job_augmentations(job['oid'])
|
||||
job['workload_parameters'] = workload_params.pop(job['oid'], {})
|
||||
job['runtime_parameters'] = runtime_params.pop(job['oid'], {})
|
||||
job.pop('oid')
|
||||
@ -1160,6 +1204,15 @@ class RunDatabaseOutput(DatabaseOutput, RunOutputCommon):
|
||||
logger.debug('Failed to deserialize job_oid:{}-"{}":"{}"'.format(job_oid, k, v))
|
||||
return parm_dict
|
||||
|
||||
def _get_job_augmentations(self, job_oid):
|
||||
columns = ['jobs_augs.augmentation_oid', 'augmentations.name',
|
||||
'augmentations.oid', 'jobs_augs.job_oid']
|
||||
tables = ['jobs_augs', 'augmentations']
|
||||
conditions = ['jobs_augs.job_oid = \'{}\''.format(job_oid),
|
||||
'jobs_augs.augmentation_oid = augmentations.oid']
|
||||
augmentations = self._read_db(columns, tables, conditions)
|
||||
return [aug['name'] for aug in augmentations]
|
||||
|
||||
def _list_runs(self):
|
||||
columns = ['runs.run_uuid', 'runs.run_name', 'runs.project',
|
||||
'runs.project_stage', 'runs.status', 'runs.start_time', 'runs.end_time']
|
||||
@ -1211,3 +1264,11 @@ class JobDatabaseOutput(DatabaseOutput):
|
||||
|
||||
def __str__(self):
|
||||
return '{}-{}-{}'.format(self.id, self.label, self.iteration)
|
||||
|
||||
@property
|
||||
def augmentations(self):
|
||||
job_augs = set([])
|
||||
if self.spec:
|
||||
for aug in self.spec.augmentations:
|
||||
job_augs.add(aug)
|
||||
return list(job_augs)
|
||||
|
@ -24,7 +24,7 @@ from wa.framework.exception import ResourceError
|
||||
from wa.framework.configuration import settings
|
||||
from wa.utils import log
|
||||
from wa.utils.misc import get_object_name
|
||||
from wa.utils.types import enum, list_or_string, prioritylist
|
||||
from wa.utils.types import enum, list_or_string, prioritylist, version_tuple
|
||||
|
||||
|
||||
SourcePriority = enum(['package', 'remote', 'lan', 'local',
|
||||
@ -142,10 +142,12 @@ class ApkFile(Resource):
|
||||
|
||||
def __init__(self, owner, variant=None, version=None,
|
||||
package=None, uiauto=False, exact_abi=False,
|
||||
supported_abi=None):
|
||||
supported_abi=None, min_version=None, max_version=None):
|
||||
super(ApkFile, self).__init__(owner)
|
||||
self.variant = variant
|
||||
self.version = version
|
||||
self.max_version = max_version
|
||||
self.min_version = min_version
|
||||
self.package = package
|
||||
self.uiauto = uiauto
|
||||
self.exact_abi = exact_abi
|
||||
@ -158,21 +160,25 @@ class ApkFile(Resource):
|
||||
def match(self, path):
|
||||
name_matches = True
|
||||
version_matches = True
|
||||
version_range_matches = True
|
||||
package_matches = True
|
||||
abi_matches = True
|
||||
uiauto_matches = uiauto_test_matches(path, self.uiauto)
|
||||
if self.version is not None:
|
||||
if self.version:
|
||||
version_matches = apk_version_matches(path, self.version)
|
||||
if self.variant is not None:
|
||||
if self.max_version or self.min_version:
|
||||
version_range_matches = apk_version_matches_range(path, self.min_version,
|
||||
self.max_version)
|
||||
if self.variant:
|
||||
name_matches = file_name_matches(path, self.variant)
|
||||
if self.package is not None:
|
||||
if self.package:
|
||||
package_matches = package_name_matches(path, self.package)
|
||||
if self.supported_abi is not None:
|
||||
if self.supported_abi:
|
||||
abi_matches = apk_abi_matches(path, self.supported_abi,
|
||||
self.exact_abi)
|
||||
return name_matches and version_matches and \
|
||||
uiauto_matches and package_matches and \
|
||||
abi_matches
|
||||
version_range_matches and uiauto_matches \
|
||||
and package_matches and abi_matches
|
||||
|
||||
def __str__(self):
|
||||
text = '<{}\'s apk'.format(self.owner)
|
||||
@ -273,15 +279,40 @@ class ResourceResolver(object):


def apk_version_matches(path, version):
version = list_or_string(version)
info = ApkInfo(path)
if info.version_name == version or info.version_code == version:
return True
return loose_version_matching(version, info.version_name)
for v in version:
if info.version_name == v or info.version_code == v:
return True
if loose_version_matching(v, info.version_name):
return True
return False


def apk_version_matches_range(path, min_version=None, max_version=None):
info = ApkInfo(path)
return range_version_matching(info.version_name, min_version, max_version)


def range_version_matching(apk_version, min_version=None, max_version=None):
if not apk_version:
return False
apk_version = version_tuple(apk_version)

if max_version:
max_version = version_tuple(max_version)
if apk_version > max_version:
return False
if min_version:
min_version = version_tuple(min_version)
if apk_version < min_version:
return False
return True


def loose_version_matching(config_version, apk_version):
config_version = config_version.split('.')
apk_version = apk_version.split('.')
config_version = version_tuple(config_version)
apk_version = version_tuple(apk_version)

if len(apk_version) < len(config_version):
return False  # More specific version requested than available
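A quick sketch of how the range helper above behaves; the version strings here are made up:

    # Versions are compared as tuples of ints via version_tuple(), not as strings.
    range_version_matching('4.4.1', min_version='4.0', max_version='5.0')  # True
    range_version_matching('5.1', max_version='5.0')                       # False, above the ceiling
    range_version_matching('', min_version='1.0')                          # False, nothing to compare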
@ -53,9 +53,9 @@ def kernel_version_from_pod(pod):

def kernel_config_from_pod(pod):
config = KernelConfig('')
config._config = pod['kernel_config']
config.typed_config._config = pod['kernel_config']
lines = []
for key, value in config._config.items():
for key, value in config.items():
if value == 'n':
lines.append('# {} is not set'.format(key))
else:
@ -313,7 +313,7 @@ def cache_target_info(target_info, overwrite=False):

class TargetInfo(Podable):

_pod_serialization_version = 2
_pod_serialization_version = 4

@staticmethod
def from_pod(pod):
@ -401,3 +401,15 @@ class TargetInfo(Podable):
pod['page_size_kb'] = pod.get('page_size_kb')
pod['_pod_version'] = pod.get('format_version', 0)
return pod

@staticmethod
def _pod_upgrade_v3(pod):
config = {}
for key, value in pod['kernel_config'].items():
config[key.upper()] = value
pod['kernel_config'] = config
return pod

@staticmethod
def _pod_upgrade_v4(pod):
return TargetInfo._pod_upgrade_v3(pod)
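For reference, the v3 upgrade above only normalises the case of cached kernel config keys; a minimal sketch with made-up data:

    pod = {'kernel_config': {'config_example': 'y', 'config_other': 'n'}}
    pod = TargetInfo._pod_upgrade_v3(pod)
    # pod['kernel_config'] == {'CONFIG_EXAMPLE': 'y', 'CONFIG_OTHER': 'n'}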
@ -45,7 +45,7 @@ public class BaseUiAutomation {
|
||||
|
||||
public enum FindByCriteria { BY_ID, BY_TEXT, BY_DESC };
|
||||
public enum Direction { UP, DOWN, LEFT, RIGHT, NULL };
|
||||
public enum ScreenOrientation { RIGHT, NATURAL, LEFT };
|
||||
public enum ScreenOrientation { RIGHT, NATURAL, LEFT, PORTRAIT, LANDSCAPE };
|
||||
public enum PinchType { IN, OUT, NULL };
|
||||
|
||||
// Time in milliseconds
|
||||
@ -176,6 +176,8 @@ public class BaseUiAutomation {
|
||||
}
|
||||
|
||||
public void setScreenOrientation(ScreenOrientation orientation) throws Exception {
|
||||
int width = mDevice.getDisplayWidth();
|
||||
int height = mDevice.getDisplayHeight();
|
||||
switch (orientation) {
|
||||
case RIGHT:
|
||||
mDevice.setOrientationRight();
|
||||
@ -186,6 +188,30 @@ public class BaseUiAutomation {
|
||||
case LEFT:
|
||||
mDevice.setOrientationLeft();
|
||||
break;
|
||||
case LANDSCAPE:
|
||||
if (mDevice.isNaturalOrientation()){
|
||||
if (height > width){
|
||||
mDevice.setOrientationRight();
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (height > width){
|
||||
mDevice.setOrientationNatural();
|
||||
}
|
||||
}
|
||||
break;
|
||||
case PORTRAIT:
|
||||
if (mDevice.isNaturalOrientation()){
|
||||
if (height < width){
|
||||
mDevice.setOrientationRight();
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (height < width){
|
||||
mDevice.setOrientationNatural();
|
||||
}
|
||||
}
|
||||
break;
|
||||
default:
|
||||
throw new Exception("No orientation specified");
|
||||
}
|
||||
|
@ -19,15 +19,23 @@ from collections import namedtuple
from subprocess import Popen, PIPE


VersionTuple = namedtuple('Version', ['major', 'minor', 'revision'])
VersionTuple = namedtuple('Version', ['major', 'minor', 'revision', 'dev'])

version = VersionTuple(3, 1, 1)
version = VersionTuple(3, 1, 4, '')

required_devlib_version = VersionTuple(1, 1, 2, '')


def format_version(v):
version_string = '{}.{}.{}'.format(
v.major, v.minor, v.revision)
if v.dev:
version_string += '.{}'.format(v.dev)
return version_string


def get_wa_version():
version_string = '{}.{}.{}'.format(
version.major, version.minor, version.revision)
return version_string
return format_version(version)


def get_wa_version_with_commit():
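The new dev component only appears in the formatted string when it is non-empty, e.g.:

    format_version(VersionTuple(3, 1, 4, ''))      # '3.1.4'
    format_version(VersionTuple(3, 1, 4, 'dev1'))  # '3.1.4.dev1'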
@ -1,4 +1,4 @@
|
||||
# Copyright 2014-2018 ARM Limited
|
||||
# Copyright 2014-2019 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@ -14,15 +14,22 @@
|
||||
#
|
||||
import logging
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
|
||||
try:
|
||||
from shlex import quote
|
||||
except ImportError:
|
||||
from pipes import quote
|
||||
|
||||
from devlib.utils.android import ApkInfo
|
||||
|
||||
from wa.framework.plugin import TargetedPlugin, Parameter
|
||||
from wa.framework.resource import (ApkFile, ReventFile,
|
||||
File, loose_version_matching)
|
||||
File, loose_version_matching,
|
||||
range_version_matching)
|
||||
from wa.framework.exception import WorkloadError, ConfigError
|
||||
from wa.utils.types import ParameterDict
|
||||
from wa.utils.types import ParameterDict, list_or_string, version_tuple
|
||||
from wa.utils.revent import ReventRecorder
|
||||
from wa.utils.exec_control import once_per_instance
|
||||
|
||||
@ -42,9 +49,15 @@ class Workload(TargetedPlugin):
|
||||
aliases=['clean_up'],
|
||||
default=True,
|
||||
description="""
|
||||
If ``True``, if assets are deployed as part of the workload they
|
||||
will be removed again from the device as part of finalize.
|
||||
""")
|
||||
If ``True``, assets that are deployed or created as part of the
|
||||
workload will be removed again from the device.
|
||||
"""),
|
||||
Parameter('uninstall', kind=bool,
|
||||
default=True,
|
||||
description="""
|
||||
If ``True``, executables that are installed to the device
|
||||
as part of the workload will be uninstalled again.
|
||||
"""),
|
||||
]
|
||||
|
||||
# Set this to True to mark that this workload poses a risk of exposing
|
||||
@ -73,7 +86,7 @@ class Workload(TargetedPlugin):
|
||||
|
||||
supported_platforms = getattr(self, 'supported_platforms', [])
|
||||
if supported_platforms and self.target.os not in supported_platforms:
|
||||
msg = 'Supported platforms for "{}" are "{}", attemping to run on "{}"'
|
||||
msg = 'Supported platforms for "{}" are "{}", attempting to run on "{}"'
|
||||
raise WorkloadError(msg.format(self.name, ' '.join(self.supported_platforms),
|
||||
self.target.os))
|
||||
|
||||
@ -174,6 +187,8 @@ class ApkWorkload(Workload):
|
||||
# Times are in seconds
|
||||
loading_time = 10
|
||||
package_names = []
|
||||
supported_versions = []
|
||||
activity = None
|
||||
view = None
|
||||
clear_data_on_reset = True
|
||||
|
||||
@ -198,6 +213,16 @@ class ApkWorkload(Workload):
|
||||
description="""
|
||||
The version of the package to be used.
|
||||
"""),
|
||||
Parameter('max_version', kind=str,
|
||||
default=None,
|
||||
description="""
|
||||
The maximum version of the package to be used.
|
||||
"""),
|
||||
Parameter('min_version', kind=str,
|
||||
default=None,
|
||||
description="""
|
||||
The minimum version of the package to be used.
|
||||
"""),
|
||||
Parameter('variant', kind=str,
|
||||
default=None,
|
||||
description="""
|
||||
@ -217,6 +242,7 @@ class ApkWorkload(Workload):
|
||||
"""),
|
||||
Parameter('uninstall', kind=bool,
|
||||
default=False,
|
||||
override=True,
|
||||
description="""
|
||||
If ``True``, will uninstall workload\'s APK as part of teardown.'
|
||||
"""),
|
||||
@ -235,6 +261,12 @@ class ApkWorkload(Workload):
|
||||
will fall back to the version on the target if available. If
|
||||
``False`` then the version on the target is preferred instead.
|
||||
"""),
|
||||
Parameter('view', kind=str, default=None, merge=True,
|
||||
description="""
|
||||
Manually override the 'View' of the workload for use with
|
||||
instruments such as the ``fps`` instrument. If not specified,
|
||||
a workload dependant 'View' will be automatically generated.
|
||||
"""),
|
||||
]
|
||||
|
||||
@property
|
||||
@ -249,22 +281,40 @@ class ApkWorkload(Workload):
raise ConfigError('Target does not appear to support Android')

super(ApkWorkload, self).__init__(target, **kwargs)

if self.activity is not None and '.' not in self.activity:
# If we're receiving just the activity name, it's taken relative to
# the package namespace:
self.activity = '.' + self.activity

self.apk = PackageHandler(self,
package_name=self.package_name,
variant=self.variant,
strict=self.strict,
version=self.version,
version=self.version or self.supported_versions,
force_install=self.force_install,
install_timeout=self.install_timeout,
uninstall=self.uninstall,
exact_abi=self.exact_abi,
prefer_host_package=self.prefer_host_package,
clear_data_on_reset=self.clear_data_on_reset)
clear_data_on_reset=self.clear_data_on_reset,
activity=self.activity,
min_version=self.min_version,
max_version=self.max_version)

def validate(self):
if self.min_version and self.max_version:
if version_tuple(self.min_version) > version_tuple(self.max_version):
msg = 'Cannot specify min version ({}) greater than max version ({})'
raise ConfigError(msg.format(self.min_version, self.max_version))

@once_per_instance
def initialize(self, context):
super(ApkWorkload, self).initialize(context)
self.apk.initialize(context)
# pylint: disable=access-member-before-definition, attribute-defined-outside-init
if self.version is None:
self.version = self.apk.apk_info.version_name
if self.view is None:
self.view = 'SurfaceView - {}/{}'.format(self.apk.package,
self.apk.activity)
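The new validate() above compares version bounds through version_tuple() (added later in this diff), so the ordering is numeric rather than lexical; the bounds below are illustrative:

    version_tuple('5.9') < version_tuple('5.10')   # True: (5, 9) < (5, 10), although '5.9' > '5.10' as strings
    # A spec with min_version='5.10' and max_version='5.9' therefore fails validation with a ConfigError.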
@ -327,7 +377,8 @@ class ApkUIWorkload(ApkWorkload):
|
||||
@once_per_instance
|
||||
def finalize(self, context):
|
||||
super(ApkUIWorkload, self).finalize(context)
|
||||
self.gui.remove()
|
||||
if self.cleanup_assets:
|
||||
self.gui.remove()
|
||||
|
||||
|
||||
class ApkUiautoWorkload(ApkUIWorkload):
|
||||
@ -365,7 +416,6 @@ class ApkReventWorkload(ApkUIWorkload):
|
||||
|
||||
def __init__(self, target, **kwargs):
|
||||
super(ApkReventWorkload, self).__init__(target, **kwargs)
|
||||
self.apk = PackageHandler(self)
|
||||
self.gui = ReventGUI(self, target,
|
||||
self.setup_timeout,
|
||||
self.run_timeout,
|
||||
@ -407,7 +457,8 @@ class UIWorkload(Workload):
|
||||
@once_per_instance
|
||||
def finalize(self, context):
|
||||
super(UIWorkload, self).finalize(context)
|
||||
self.gui.remove()
|
||||
if self.cleanup_assets:
|
||||
self.gui.remove()
|
||||
|
||||
|
||||
class UiautoWorkload(UIWorkload):
|
||||
@ -603,12 +654,12 @@ class ReventGUI(object):
|
||||
if self.revent_teardown_file:
|
||||
self.revent_recorder.replay(self.on_target_teardown_revent,
|
||||
timeout=self.teardown_timeout)
|
||||
|
||||
def remove(self):
|
||||
self.target.remove(self.on_target_setup_revent)
|
||||
self.target.remove(self.on_target_run_revent)
|
||||
self.target.remove(self.on_target_extract_results_revent)
|
||||
self.target.remove(self.on_target_teardown_revent)
|
||||
|
||||
def remove(self):
|
||||
self.revent_recorder.remove()
|
||||
|
||||
def _check_revent_files(self):
|
||||
@ -637,18 +688,24 @@ class PackageHandler(object):
|
||||
|
||||
@property
|
||||
def activity(self):
|
||||
if self._activity:
|
||||
return self._activity
|
||||
if self.apk_info is None:
|
||||
return None
|
||||
return self.apk_info.activity
|
||||
|
||||
# pylint: disable=too-many-locals
|
||||
def __init__(self, owner, install_timeout=300, version=None, variant=None,
|
||||
package_name=None, strict=False, force_install=False, uninstall=False,
|
||||
exact_abi=False, prefer_host_package=True, clear_data_on_reset=True):
|
||||
exact_abi=False, prefer_host_package=True, clear_data_on_reset=True,
|
||||
activity=None, min_version=None, max_version=None):
|
||||
self.logger = logging.getLogger('apk')
|
||||
self.owner = owner
|
||||
self.target = self.owner.target
|
||||
self.install_timeout = install_timeout
|
||||
self.version = version
|
||||
self.min_version = min_version
|
||||
self.max_version = max_version
|
||||
self.variant = variant
|
||||
self.package_name = package_name
|
||||
self.strict = strict
|
||||
@ -657,6 +714,7 @@ class PackageHandler(object):
|
||||
self.exact_abi = exact_abi
|
||||
self.prefer_host_package = prefer_host_package
|
||||
self.clear_data_on_reset = clear_data_on_reset
|
||||
self._activity = activity
|
||||
self.supported_abi = self.target.supported_abi
|
||||
self.apk_file = None
|
||||
self.apk_info = None
|
||||
@ -714,7 +772,9 @@ class PackageHandler(object):
|
||||
version=self.version,
|
||||
package=self.package_name,
|
||||
exact_abi=self.exact_abi,
|
||||
supported_abi=self.supported_abi),
|
||||
supported_abi=self.supported_abi,
|
||||
min_version=self.min_version,
|
||||
max_version=self.max_version),
|
||||
strict=self.strict)
|
||||
else:
|
||||
available_packages = []
|
||||
@ -724,47 +784,57 @@ class PackageHandler(object):
|
||||
version=self.version,
|
||||
package=package,
|
||||
exact_abi=self.exact_abi,
|
||||
supported_abi=self.supported_abi),
|
||||
supported_abi=self.supported_abi,
|
||||
min_version=self.min_version,
|
||||
max_version=self.max_version),
|
||||
strict=self.strict)
|
||||
if apk_file:
|
||||
available_packages.append(apk_file)
|
||||
if len(available_packages) == 1:
|
||||
self.apk_file = available_packages[0]
|
||||
elif len(available_packages) > 1:
|
||||
msg = 'Multiple matching packages found for "{}" on host: {}'
|
||||
self.error_msg = msg.format(self.owner, available_packages)
|
||||
self.error_msg = self._get_package_error_msg('host')
|
||||
|
||||
def resolve_package_from_target(self): # pylint: disable=too-many-branches
|
||||
self.logger.debug('Resolving package on target')
|
||||
found_package = None
|
||||
if self.package_name:
|
||||
if not self.target.package_is_installed(self.package_name):
|
||||
return
|
||||
else:
|
||||
installed_versions = [self.package_name]
|
||||
else:
|
||||
installed_versions = []
|
||||
for package in self.owner.package_names:
|
||||
if self.target.package_is_installed(package):
|
||||
installed_versions.append(package)
|
||||
|
||||
if self.version:
|
||||
matching_packages = []
|
||||
for package in installed_versions:
|
||||
package_version = self.target.get_package_version(package)
|
||||
if loose_version_matching(self.version, package_version):
|
||||
if self.version or self.min_version or self.max_version:
|
||||
matching_packages = []
|
||||
for package in installed_versions:
|
||||
package_version = self.target.get_package_version(package)
|
||||
if self.version:
|
||||
for v in list_or_string(self.version):
|
||||
if loose_version_matching(v, package_version):
|
||||
matching_packages.append(package)
|
||||
else:
|
||||
if range_version_matching(package_version, self.min_version,
|
||||
self.max_version):
|
||||
matching_packages.append(package)
|
||||
if len(matching_packages) == 1:
|
||||
self.package_name = matching_packages[0]
|
||||
elif len(matching_packages) > 1:
|
||||
msg = 'Multiple matches for version "{}" found on device.'
|
||||
self.error_msg = msg.format(self.version)
|
||||
else:
|
||||
if len(installed_versions) == 1:
|
||||
self.package_name = installed_versions[0]
|
||||
elif len(installed_versions) > 1:
|
||||
self.error_msg = 'Package version not set and multiple versions found on device.'
|
||||
|
||||
if self.package_name:
|
||||
if len(matching_packages) == 1:
|
||||
found_package = matching_packages[0]
|
||||
elif len(matching_packages) > 1:
|
||||
self.error_msg = self._get_package_error_msg('device')
|
||||
else:
|
||||
if len(installed_versions) == 1:
|
||||
found_package = installed_versions[0]
|
||||
elif len(installed_versions) > 1:
|
||||
self.error_msg = 'Package version not set and multiple versions found on device.'
|
||||
if found_package:
|
||||
self.logger.debug('Found matching package on target; Pulling to host.')
|
||||
self.apk_file = self.pull_apk(self.package_name)
|
||||
self.apk_file = self.pull_apk(found_package)
|
||||
self.package_name = found_package
|
||||
|
||||
def initialize_package(self, context):
|
||||
installed_version = self.target.get_package_version(self.apk_info.package)
|
||||
@ -794,11 +864,11 @@ class PackageHandler(object):
|
||||
self.apk_version = host_version
|
||||
|
||||
def start_activity(self):
|
||||
if not self.apk_info.activity:
|
||||
if not self.activity:
|
||||
cmd = 'am start -W {}'.format(self.apk_info.package)
|
||||
else:
|
||||
cmd = 'am start -W -n {}/{}'.format(self.apk_info.package,
|
||||
self.apk_info.activity)
|
||||
self.activity)
|
||||
output = self.target.execute(cmd)
|
||||
if 'Error:' in output:
|
||||
# this will dismiss any error dialogs
|
||||
@ -842,3 +912,76 @@ class PackageHandler(object):
|
||||
self.target.execute('am force-stop {}'.format(self.apk_info.package))
|
||||
if self.uninstall:
|
||||
self.target.uninstall_package(self.apk_info.package)
|
||||
|
||||
def _get_package_error_msg(self, location):
|
||||
if self.version:
|
||||
msg = 'Multiple matches for "{version}" found on {location}.'
|
||||
elif self.min_version and self.max_version:
|
||||
msg = 'Multiple matches between versions "{min_version}" and "{max_version}" found on {location}.'
|
||||
elif self.max_version:
|
||||
msg = 'Multiple matches less than or equal to "{max_version}" found on {location}.'
|
||||
elif self.min_version:
|
||||
msg = 'Multiple matches greater or equal to "{min_version}" found on {location}.'
|
||||
else:
|
||||
msg = ''
|
||||
return msg.format(version=self.version, min_version=self.min_version,
|
||||
max_version=self.max_version, location=location)
|
||||
|
||||
|
||||
class TestPackageHandler(PackageHandler):
|
||||
"""Class wrapping an APK used through ``am instrument``.
|
||||
"""
|
||||
def __init__(self, owner, instrument_args=None, raw_output=False,
|
||||
instrument_wait=True, no_hidden_api_checks=False,
|
||||
*args, **kwargs):
|
||||
if instrument_args is None:
|
||||
instrument_args = {}
|
||||
super(TestPackageHandler, self).__init__(owner, *args, **kwargs)
|
||||
self.raw = raw_output
|
||||
self.args = instrument_args
|
||||
self.wait = instrument_wait
|
||||
self.no_checks = no_hidden_api_checks
|
||||
|
||||
self.cmd = ''
|
||||
self.instrument_thread = None
|
||||
self._instrument_output = None
|
||||
|
||||
def setup(self, context):
|
||||
self.initialize_package(context)
|
||||
|
||||
words = ['am', 'instrument']
|
||||
if self.raw:
|
||||
words.append('-r')
|
||||
if self.wait:
|
||||
words.append('-w')
|
||||
if self.no_checks:
|
||||
words.append('--no-hidden-api-checks')
|
||||
for k, v in self.args.items():
|
||||
words.extend(['-e', str(k), str(v)])
|
||||
|
||||
words.append(str(self.apk_info.package))
|
||||
if self.apk_info.activity:
|
||||
words[-1] += '/{}'.format(self.apk_info.activity)
|
||||
|
||||
self.cmd = ' '.join(quote(x) for x in words)
|
||||
self.instrument_thread = threading.Thread(target=self._start_instrument)
|
||||
|
||||
def start_activity(self):
|
||||
self.instrument_thread.start()
|
||||
|
||||
def wait_instrument_over(self):
|
||||
self.instrument_thread.join()
|
||||
if 'Error:' in self._instrument_output:
|
||||
cmd = 'am force-stop {}'.format(self.apk_info.package)
|
||||
self.target.execute(cmd)
|
||||
raise WorkloadError(self._instrument_output)
|
||||
|
||||
def _start_instrument(self):
|
||||
self._instrument_output = self.target.execute(self.cmd)
|
||||
self.logger.debug(self._instrument_output)
|
||||
|
||||
@property
|
||||
def instrument_output(self):
|
||||
if self.instrument_thread.is_alive():
|
||||
self.instrument_thread.join() # writes self._instrument_output
|
||||
return self._instrument_output
|
||||
|
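As a concrete illustration of the command assembly in setup() above, using hypothetical instrumentation arguments and package name:

    # Minimal sketch; 'class', the test value and the package are invented.
    from shlex import quote

    words = ['am', 'instrument', '-r', '-w']
    for k, v in {'class': 'com.example.MyTest'}.items():
        words.extend(['-e', str(k), str(v)])
    words.append('com.example.test')   # package, plus '/<activity>' when the APK declares one
    print(' '.join(quote(x) for x in words))
    # am instrument -r -w -e class com.example.MyTest com.example.test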
@ -20,6 +20,7 @@ import time
|
||||
from wa import Instrument, Parameter
|
||||
from wa.framework.exception import ConfigError, InstrumentError
|
||||
from wa.framework.instrument import extremely_slow
|
||||
from wa.utils.types import identifier
|
||||
|
||||
|
||||
class DelayInstrument(Instrument):
|
||||
@ -32,7 +33,7 @@ class DelayInstrument(Instrument):
|
||||
The delay may be specified as either a fixed period or a temperature
|
||||
threshold that must be reached.
|
||||
|
||||
Optionally, if an active cooling solution is available on the device tqgitq
|
||||
Optionally, if an active cooling solution is available on the device to
|
||||
speed up temperature drop between runs, it may be controlled using this
|
||||
instrument.
|
||||
|
||||
@ -222,7 +223,7 @@ class DelayInstrument(Instrument):
|
||||
for module in self.active_cooling_modules:
|
||||
if self.target.has(module):
|
||||
if not cooling_module:
|
||||
cooling_module = getattr(self.target, module)
|
||||
cooling_module = getattr(self.target, identifier(module))
|
||||
else:
|
||||
msg = 'Multiple cooling modules found "{}" "{}".'
|
||||
raise InstrumentError(msg.format(cooling_module.name, module))
|
||||
|
@ -164,7 +164,7 @@ class FpsInstrument(Instrument):
|
||||
os.remove(entry)
|
||||
|
||||
if not frame_count.value:
|
||||
context.add_event('Could not frind frames data in gfxinfo output')
|
||||
context.add_event('Could not find frames data in gfxinfo output')
|
||||
context.set_status('PARTIAL')
|
||||
|
||||
self.check_for_crash(context, fps.value, frame_count.value,
|
||||
|
@ -174,8 +174,14 @@ class SysfsExtractor(Instrument):
|
||||
self.target.list_directory(dev_dir)):
|
||||
self.logger.error('sysfs files were not pulled from the device.')
|
||||
self.device_and_host_paths.remove(paths) # Path is removed to skip diffing it
|
||||
for _, before_dir, after_dir, diff_dir in self.device_and_host_paths:
|
||||
for dev_dir, before_dir, after_dir, diff_dir in self.device_and_host_paths:
|
||||
diff_sysfs_dirs(before_dir, after_dir, diff_dir)
|
||||
context.add_artifact('{} [before]'.format(dev_dir), before_dir,
|
||||
kind='data', classifiers={'stage': 'before'})
|
||||
context.add_artifact('{} [after]'.format(dev_dir), after_dir,
|
||||
kind='data', classifiers={'stage': 'after'})
|
||||
context.add_artifact('{} [diff]'.format(dev_dir), diff_dir,
|
||||
kind='data', classifiers={'stage': 'diff'})
|
||||
|
||||
def teardown(self, context):
|
||||
self._one_time_setup_done = []
|
||||
@ -276,9 +282,15 @@ class InterruptStatsInstrument(Instrument):
|
||||
wfh.write(self.target.execute('cat /proc/interrupts'))
|
||||
|
||||
def update_output(self, context):
|
||||
context.add_artifact('interrupts [before]', self.before_file, kind='data',
|
||||
classifiers={'stage': 'before'})
|
||||
# If workload execution failed, the after_file may not have been created.
|
||||
if os.path.isfile(self.after_file):
|
||||
diff_interrupt_files(self.before_file, self.after_file, _f(self.diff_file))
|
||||
context.add_artifact('interrupts [after]', self.after_file, kind='data',
|
||||
classifiers={'stage': 'after'})
|
||||
context.add_artifact('interrupts [diff]', self.diff_file, kind='data',
|
||||
classifiers={'stage': 'diff'})
|
||||
|
||||
|
||||
class DynamicFrequencyInstrument(SysfsExtractor):
|
||||
|
@ -196,7 +196,7 @@ int main(int argc, char ** argv) {
|
||||
|
||||
strip(buf);
|
||||
printf(",%s", buf);
|
||||
buf[0] = '\0'; // "Empty" buffer
|
||||
memset(buf, 0, sizeof(buf)); // "Empty" buffer
|
||||
}
|
||||
printf("\n");
|
||||
usleep(interval);
|
||||
|
@ -16,6 +16,7 @@
|
||||
import os
|
||||
import uuid
|
||||
import collections
|
||||
import tarfile
|
||||
|
||||
try:
|
||||
import psycopg2
|
||||
@ -24,6 +25,7 @@ try:
|
||||
except ImportError as e:
|
||||
psycopg2 = None
|
||||
import_error_msg = e.args[0] if e.args else str(e)
|
||||
|
||||
from devlib.target import KernelVersion, KernelConfig
|
||||
|
||||
from wa import OutputProcessor, Parameter, OutputProcessorError
|
||||
@ -88,10 +90,10 @@ class PostgresqlResultProcessor(OutputProcessor):
|
||||
"VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
|
||||
"update_run": "UPDATE Runs SET event_summary=%s, status=%s, timestamp=%s, end_time=%s, duration=%s, state=%s WHERE oid=%s;",
|
||||
"create_job": "INSERT INTO Jobs (oid, run_oid, status, retry, label, job_id, iterations, workload_name, metadata, _pod_version, _pod_serialization_version) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);",
|
||||
"create_target": "INSERT INTO Targets (oid, run_oid, target, cpus, os, os_version, hostid, hostname, abi, is_rooted, kernel_version, kernel_release, kernel_sha1, kernel_config, sched_features, page_size_kb, screen_resolution, prop, android_id, _pod_version, _pod_serialization_version) "
|
||||
"VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
|
||||
"create_target": "INSERT INTO Targets (oid, run_oid, target, cpus, os, os_version, hostid, hostname, abi, is_rooted, kernel_version, kernel_release, kernel_sha1, kernel_config, sched_features, page_size_kb, system_id, screen_resolution, prop, android_id, _pod_version, _pod_serialization_version) "
|
||||
"VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
|
||||
"create_event": "INSERT INTO Events (oid, run_oid, job_oid, timestamp, message, _pod_version, _pod_serialization_version) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s",
|
||||
"create_artifact": "INSERT INTO Artifacts (oid, run_oid, job_oid, name, large_object_uuid, description, kind, _pod_version, _pod_serialization_version) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)",
|
||||
"create_artifact": "INSERT INTO Artifacts (oid, run_oid, job_oid, name, large_object_uuid, description, kind, is_dir, _pod_version, _pod_serialization_version) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
|
||||
"create_metric": "INSERT INTO Metrics (oid, run_oid, job_oid, name, value, units, lower_is_better, _pod_version, _pod_serialization_version) VALUES (%s, %s, %s, %s, %s, %s , %s, %s, %s)",
|
||||
"create_augmentation": "INSERT INTO Augmentations (oid, run_oid, name) VALUES (%s, %s, %s)",
|
||||
"create_classifier": "INSERT INTO Classifiers (oid, artifact_oid, metric_oid, job_oid, run_oid, key, value) VALUES (%s, %s, %s, %s, %s, %s, %s)",
|
||||
@ -205,12 +207,13 @@ class PostgresqlResultProcessor(OutputProcessor):
|
||||
target_info.kernel_config,
|
||||
target_pod['sched_features'],
|
||||
target_pod['page_size_kb'],
|
||||
target_pod['system_id'],
|
||||
# Android Specific
|
||||
target_pod.get('screen_resolution'),
|
||||
list(target_pod.get('screen_resolution', [])),
|
||||
target_pod.get('prop'),
|
||||
target_pod.get('android_id'),
|
||||
target_pod.get('pod_version'),
|
||||
target_pod.get('pod_serialization_version'),
|
||||
target_pod.get('_pod_version'),
|
||||
target_pod.get('_pod_serialization_version'),
|
||||
)
|
||||
)
|
||||
|
||||
@ -532,7 +535,7 @@ class PostgresqlResultProcessor(OutputProcessor):
|
||||
'with the create command'
|
||||
raise OutputProcessorError(msg.format(db_schema_version, local_schema_version))
|
||||
|
||||
def _sql_write_lobject(self, source, lobject):
def _sql_write_file_lobject(self, source, lobject):
with open(source) as lobj_file:
lobj_data = lobj_file.read()
if len(lobj_data) > 50000000: # Notify if LO inserts larger than 50MB
@ -540,10 +543,18 @@ class PostgresqlResultProcessor(OutputProcessor):
lobject.write(lobj_data)
self.conn.commit()

def _sql_write_dir_lobject(self, source, lobject):
with tarfile.open(fileobj=lobject, mode='w|gz') as lobj_dir:
lobj_dir.add(source, arcname='.')
self.conn.commit()
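The directory variant above streams a gzipped tar of the artifact directory into the large object. The same tarfile call works against any file-like object, for example a plain file standing in for the psycopg2 lobject (paths here are made up):

    import tarfile

    with open('artifact.tar.gz', 'wb') as fileobj:           # stand-in for the lobject
        with tarfile.open(fileobj=fileobj, mode='w|gz') as tar:
            tar.add('wa_output/wk1-job1', arcname='.')       # hypothetical artifact directory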
|
||||
def _sql_update_artifact(self, artifact, output_object):
|
||||
self.logger.debug('Updating artifact: {}'.format(artifact))
|
||||
lobj = self.conn.lobject(oid=self.artifacts_already_added[artifact], mode='w')
|
||||
self._sql_write_lobject(os.path.join(output_object.basepath, artifact.path), lobj)
|
||||
if artifact.is_dir:
|
||||
self._sql_write_dir_lobject(os.path.join(output_object.basepath, artifact.path), lobj)
|
||||
else:
|
||||
self._sql_write_file_lobject(os.path.join(output_object.basepath, artifact.path), lobj)
|
||||
|
||||
def _sql_create_artifact(self, artifact, output_object, record_in_added=False, job_uuid=None):
|
||||
self.logger.debug('Uploading artifact: {}'.format(artifact))
|
||||
@ -551,8 +562,10 @@ class PostgresqlResultProcessor(OutputProcessor):
|
||||
lobj = self.conn.lobject()
|
||||
loid = lobj.oid
|
||||
large_object_uuid = uuid.uuid4()
|
||||
|
||||
self._sql_write_lobject(os.path.join(output_object.basepath, artifact.path), lobj)
|
||||
if artifact.is_dir:
|
||||
self._sql_write_dir_lobject(os.path.join(output_object.basepath, artifact.path), lobj)
|
||||
else:
|
||||
self._sql_write_file_lobject(os.path.join(output_object.basepath, artifact.path), lobj)
|
||||
|
||||
self.cursor.execute(
|
||||
self.sql_command['create_large_object'],
|
||||
@ -571,6 +584,7 @@ class PostgresqlResultProcessor(OutputProcessor):
|
||||
large_object_uuid,
|
||||
artifact.description,
|
||||
str(artifact.kind),
|
||||
artifact.is_dir,
|
||||
artifact._pod_version, # pylint: disable=protected-access
|
||||
artifact._pod_serialization_version, # pylint: disable=protected-access
|
||||
)
|
||||
|
@ -41,7 +41,6 @@ else:
|
||||
from itertools import chain, cycle
|
||||
from distutils.spawn import find_executable # pylint: disable=no-name-in-module, import-error
|
||||
|
||||
import yaml
|
||||
from dateutil import tz
|
||||
|
||||
# pylint: disable=wrong-import-order
|
||||
@ -325,6 +324,11 @@ def load_struct_from_python(filepath=None, text=None):
|
||||
def load_struct_from_yaml(filepath=None, text=None):
|
||||
"""Parses a config structure from a .yaml file. The structure should be composed
|
||||
of basic Python types (strings, ints, lists, dicts, etc.)."""
|
||||
|
||||
# Import here to avoid circular imports
|
||||
# pylint: disable=wrong-import-position,cyclic-import
|
||||
from wa.utils.serializer import yaml
|
||||
|
||||
if not (filepath or text) or (filepath and text):
|
||||
raise ValueError('Exactly one of filepath or text must be specified.')
|
||||
try:
|
||||
|
@ -199,7 +199,6 @@ def create_iterable_adapter(array_columns, explicit_iterate=False):
|
||||
array_string = "{" + array_string + "}"
|
||||
final_string = final_string + array_string + ","
|
||||
final_string = final_string.strip(",")
|
||||
final_string = "{" + final_string + "}"
|
||||
else:
|
||||
# Simply return each item in the array
|
||||
if explicit_iterate:
|
||||
@ -208,8 +207,7 @@ def create_iterable_adapter(array_columns, explicit_iterate=False):
|
||||
else:
|
||||
for item in param:
|
||||
final_string = final_string + str(item) + ","
|
||||
final_string = "{" + final_string + "}"
|
||||
return AsIs("'{}'".format(final_string))
|
||||
return AsIs("'{{{}}}'".format(final_string))
|
||||
return adapt_iterable
|
||||
|
||||
|
||||
@ -245,10 +243,10 @@ def get_schema(schemafilepath):
|
||||
def get_database_schema_version(conn):
|
||||
with conn.cursor() as cursor:
|
||||
cursor.execute('''SELECT
|
||||
DatabaseMeta.schema_major,
|
||||
DatabaseMeta.schema_minor
|
||||
FROM
|
||||
DatabaseMeta;''')
|
||||
DatabaseMeta.schema_major,
|
||||
DatabaseMeta.schema_minor
|
||||
FROM
|
||||
DatabaseMeta;''')
|
||||
schema_major, schema_minor = cursor.fetchone()
|
||||
return (schema_major, schema_minor)
|
||||
|
||||
|
@ -59,10 +59,17 @@ to specify it explicitly.
|
||||
import os
|
||||
import re
|
||||
import json as _json
|
||||
from collections import OrderedDict
|
||||
from collections import OrderedDict, Hashable
|
||||
from datetime import datetime
|
||||
import dateutil.parser
|
||||
import yaml as _yaml # pylint: disable=wrong-import-order
|
||||
from yaml import MappingNode
|
||||
try:
|
||||
from yaml import FullLoader as _yaml_loader
|
||||
except ImportError:
|
||||
from yaml import Loader as _yaml_loader
|
||||
from yaml.constructor import ConstructorError
|
||||
|
||||
|
||||
# pylint: disable=redefined-builtin
|
||||
from past.builtins import basestring # pylint: disable=wrong-import-order
|
||||
@ -203,16 +210,6 @@ def _wa_cpu_mask_representer(dumper, data):
|
||||
return dumper.represent_scalar(_cpu_mask_tag, data.mask())
|
||||
|
||||
|
||||
def _wa_dict_constructor(loader, node):
|
||||
pairs = loader.construct_pairs(node)
|
||||
seen_keys = set()
|
||||
for k, _ in pairs:
|
||||
if k in seen_keys:
|
||||
raise ValueError('Duplicate entry: {}'.format(k))
|
||||
seen_keys.add(k)
|
||||
return OrderedDict(pairs)
|
||||
|
||||
|
||||
def _wa_regex_constructor(loader, node):
|
||||
value = loader.construct_scalar(node)
|
||||
flags, pattern = value.split(':', 1)
|
||||
@ -230,14 +227,34 @@ def _wa_cpu_mask_constructor(loader, node):
|
||||
return cpu_mask(value)
|
||||
|
||||
|
||||
class _WaYamlLoader(_yaml_loader): # pylint: disable=too-many-ancestors
|
||||
|
||||
def construct_mapping(self, node, deep=False):
|
||||
if isinstance(node, MappingNode):
|
||||
self.flatten_mapping(node)
|
||||
if not isinstance(node, MappingNode):
|
||||
raise ConstructorError(None, None,
|
||||
"expected a mapping node, but found %s" % node.id,
|
||||
node.start_mark)
|
||||
mapping = OrderedDict()
|
||||
for key_node, value_node in node.value:
|
||||
key = self.construct_object(key_node, deep=deep)
|
||||
if not isinstance(key, Hashable):
|
||||
raise ConstructorError("while constructing a mapping", node.start_mark,
|
||||
"found unhashable key", key_node.start_mark)
|
||||
value = self.construct_object(value_node, deep=deep)
|
||||
mapping[key] = value
|
||||
return mapping
|
||||
|
||||
|
||||
_yaml.add_representer(OrderedDict, _wa_dict_representer)
|
||||
_yaml.add_representer(regex_type, _wa_regex_representer)
|
||||
_yaml.add_representer(level, _wa_level_representer)
|
||||
_yaml.add_representer(cpu_mask, _wa_cpu_mask_representer)
|
||||
_yaml.add_constructor(_mapping_tag, _wa_dict_constructor)
|
||||
_yaml.add_constructor(_regex_tag, _wa_regex_constructor)
|
||||
_yaml.add_constructor(_level_tag, _wa_level_constructor)
|
||||
_yaml.add_constructor(_cpu_mask_tag, _wa_cpu_mask_constructor)
|
||||
_yaml.add_constructor(_regex_tag, _wa_regex_constructor, Loader=_WaYamlLoader)
|
||||
_yaml.add_constructor(_level_tag, _wa_level_constructor, Loader=_WaYamlLoader)
|
||||
_yaml.add_constructor(_cpu_mask_tag, _wa_cpu_mask_constructor, Loader=_WaYamlLoader)
|
||||
_yaml.add_constructor(_mapping_tag, _WaYamlLoader.construct_yaml_map, Loader=_WaYamlLoader)
|
||||
|
||||
|
||||
class yaml(object):
@ -249,12 +266,13 @@ class yaml(object):
@staticmethod
def load(fh, *args, **kwargs):
try:
return _yaml.load(fh, *args, **kwargs)
return _yaml.load(fh, *args, Loader=_WaYamlLoader, **kwargs)
except _yaml.YAMLError as e:
lineno = None
if hasattr(e, 'problem_mark'):
lineno = e.problem_mark.line # pylint: disable=no-member
raise SerializerSyntaxError(e.args[0] if e.args else str(e), lineno)
message = e.args[0] if (e.args and e.args[0]) else str(e)
raise SerializerSyntaxError(message, lineno)

loads = load
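With the custom loader wired in, the wrapper is used the same way as before; a small sketch, with a made-up YAML fragment:

    from wa.utils.serializer import yaml

    # Any YAML text or open file handle works here; the keys are illustrative.
    config = yaml.loads('workload_name: dhrystone\niterations: 3\n')
    # config['workload_name'] == 'dhrystone', config['iterations'] == 3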
@ -170,9 +170,9 @@ def list_or_caseless_string(value):
def list_or(type_):
"""
Generator for "list or" types. These take either a single value or a list
values and return a list of the specfied ``type_`` performing the
values and return a list of the specified ``type_`` performing the
conversion on the value (if a single value is specified) or each of the
elemented of the specified list.
elements of the specified list.

"""
list_type = list_of(type_)
@ -208,6 +208,13 @@ def regex(value):
return re.compile(value)


def version_tuple(v):
"""
Converts a version string into a tuple of ints that can be used for natural comparison.
"""
return tuple(map(int, (v.split("."))))


__counters = defaultdict(int)
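Note that the helper above expects purely numeric, dot-separated components; comparison is then numeric rather than lexical:

    version_tuple('1.0.3')                         # (1, 0, 3)
    version_tuple('4.10') > version_tuple('4.9')   # True, although '4.10' > '4.9' is False as strings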
@ -281,7 +288,7 @@ class prioritylist(object):
|
||||
|
||||
- ``new_element`` the element to be inserted in the prioritylist
|
||||
- ``priority`` is the priority of the element which specifies its
|
||||
order withing the List
|
||||
order within the List
|
||||
"""
|
||||
self._add_element(new_element, priority)
|
||||
|
||||
|
wa/workloads/aitutu/__init__.py (new executable file, 69 lines)
@ -0,0 +1,69 @@
|
||||
# Copyright 2014-2018 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
import re
|
||||
|
||||
from wa import ApkUiautoWorkload
|
||||
from wa.framework.exception import WorkloadError
|
||||
|
||||
|
||||
class Aitutu(ApkUiautoWorkload):
|
||||
|
||||
name = 'aitutu'
|
||||
package_names = ['com.antutu.aibenchmark']
|
||||
regex_matches = [re.compile(r'Overall Score ([\d.]+)'),
|
||||
re.compile(r'Image Total Score ([\d.]+) ([\w]+) ([\w]+)'),
|
||||
re.compile(r'Image Speed Score ([\d.]+) ([\w]+) ([\w]+)'),
|
||||
re.compile(r'Image Accuracy Score ([\d.]+) ([\w]+) ([\w]+)'),
|
||||
re.compile(r'Object Total Score ([\d.]+) ([\w]+) ([\w]+)'),
|
||||
re.compile(r'Object Speed Score ([\d.]+) ([\w]+) ([\w]+)'),
|
||||
re.compile(r'Object Accuracy Score ([\d.]+) ([\w]+) ([\w]+)')]
|
||||
description = '''
|
||||
Executes Aitutu Image Speed/Accuracy and Object Speed/Accuracy tests
|
||||
|
||||
The Aitutu workflow carries out the following tasks.
|
||||
1. Open Aitutu application
|
||||
2. Download the resources for the test
|
||||
3. Execute the tests
|
||||
|
||||
Known working APK version: 1.0.3
|
||||
'''
|
||||
|
||||
requires_network = True
|
||||
|
||||
def __init__(self, target, **kwargs):
|
||||
super(Aitutu, self).__init__(target, **kwargs)
|
||||
self.gui.timeout = 1200000
|
||||
|
||||
def update_output(self, context):
|
||||
super(Aitutu, self).update_output(context)
|
||||
expected_results = len(self.regex_matches)
|
||||
logcat_file = context.get_artifact_path('logcat')
|
||||
with open(logcat_file) as fh:
|
||||
for line in fh:
|
||||
for regex in self.regex_matches:
|
||||
match = regex.search(line)
|
||||
if match:
|
||||
classifiers = {}
|
||||
result = match.group(1)
|
||||
if (len(match.groups())) > 1:
|
||||
entry = regex.pattern.rsplit(None, 3)[0]
|
||||
classifiers = {'model': match.group(3)}
|
||||
else:
|
||||
entry = regex.pattern.rsplit(None, 1)[0]
|
||||
context.add_metric(entry, result, '', lower_is_better=False, classifiers=classifiers)
|
||||
expected_results -= 1
|
||||
if expected_results > 0:
|
||||
msg = "The Aitutu workload has failed. Expected {} scores, Detected {} scores."
|
||||
raise WorkloadError(msg.format(len(self.regex_matches), expected_results))
|
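To make the parsing in update_output() above concrete, this is the shape of logcat line the uiautomation side emits (see the Log.d calls further down); the score and model name here are invented:

    import re

    line = 'Image Speed Score 1234.0 Model mobilenet'
    match = re.compile(r'Image Speed Score ([\d.]+) ([\w]+) ([\w]+)').search(line)
    # match.group(1) == '1234.0' is the metric value, the metric name is the pattern
    # with its last three tokens stripped ('Image Speed Score'), and match.group(3)
    # ('mobilenet') is recorded as the 'model' classifier.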
wa/workloads/aitutu/com.arm.wa.uiauto.aitutu.apk (new binary file, not shown)
wa/workloads/aitutu/uiauto/app/build.gradle (new file, 35 lines)
@ -0,0 +1,35 @@
|
||||
apply plugin: 'com.android.application'
|
||||
|
||||
def packageName = "com.arm.wa.uiauto.aitutu"
|
||||
|
||||
android {
|
||||
compileSdkVersion 25
|
||||
buildToolsVersion '25.0.0'
|
||||
defaultConfig {
|
||||
applicationId "${packageName}"
|
||||
minSdkVersion 18
|
||||
targetSdkVersion 25
|
||||
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
|
||||
}
|
||||
buildTypes {
|
||||
applicationVariants.all { variant ->
|
||||
variant.outputs.each { output ->
|
||||
output.outputFile = file("$project.buildDir/apk/${packageName}.apk")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
dependencies {
|
||||
compile fileTree(include: ['*.jar'], dir: 'libs')
|
||||
compile 'com.android.support.test:runner:0.5'
|
||||
compile 'com.android.support.test:rules:0.5'
|
||||
compile 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'
|
||||
compile(name: 'uiauto', ext: 'aar')
|
||||
}
|
||||
|
||||
repositories {
|
||||
flatDir {
|
||||
dirs 'libs'
|
||||
}
|
||||
}
|
wa/workloads/aitutu/uiauto/app/src/main/AndroidManifest.xml (new file, 12 lines)
@ -0,0 +1,12 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
package="com.arm.wa.uiauto.aitutu"
|
||||
android:versionCode="1"
|
||||
android:versionName="1.0">
|
||||
|
||||
|
||||
<instrumentation
|
||||
android:name="android.support.test.runner.AndroidJUnitRunner"
|
||||
android:targetPackage="${applicationId}"/>
|
||||
|
||||
</manifest>
|
wa/workloads/aitutu/uiauto/app/src/main/java/com/arm/wa/uiauto/aitutu/UiAutomation.java (new executable file, 143 lines)
@ -0,0 +1,143 @@
|
||||
/* Copyright 2013-2018 ARM Limited
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
package com.arm.wa.uiauto.aitutu;
|
||||
|
||||
import android.app.Activity;
|
||||
import android.os.Bundle;
|
||||
import android.graphics.Rect;
|
||||
import android.support.test.runner.AndroidJUnit4;
|
||||
import android.support.test.uiautomator.UiObject;
|
||||
import android.support.test.uiautomator.UiObjectNotFoundException;
|
||||
import android.support.test.uiautomator.UiSelector;
|
||||
import android.support.test.uiautomator.UiScrollable;
|
||||
import android.view.KeyEvent;
|
||||
import android.util.Log;
|
||||
|
||||
import com.arm.wa.uiauto.BaseUiAutomation;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
|
||||
@RunWith(AndroidJUnit4.class)
|
||||
public class UiAutomation extends BaseUiAutomation {
|
||||
|
||||
public static String TAG = "UXPERF";
|
||||
|
||||
@Test
|
||||
public void setup() throws Exception {
|
||||
clearPopups();
|
||||
downloadAssets();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void runWorkload() throws Exception {
|
||||
runBenchmark();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void extractResults() throws Exception {
|
||||
getScores();
|
||||
}
|
||||
|
||||
public void clearPopups() throws Exception {
|
||||
UiSelector selector = new UiSelector();
|
||||
|
||||
UiObject cancel = mDevice.findObject(selector.textContains("CANCEL")
|
||||
.className("android.widget.Button"));
|
||||
cancel.waitForExists(60000);
|
||||
if (cancel.exists()){
|
||||
cancel.click();
|
||||
}
|
||||
//waitObject(cancel);
|
||||
//cancel.click();
|
||||
}
|
||||
|
||||
public void downloadAssets() throws Exception {
|
||||
UiSelector selector = new UiSelector();
|
||||
//Start the tests
|
||||
UiObject start = mDevice.findObject(selector.textContains("Start Testing")
|
||||
.className("android.widget.TextView"));
|
||||
waitObject(start);
|
||||
start.click();
|
||||
|
||||
UiObject check = mDevice.findObject(selector.textContains("classification")
|
||||
.className("android.widget.TextView"));
|
||||
waitObject(check);
|
||||
}
|
||||
|
||||
public void runBenchmark() throws Exception {
|
||||
UiSelector selector = new UiSelector();
|
||||
|
||||
//Wait for the tests to complete
|
||||
UiObject complete =
|
||||
mDevice.findObject(selector.text("TEST AGAIN")
|
||||
.className("android.widget.Button"));
|
||||
complete.waitForExists(1200000);
|
||||
|
||||
}
|
||||
|
||||
public void getScores() throws Exception {
|
||||
UiSelector selector = new UiSelector();
|
||||
//Declare the models used
|
||||
UiObject imageMod =
|
||||
mDevice.findObject(selector.resourceId("com.antutu.aibenchmark:id/recyclerView"))
|
||||
.getChild(selector.index(1))
|
||||
.getChild(selector.resourceId("com.antutu.aibenchmark:id/textViewAIModelName"));
|
||||
UiObject objectMod =
|
||||
mDevice.findObject(selector.resourceId("com.antutu.aibenchmark:id/recyclerView"))
|
||||
.getChild(selector.index(4))
|
||||
.getChild(selector.resourceId("com.antutu.aibenchmark:id/textViewAIModelName"));
|
||||
//Log the scores and models
|
||||
UiObject totalScore =
|
||||
mDevice.findObject(selector.resourceId("com.antutu.aibenchmark:id/textViewTotalScore"));
|
||||
Log.d(TAG, "Overall Score " + totalScore.getText());
|
||||
UiObject imageTotal =
|
||||
mDevice.findObject(selector.resourceId("com.antutu.aibenchmark:id/recyclerView"))
|
||||
.getChild(selector.index(1))
|
||||
.getChild(selector.resourceId("com.antutu.aibenchmark:id/textViewSIDScore"));
|
||||
Log.d(TAG, "Image Total Score " + imageTotal.getText() + " Model " + imageMod.getText());
|
||||
UiObject imageSpeed =
|
||||
mDevice.findObject(selector.resourceId("com.antutu.aibenchmark:id/recyclerView"))
|
||||
.getChild(selector.index(2))
|
||||
.getChild(selector.resourceId("com.antutu.aibenchmark:id/textViewBIDScore"));
|
||||
Log.d(TAG, "Image Speed Score " + imageSpeed.getText() + " Model " + imageMod.getText());
|
||||
UiObject imageAcc =
|
||||
mDevice.findObject(selector.resourceId("com.antutu.aibenchmark:id/recyclerView"))
|
||||
.getChild(selector.index(3))
|
||||
.getChild(selector.resourceId("com.antutu.aibenchmark:id/textViewBIDScore"));
|
||||
Log.d(TAG, "Image Accuracy Score " + imageAcc.getText() + " Model " + imageMod.getText());
|
||||
UiObject objectTotal =
|
||||
mDevice.findObject(selector.resourceId("com.antutu.aibenchmark:id/recyclerView"))
|
||||
.getChild(selector.index(4))
|
||||
.getChild(selector.resourceId("com.antutu.aibenchmark:id/textViewSIDScore"));
|
||||
Log.d(TAG, "Object Total Score " + objectTotal.getText() + " Model " + objectMod.getText());
|
||||
UiObject objectSpeed =
|
||||
mDevice.findObject(selector.resourceId("com.antutu.aibenchmark:id/recyclerView"))
|
||||
.getChild(selector.index(5))
|
||||
.getChild(selector.resourceId("com.antutu.aibenchmark:id/textViewBIDScore"));
|
||||
Log.d(TAG, "Object Speed Score " + objectSpeed.getText() + " Model " + objectMod.getText());
|
||||
UiObject objectAcc =
|
||||
mDevice.findObject(selector.resourceId("com.antutu.aibenchmark:id/recyclerView"))
|
||||
.getChild(selector.index(6))
|
||||
.getChild(selector.resourceId("com.antutu.aibenchmark:id/textViewBIDScore"));
|
||||
Log.d(TAG, "Object Accuracy Score " + objectAcc.getText() + " Model " + objectMod.getText());
|
||||
}
|
||||
}
|
wa/workloads/aitutu/uiauto/build.gradle (new file, 23 lines)
@ -0,0 +1,23 @@
|
||||
// Top-level build file where you can add configuration options common to all sub-projects/modules.
|
||||
|
||||
buildscript {
|
||||
repositories {
|
||||
jcenter()
|
||||
}
|
||||
dependencies {
|
||||
classpath 'com.android.tools.build:gradle:2.3.1'
|
||||
|
||||
// NOTE: Do not place your application dependencies here; they belong
|
||||
// in the individual module build.gradle files
|
||||
}
|
||||
}
|
||||
|
||||
allprojects {
|
||||
repositories {
|
||||
jcenter()
|
||||
}
|
||||
}
|
||||
|
||||
task clean(type: Delete) {
|
||||
delete rootProject.buildDir
|
||||
}
|
wa/workloads/aitutu/uiauto/build.sh (new executable file, 55 lines)
@ -0,0 +1,55 @@
|
||||
#!/bin/bash
|
||||
# Copyright 2013-2018 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
set -e
|
||||
|
||||
# CD into build dir if possible - allows building from any directory
|
||||
script_path='.'
|
||||
if `readlink -f $0 &>/dev/null`; then
|
||||
script_path=`readlink -f $0 2>/dev/null`
|
||||
fi
|
||||
script_dir=`dirname $script_path`
|
||||
cd $script_dir
|
||||
|
||||
# Ensure gradelw exists before starting
|
||||
if [[ ! -f gradlew ]]; then
|
||||
echo 'gradlew file not found! Check that you are in the right directory.'
|
||||
exit 9
|
||||
fi
|
||||
|
||||
# Copy base class library from wa dist
|
||||
libs_dir=app/libs
|
||||
base_class=`python -c "import os, wa; print os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar')"`
|
||||
mkdir -p $libs_dir
|
||||
cp $base_class $libs_dir
|
||||
|
||||
# Build and return appropriate exit code if failed
|
||||
# gradle build
|
||||
./gradlew clean :app:assembleDebug
|
||||
exit_code=$?
|
||||
if [[ $exit_code -ne 0 ]]; then
|
||||
echo "ERROR: 'gradle build' exited with code $exit_code"
|
||||
exit $exit_code
|
||||
fi
|
||||
|
||||
# If successful move APK file to workload folder (overwrite previous)
|
||||
package=com.arm.wa.uiauto.aitutu
|
||||
rm -f ../$package
|
||||
if [[ -f app/build/apk/$package.apk ]]; then
|
||||
cp app/build/apk/$package.apk ../$package.apk
|
||||
else
|
||||
echo 'ERROR: UiAutomator apk could not be found!'
|
||||
exit 9
|
||||
fi
|
wa/workloads/aitutu/uiauto/gradle/wrapper/gradle-wrapper.jar (new vendored binary file, not shown)
wa/workloads/aitutu/uiauto/gradle/wrapper/gradle-wrapper.properties (new vendored file, 6 lines)
@ -0,0 +1,6 @@
|
||||
#Wed May 03 15:42:44 BST 2017
|
||||
distributionBase=GRADLE_USER_HOME
|
||||
distributionPath=wrapper/dists
|
||||
zipStoreBase=GRADLE_USER_HOME
|
||||
zipStorePath=wrapper/dists
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-3.3-all.zip
|
wa/workloads/aitutu/uiauto/gradlew (new vendored executable file, 160 lines)
@ -0,0 +1,160 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
##############################################################################
|
||||
##
|
||||
## Gradle start up script for UN*X
|
||||
##
|
||||
##############################################################################
|
||||
|
||||
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
DEFAULT_JVM_OPTS=""
|
||||
|
||||
APP_NAME="Gradle"
|
||||
APP_BASE_NAME=`basename "$0"`
|
||||
|
||||
# Use the maximum available, or set MAX_FD != -1 to use that value.
|
||||
MAX_FD="maximum"
|
||||
|
||||
warn ( ) {
|
||||
echo "$*"
|
||||
}
|
||||
|
||||
die ( ) {
|
||||
echo
|
||||
echo "$*"
|
||||
echo
|
||||
exit 1
|
||||
}
|
||||
|
||||
# OS specific support (must be 'true' or 'false').
|
||||
cygwin=false
|
||||
msys=false
|
||||
darwin=false
|
||||
case "`uname`" in
|
||||
CYGWIN* )
|
||||
cygwin=true
|
||||
;;
|
||||
Darwin* )
|
||||
darwin=true
|
||||
;;
|
||||
MINGW* )
|
||||
msys=true
|
||||
;;
|
||||
esac
|
||||
|
||||
# Attempt to set APP_HOME
|
||||
# Resolve links: $0 may be a link
|
||||
PRG="$0"
|
||||
# Need this for relative symlinks.
|
||||
while [ -h "$PRG" ] ; do
|
||||
ls=`ls -ld "$PRG"`
|
||||
link=`expr "$ls" : '.*-> \(.*\)$'`
|
||||
if expr "$link" : '/.*' > /dev/null; then
|
||||
PRG="$link"
|
||||
else
|
||||
PRG=`dirname "$PRG"`"/$link"
|
||||
fi
|
||||
done
|
||||
SAVED="`pwd`"
|
||||
cd "`dirname \"$PRG\"`/" >/dev/null
|
||||
APP_HOME="`pwd -P`"
|
||||
cd "$SAVED" >/dev/null
|
||||
|
||||
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
|
||||
|
||||
# Determine the Java command to use to start the JVM.
|
||||
if [ -n "$JAVA_HOME" ] ; then
|
||||
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
|
||||
# IBM's JDK on AIX uses strange locations for the executables
|
||||
JAVACMD="$JAVA_HOME/jre/sh/java"
|
||||
else
|
||||
JAVACMD="$JAVA_HOME/bin/java"
|
||||
fi
|
||||
if [ ! -x "$JAVACMD" ] ; then
|
||||
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
|
||||
|
||||
Please set the JAVA_HOME variable in your environment to match the
|
||||
location of your Java installation."
|
||||
fi
|
||||
else
|
||||
JAVACMD="java"
|
||||
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||
|
||||
Please set the JAVA_HOME variable in your environment to match the
|
||||
location of your Java installation."
|
||||
fi
|
||||
|
||||
# Increase the maximum file descriptors if we can.
|
||||
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
|
||||
MAX_FD_LIMIT=`ulimit -H -n`
|
||||
if [ $? -eq 0 ] ; then
|
||||
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
|
||||
MAX_FD="$MAX_FD_LIMIT"
|
||||
fi
|
||||
ulimit -n $MAX_FD
|
||||
if [ $? -ne 0 ] ; then
|
||||
warn "Could not set maximum file descriptor limit: $MAX_FD"
|
||||
fi
|
||||
else
|
||||
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
|
||||
fi
|
||||
fi
|
||||
|
||||
# For Darwin, add options to specify how the application appears in the dock
|
||||
if $darwin; then
|
||||
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
|
||||
fi
|
||||
|
||||
# For Cygwin, switch paths to Windows format before running java
|
||||
if $cygwin ; then
|
||||
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
|
||||
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
|
||||
JAVACMD=`cygpath --unix "$JAVACMD"`
|
||||
|
||||
# We build the pattern for arguments to be converted via cygpath
|
||||
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
|
||||
SEP=""
|
||||
for dir in $ROOTDIRSRAW ; do
|
||||
ROOTDIRS="$ROOTDIRS$SEP$dir"
|
||||
SEP="|"
|
||||
done
|
||||
OURCYGPATTERN="(^($ROOTDIRS))"
|
||||
# Add a user-defined pattern to the cygpath arguments
|
||||
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
|
||||
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
|
||||
fi
|
||||
# Now convert the arguments - kludge to limit ourselves to /bin/sh
|
||||
i=0
|
||||
for arg in "$@" ; do
|
||||
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
|
||||
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
|
||||
|
||||
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
|
||||
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
|
||||
else
|
||||
eval `echo args$i`="\"$arg\""
|
||||
fi
|
||||
i=$((i+1))
|
||||
done
|
||||
case $i in
|
||||
(0) set -- ;;
|
||||
(1) set -- "$args0" ;;
|
||||
(2) set -- "$args0" "$args1" ;;
|
||||
(3) set -- "$args0" "$args1" "$args2" ;;
|
||||
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
|
||||
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
|
||||
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
|
||||
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
|
||||
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
|
||||
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
|
||||
esac
|
||||
fi
|
||||
|
||||
# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
|
||||
function splitJvmOpts() {
|
||||
JVM_OPTS=("$@")
|
||||
}
|
||||
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
|
||||
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
|
||||
|
||||
exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
|
wa/workloads/aitutu/uiauto/gradlew.bat (new vendored file, 90 lines)
@ -0,0 +1,90 @@
|
||||
@if "%DEBUG%" == "" @echo off
|
||||
@rem ##########################################################################
|
||||
@rem
|
||||
@rem Gradle startup script for Windows
|
||||
@rem
|
||||
@rem ##########################################################################
|
||||
|
||||
@rem Set local scope for the variables with windows NT shell
|
||||
if "%OS%"=="Windows_NT" setlocal
|
||||
|
||||
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
set DEFAULT_JVM_OPTS=
|
||||
|
||||
set DIRNAME=%~dp0
|
||||
if "%DIRNAME%" == "" set DIRNAME=.
|
||||
set APP_BASE_NAME=%~n0
|
||||
set APP_HOME=%DIRNAME%
|
||||
|
||||
@rem Find java.exe
|
||||
if defined JAVA_HOME goto findJavaFromJavaHome
|
||||
|
||||
set JAVA_EXE=java.exe
|
||||
%JAVA_EXE% -version >NUL 2>&1
|
||||
if "%ERRORLEVEL%" == "0" goto init
|
||||
|
||||
echo.
|
||||
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||
echo.
|
||||
echo Please set the JAVA_HOME variable in your environment to match the
|
||||
echo location of your Java installation.
|
||||
|
||||
goto fail
|
||||
|
||||
:findJavaFromJavaHome
|
||||
set JAVA_HOME=%JAVA_HOME:"=%
|
||||
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
|
||||
|
||||
if exist "%JAVA_EXE%" goto init
|
||||
|
||||
echo.
|
||||
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
|
||||
echo.
|
||||
echo Please set the JAVA_HOME variable in your environment to match the
|
||||
echo location of your Java installation.
|
||||
|
||||
goto fail
|
||||
|
||||
:init
|
||||
@rem Get command-line arguments, handling Windowz variants
|
||||
|
||||
if not "%OS%" == "Windows_NT" goto win9xME_args
|
||||
if "%@eval[2+2]" == "4" goto 4NT_args
|
||||
|
||||
:win9xME_args
|
||||
@rem Slurp the command line arguments.
|
||||
set CMD_LINE_ARGS=
|
||||
set _SKIP=2
|
||||
|
||||
:win9xME_args_slurp
|
||||
if "x%~1" == "x" goto execute
|
||||
|
||||
set CMD_LINE_ARGS=%*
|
||||
goto execute
|
||||
|
||||
:4NT_args
|
||||
@rem Get arguments from the 4NT Shell from JP Software
|
||||
set CMD_LINE_ARGS=%$
|
||||
|
||||
:execute
|
||||
@rem Setup the command line
|
||||
|
||||
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
|
||||
|
||||
@rem Execute Gradle
|
||||
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
|
||||
|
||||
:end
|
||||
@rem End local scope for the variables with windows NT shell
|
||||
if "%ERRORLEVEL%"=="0" goto mainEnd
|
||||
|
||||
:fail
|
||||
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
|
||||
rem the _cmd.exe /c_ return code!
|
||||
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
|
||||
exit /b 1
|
||||
|
||||
:mainEnd
|
||||
if "%OS%"=="Windows_NT" endlocal
|
||||
|
||||
:omega
|
wa/workloads/aitutu/uiauto/settings.gradle (new file, 1 line)
@ -0,0 +1 @@
|
||||
include ':app'
|
@ -81,12 +81,22 @@ public class UiAutomation extends BaseUiAutomation implements ApplaunchInterface
|
||||
// Activate the tab switcher
|
||||
tabSwitcher = mDevice.findObject(new UiSelector().resourceId(packageID + "tab_switcher_button")
|
||||
.className("android.widget.ImageButton"));
|
||||
tabSwitcher.clickAndWaitForNewWindow(uiAutoTimeout);
|
||||
|
||||
// Click the New Tab button
|
||||
newTab = mDevice.findObject(new UiSelector().resourceId(packageID + "new_tab_button")
|
||||
.className("android.widget.Button"));
|
||||
newTab.clickAndWaitForNewWindow(uiAutoTimeout);
|
||||
if (tabSwitcher.exists()){
|
||||
tabSwitcher.clickAndWaitForNewWindow(uiAutoTimeout);
|
||||
// Click the New Tab button
|
||||
newTab = mDevice.findObject(new UiSelector().resourceId(packageID + "new_tab_button")
|
||||
.className("android.widget.Button"));
|
||||
newTab.clickAndWaitForNewWindow(uiAutoTimeout);
|
||||
}
|
||||
// Support Tablet devices which do not have tab switcher
|
||||
else {
|
||||
UiObject menu_button = mDevice.findObject(new UiSelector().resourceId(packageID + "menu_button")
|
||||
.className("android.widget.ImageButton"));
|
||||
menu_button.click();
|
||||
newTab = mDevice.findObject(new UiSelector().resourceId(packageID + "menu_item_text")
|
||||
.textContains("New tab"));
|
||||
newTab.click();
|
||||
}
|
||||
}
|
||||
|
||||
public void followTextLink(String text) throws Exception {
|
||||
|
@ -153,7 +153,8 @@ class Dhrystone(Workload):
|
||||
|
||||
@once
|
||||
def finalize(self, context):
|
||||
self.target.uninstall('dhrystone')
|
||||
if self.uninstall:
|
||||
self.target.uninstall('dhrystone')
|
||||
|
||||
def validate(self):
|
||||
if self.mloops and self.duration: # pylint: disable=E0203
|
||||
|
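Note on the Dhrystone hunk above: it is one instance of a cleanup-guard refactor that recurs throughout this compare view (hackbench, hwuitest, lmbench, memcpy, openssl, stress-ng, sysbench, shellscript and rt-app get the same treatment). Binaries are only uninstalled when the `uninstall` parameter is set, and on-target output files are only removed when `cleanup_assets` is set. A minimal sketch of the pattern, assuming both attributes are provided by the base Workload class; the `example` binary and output file names are invented:

from wa import Workload
from wa.utils.exec_control import once


class ExampleBinaryWorkload(Workload):

    name = 'example_binary'
    description = 'Hypothetical workload illustrating the uninstall/cleanup_assets guards.'

    def setup(self, context):
        self.target_output_file = self.target.get_workpath('example.out')

    def run(self, context):
        self.target.execute('example > {}'.format(self.target_output_file))

    def teardown(self, context):
        # Remove per-run output only if the user asked for asset cleanup.
        if self.cleanup_assets:
            self.target.execute('rm -f {}'.format(self.target_output_file))

    @once
    def finalize(self, context):
        # Uninstall the deployed binary only if the user asked for it.
        if self.uninstall:
            self.target.uninstall('example')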
@ -77,13 +77,13 @@ class ExoPlayer(ApkWorkload):
|
||||
video_directory = os.path.join(settings.dependencies_directory, name)
|
||||
|
||||
package_names = ['com.google.android.exoplayer2.demo']
|
||||
versions = ['2.4', '2.5', '2.6']
|
||||
supported_versions = ['2.4', '2.5', '2.6']
|
||||
action = 'com.google.android.exoplayer.demo.action.VIEW'
|
||||
default_format = 'mov_720p'
|
||||
view = 'SurfaceView - com.google.android.exoplayer2.demo/com.google.android.exoplayer2.demo.PlayerActivity'
|
||||
|
||||
parameters = [
|
||||
Parameter('version', allowed_values=versions, default=versions[-1], override=True),
|
||||
Parameter('version', allowed_values=supported_versions, override=True),
|
||||
Parameter('duration', kind=int, default=20,
|
||||
description="""
|
||||
Playback duration of the video file. This becomes the duration of the workload.
|
||||
|
@ -23,6 +23,7 @@ from collections import defaultdict
|
||||
from wa import ApkUiautoWorkload, Parameter
|
||||
from wa.framework.exception import ConfigError, WorkloadError
|
||||
from wa.utils.misc import capitalize
|
||||
from wa.utils.types import version_tuple
|
||||
|
||||
|
||||
class Geekbench(ApkUiautoWorkload):
|
||||
@ -51,39 +52,16 @@ class Geekbench(ApkUiautoWorkload):
|
||||
http://support.primatelabs.com/kb/geekbench/interpreting-geekbench-scores
|
||||
"""
|
||||
summary_metrics = ['score', 'multicore_score']
|
||||
versions = {
|
||||
'4.3.1': {
|
||||
'package': 'com.primatelabs.geekbench',
|
||||
'activity': '.HomeActivity',
|
||||
},
|
||||
'4.2.0': {
|
||||
'package': 'com.primatelabs.geekbench',
|
||||
'activity': '.HomeActivity',
|
||||
},
|
||||
'4.0.1': {
|
||||
'package': 'com.primatelabs.geekbench',
|
||||
'activity': '.HomeActivity',
|
||||
},
|
||||
# Version 3.4.1 was the final version 3 variant
|
||||
'3.4.1': {
|
||||
'package': 'com.primatelabs.geekbench',
|
||||
'activity': '.HomeActivity',
|
||||
},
|
||||
'3.0.0': {
|
||||
'package': 'com.primatelabs.geekbench3',
|
||||
'activity': '.HomeActivity',
|
||||
},
|
||||
'2': {
|
||||
'package': 'ca.primatelabs.geekbench2',
|
||||
'activity': '.HomeActivity',
|
||||
},
|
||||
}
|
||||
|
||||
supported_versions = ['4.3.2', '4.3.1', '4.2.0', '4.0.1', '3.4.1', '3.0.0', '2']
|
||||
package_names = ['com.primatelabs.geekbench', 'com.primatelabs.geekbench3', 'ca.primatelabs.geekbench2']
|
||||
|
||||
begin_regex = re.compile(r'^\s*D/WebViewClassic.loadDataWithBaseURL\(\s*\d+\s*\)'
|
||||
r'\s*:\s*(?P<content>\<.*)\s*$')
|
||||
replace_regex = re.compile(r'<[^>]*>')
|
||||
|
||||
parameters = [
|
||||
Parameter('version', default=sorted(versions.keys())[-1], allowed_values=sorted(versions.keys()),
|
||||
Parameter('version', allowed_values=supported_versions,
|
||||
description='Specifies which version of the workload should be run.',
|
||||
override=True),
|
||||
Parameter('loops', kind=int, default=1, aliases=['times'],
|
||||
@ -105,27 +83,12 @@ class Geekbench(ApkUiautoWorkload):
|
||||
|
||||
requires_network = True
|
||||
|
||||
@property
|
||||
def activity(self):
|
||||
return self.versions[self.version]['activity']
|
||||
|
||||
@property
|
||||
def package(self):
|
||||
return self.versions[self.version]['package']
|
||||
|
||||
@property
|
||||
def package_names(self):
|
||||
return [self.package]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(Geekbench, self).__init__(*args, **kwargs)
|
||||
def initialize(self, context):
|
||||
super(Geekbench, self).initialize(context)
|
||||
self.gui.uiauto_params['version'] = self.version
|
||||
self.gui.uiauto_params['loops'] = self.loops
|
||||
self.gui.uiauto_params['is_corporate'] = self.is_corporate
|
||||
self.gui.timeout = self.timeout
|
||||
|
||||
def initialize(self, context):
|
||||
super(Geekbench, self).initialize(context)
|
||||
if not self.disable_update_result and not self.target.is_rooted:
|
||||
raise WorkloadError(
|
||||
'Geekbench workload requires root to collect results. '
|
||||
@ -135,12 +98,11 @@ class Geekbench(ApkUiautoWorkload):
|
||||
def setup(self, context):
|
||||
super(Geekbench, self).setup(context)
|
||||
self.run_timeout = self.timeout * self.loops
|
||||
self.exact_apk_version = self.version
|
||||
|
||||
def update_output(self, context):
|
||||
super(Geekbench, self).update_output(context)
|
||||
if not self.disable_update_result:
|
||||
major_version = versiontuple(self.version)[0]
|
||||
major_version = version_tuple(self.version)[0]
|
||||
update_method = getattr(self, 'update_result_{}'.format(major_version))
|
||||
update_method(context)
|
||||
|
||||
@ -154,7 +116,7 @@ class Geekbench(ApkUiautoWorkload):
|
||||
score_calculator.update_results(context)
|
||||
|
||||
def update_result_3(self, context):
|
||||
outfile_glob = self.target.path.join(self.target.package_data_directory, self.package, 'files', '*gb3')
|
||||
outfile_glob = self.target.path.join(self.target.package_data_directory, self.apk.package, 'files', '*gb3')
|
||||
on_target_output_files = [f.strip() for f in self.target.execute('ls {}'.format(outfile_glob),
|
||||
as_root=True).split('\n') if f]
|
||||
for i, on_target_output_file in enumerate(on_target_output_files):
|
||||
@ -176,7 +138,7 @@ class Geekbench(ApkUiautoWorkload):
|
||||
section['multicore_score'])
|
||||
|
||||
def update_result_4(self, context):
|
||||
outfile_glob = self.target.path.join(self.target.package_data_directory, self.package, 'files', '*gb*')
|
||||
outfile_glob = self.target.path.join(self.target.package_data_directory, self.apk.package, 'files', '*gb*')
|
||||
on_target_output_files = [f.strip() for f in self.target.execute('ls {}'.format(outfile_glob),
|
||||
as_root=True).split('\n') if f]
|
||||
for i, on_target_output_file in enumerate(on_target_output_files):
|
||||
@ -395,22 +357,14 @@ class GeekbenchCorproate(Geekbench): # pylint: disable=too-many-ancestors
|
||||
name = "geekbench-corporate"
|
||||
is_corporate = True
|
||||
requires_network = False
|
||||
|
||||
versions = ['4.1.0', '5.0.0']
|
||||
|
||||
supported_versions = ['4.1.0', '5.0.0']
|
||||
package_names = ['com.primatelabs.geekbench4.corporate']
|
||||
activity = 'com.primatelabs.geekbench.HomeActivity'
|
||||
package = 'com.primatelabs.geekbench4.corporate'
|
||||
|
||||
parameters = [
|
||||
Parameter('version',
|
||||
default=sorted(versions)[-1], allowed_values=versions,
|
||||
override=True)
|
||||
Parameter('version', allowed_values=supported_versions, override=True)
|
||||
]
|
||||
|
||||
|
||||
def namemify(basename, i):
|
||||
return basename + (' {}'.format(i) if i else '')
|
||||
|
||||
|
||||
def versiontuple(v):
|
||||
return tuple(map(int, (v.split("."))))
|
||||
|
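The Geekbench rewrite above is the clearest example of another recurring refactor in this series (ExoPlayer, GLBenchmark and Vellamo follow the same shape): hand-maintained per-version dictionaries and `package`/`activity` properties are replaced by `supported_versions` and `package_names` class attributes, the framework's APK resolver picks a matching package, and `exact_apk_version` pins the choice once a version has been selected. A hedged sketch of the resulting shape; the class, package and version strings here are invented:

from wa import ApkUiautoWorkload, Parameter
from wa.utils.types import version_tuple


class ExampleApkBenchmark(ApkUiautoWorkload):

    name = 'example_apk_benchmark'
    description = 'Hypothetical APK benchmark illustrating supported_versions.'

    # Declare what this workload knows how to drive; the resolver finds a match.
    supported_versions = ['2.0.0', '1.4.1']
    package_names = ['com.example.benchmark']

    parameters = [
        # No hard-coded default: if the user does not pin a version, whichever
        # supported APK the resolver finds is used.
        Parameter('version', allowed_values=supported_versions, override=True),
    ]

    def setup(self, context):
        super(ExampleApkBenchmark, self).setup(context)
        # Ask the resolver for exactly the version that ended up selected.
        self.exact_apk_version = self.version

    def update_output(self, context):
        super(ExampleApkBenchmark, self).update_output(context)
        # Branch result parsing on the major version component of e.g. '2.0.0'.
        major_version = version_tuple(self.version)[0]
        getattr(self, 'parse_v{}'.format(major_version))(context)

    def parse_v1(self, context):
        pass

    def parse_v2(self, context):
        pass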
@ -56,10 +56,8 @@ class Glb(ApkUiautoWorkload):
|
||||
view = 'com.glbenchmark.glbenchmark27/com.glbenchmark.activities.GLBRender'
|
||||
|
||||
package_names = ['com.glbenchmark.glbenchmark27', 'com.glbenchmark.glbenchmark25']
|
||||
packages = {
|
||||
'2.7': 'com.glbenchmark.glbenchmark27',
|
||||
'2.5': 'com.glbenchmark.glbenchmark25',
|
||||
}
|
||||
supported_versions = ['2.7', '2.5']
|
||||
|
||||
# If usecase is not specified the default usecase is the first supported usecase alias
|
||||
# for the specified version.
|
||||
supported_usecase_aliases = {
|
||||
@ -74,7 +72,7 @@ class Glb(ApkUiautoWorkload):
|
||||
regex = re.compile(r'GLBenchmark (metric|FPS): (.*)')
|
||||
|
||||
parameters = [
|
||||
Parameter('version', default='2.7', allowed_values=['2.7', '2.5'], override=True,
|
||||
Parameter('version', allowed_values=supported_versions, override=True,
|
||||
description=('Specifies which version of the benchmark to run (different versions '
|
||||
'support different use cases).')),
|
||||
Parameter('use_case', default=None,
|
||||
@ -107,10 +105,9 @@ class Glb(ApkUiautoWorkload):
|
||||
Alias('t-rex_offscreen', use_case='t-rex', type='offscreen'),
|
||||
]
|
||||
|
||||
def __init__(self, target, **kwargs):
|
||||
super(Glb, self).__init__(target, **kwargs)
|
||||
def initialize(self, context):
|
||||
super(Glb, self).initialize(context)
|
||||
self.gui.uiauto_params['version'] = self.version
|
||||
|
||||
if self.use_case is None:
|
||||
self.use_case = self.supported_usecase_aliases[self.version][0]
|
||||
if self.use_case.lower() in USE_CASE_MAP:
|
||||
@ -124,7 +121,6 @@ class Glb(ApkUiautoWorkload):
|
||||
self.gui.uiauto_params['usecase_type'] = self.type.replace(' ', '_')
|
||||
|
||||
self.gui.uiauto_params['timeout'] = self.run_timeout
|
||||
self.package_names = [self.packages[self.version]]
|
||||
|
||||
def update_output(self, context):
|
||||
super(Glb, self).update_output(context)
|
||||
|
@ -90,8 +90,10 @@ class Hackbench(Workload):
|
||||
context.add_metric(label, float(match.group(1)), units)
|
||||
|
||||
def teardown(self, context):
|
||||
self.target.execute('rm -f {}'.format(self.target_output_file))
|
||||
if self.cleanup_assets:
|
||||
self.target.execute('rm -f {}'.format(self.target_output_file))
|
||||
|
||||
@once
|
||||
def finalize(self, context):
|
||||
self.target.uninstall(self.binary_name)
|
||||
if self.uninstall:
|
||||
self.target.uninstall(self.binary_name)
|
||||
|
@ -120,5 +120,5 @@ class HWUITest(Workload):
|
||||
|
||||
@once
|
||||
def finalize(self, context):
|
||||
if self.target_exe:
|
||||
if self.target_exe and self.uninstall:
|
||||
self.target.uninstall(self.target_exe)
|
||||
|
@ -103,6 +103,7 @@ class Lmbench(Workload):
|
||||
setup_test = getattr(self, '_setup_{}'.format(self.test))
|
||||
setup_test()
|
||||
|
||||
def run(self, context):
|
||||
for _ in range(self.loops):
|
||||
for command in self.commands:
|
||||
self.target.execute(command, timeout=self.run_timeout)
|
||||
@ -116,7 +117,8 @@ class Lmbench(Workload):
|
||||
context.add_artifact('lmbench-result', "lmbench.output", kind='raw')
|
||||
|
||||
def teardown(self, context):
|
||||
self.target.uninstall(self.test)
|
||||
if self.uninstall:
|
||||
self.target.uninstall(self.test)
|
||||
|
||||
#
|
||||
# Test setup routines
|
||||
|
@ -85,7 +85,7 @@ class ManualWorkload(Workload):
|
||||
def run(self, context):
|
||||
self.logger.info('START NOW!')
|
||||
if self.duration:
|
||||
self.device.sleep(self.duration)
|
||||
self.target.sleep(self.duration)
|
||||
elif self.user_triggered:
|
||||
self.logger.info('')
|
||||
self.logger.info('hit any key to end your workload execution...')
|
||||
|
@ -278,7 +278,7 @@ class Meabo(Workload):
|
||||
name = 'phase_{}_thread_{}_{}'.format(cur_phase,
|
||||
match.group('thread'),
|
||||
match.group('name'))
|
||||
context.result.add_metric(name, int(match.group('value')))
|
||||
context.add_metric(name, int(match.group('value')))
|
||||
|
||||
match = duration_regex.search(line)
|
||||
if match:
|
||||
@ -286,7 +286,8 @@ class Meabo(Workload):
|
||||
int(match.group('duration')), units="ns")
|
||||
|
||||
def finalize(self, context):
|
||||
self._uninstall_executable()
|
||||
if self.uninstall:
|
||||
self._uninstall_executable()
|
||||
|
||||
def _build_command(self):
|
||||
self.command = self.target_exe
|
||||
|
@ -86,4 +86,5 @@ class Memcpy(Workload):
|
||||
|
||||
@once
|
||||
def finalize(self, context):
|
||||
self.target.uninstall('memcpy')
|
||||
if self.uninstall:
|
||||
self.target.uninstall('memcpy')
|
||||
|
@ -36,7 +36,6 @@ class Mongoperf(Workload):
|
||||
.. note:: ``mongoperf`` seems to ramp up threads in powers of two over a
|
||||
period of tens of seconds (there doesn't appear to be a way to
|
||||
change that). Bear this in mind when setting the ``duration``.
|
||||
|
||||
"""
|
||||
|
||||
parameters = [
|
||||
@ -71,7 +70,6 @@ class Mongoperf(Workload):
|
||||
system, and can use normal file system cache. Use mmf in
|
||||
this mode to test file system cache behavior with memory
|
||||
mapped files.
|
||||
|
||||
"""),
|
||||
Parameter('read', kind=bool, default=True,
|
||||
aliases=['r'],
|
||||
@ -107,7 +105,7 @@ class Mongoperf(Workload):
|
||||
try:
|
||||
self.target.execute('mongoperf -h')
|
||||
except TargetError:
|
||||
raise WorkloadError('Mongoperf must be installed an in $PATH on the target.')
|
||||
raise WorkloadError('Mongoperf must be installed and in $PATH on the target.')
|
||||
|
||||
def setup(self, context):
|
||||
config = {}
|
||||
|
wa/workloads/motionmark/__init__.py (new executable file, 81 lines)
@ -0,0 +1,81 @@
|
||||
# Copyright 2014-2019 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
import os
|
||||
import re
|
||||
|
||||
from wa import UiautoWorkload, Parameter
|
||||
from wa.framework.exception import ValidationError, WorkloadError
|
||||
from wa.utils.types import list_of_strs
|
||||
from wa.utils.misc import unique
|
||||
|
||||
|
||||
class Motionmark(UiautoWorkload):
|
||||
|
||||
name = 'motionmark'
|
||||
|
||||
description = '''
|
||||
A workload to execute the MotionMark web-based benchmark.
|
||||
|
||||
MotionMark is a graphics benchmark that measures a browser's capability to animate complex scenes at a target frame rate.
|
||||
|
||||
Test description:
|
||||
1. Open browser application
|
||||
2. Navigate to the MotionMark website - https://browserbench.org/MotionMark/
|
||||
3. Execute the benchmark
|
||||
'''
|
||||
|
||||
requires_network = True
|
||||
|
||||
regex = [re.compile(r'Multiply Score (.+)'),
|
||||
re.compile(r'Canvas Score (.+)'),
|
||||
re.compile(r'Leaves Score (.+)'),
|
||||
re.compile(r'Paths Score (.+)'),
|
||||
re.compile(r'Canvas Lines Score (.+)'),
|
||||
re.compile(r'Focus Score (.+)'),
|
||||
re.compile(r'Images Score (.+)'),
|
||||
re.compile(r'Design Score (.+)'),
|
||||
re.compile(r'Suits Score (.+)')]
|
||||
score_regex = re.compile(r'.*?([\d.]+).*')
|
||||
|
||||
def __init__(self, target, **kwargs):
|
||||
super(Motionmark, self).__init__(target, **kwargs)
|
||||
self.gui.timeout = 1500
|
||||
|
||||
def setup(self, context):
|
||||
super(Motionmark, self).setup(context)
|
||||
self.target.open_url('https://browserbench.org/MotionMark/')
|
||||
|
||||
def update_output(self, context):
|
||||
super(Motionmark, self).update_output(context)
|
||||
num_unprocessed_results = len(self.regex)
|
||||
logcat_file = context.get_artifact_path('logcat')
|
||||
with open(logcat_file) as fh:
|
||||
for line in fh:
|
||||
for regex in self.regex:
|
||||
match = regex.search(line)
|
||||
# Check if we have matched the score string in logcat
|
||||
if match:
|
||||
score_match = self.score_regex.search(match.group(1))
|
||||
# Check if there is valid number found for the score.
|
||||
if score_match:
|
||||
result = float(score_match.group(1))
|
||||
else:
|
||||
result = float('NaN')
|
||||
entry = regex.pattern.rsplit(None, 1)[0]
|
||||
context.add_metric(entry, result, 'Score', lower_is_better=False)
|
||||
num_unprocessed_results -= 1
|
||||
if num_unprocessed_results > 0:
|
||||
msg = "The Motionmark workload has failed. Expected {} scores, Missing {} scores."
|
||||
raise WorkloadError(msg.format(len(self.regex), num_unprocessed_results))
|
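To make the scoring path above concrete, here is a standalone rendition of what update_output() does with the logcat artifact: the UiAutomation class logs one '<Test> Score <value>' line per sub-test under the UXPERF tag, the workload recovers the number with score_regex, and the metric name is taken from the pattern itself. The sample line below is invented but follows that format:

import re

test_regexes = [re.compile(r'Multiply Score (.+)'),
                re.compile(r'Canvas Score (.+)')]
score_regex = re.compile(r'.*?([\d.]+).*')

sample_line = '01-01 12:00:00.000 D/UXPERF(1234): Multiply Score 345.67'

for regex in test_regexes:
    match = regex.search(sample_line)
    if not match:
        continue
    score_match = score_regex.search(match.group(1))
    score = float(score_match.group(1)) if score_match else float('NaN')
    # 'Multiply Score (.+)'  ->  metric name 'Multiply Score'
    metric_name = regex.pattern.rsplit(None, 1)[0]
    print(metric_name, score)   # in WA this becomes context.add_metric(...)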
wa/workloads/motionmark/com.arm.wa.uiauto.motionmark.apk (new binary file, not shown)
wa/workloads/motionmark/uiauto/app/build.gradle (new file, 41 lines)
@ -0,0 +1,41 @@
|
||||
apply plugin: 'com.android.application'
|
||||
|
||||
def packageName = "com.arm.wa.uiauto.motionmark"
|
||||
|
||||
android {
|
||||
compileSdkVersion 25
|
||||
buildToolsVersion "25.0.3"
|
||||
defaultConfig {
|
||||
applicationId "${packageName}"
|
||||
minSdkVersion 18
|
||||
targetSdkVersion 25
|
||||
versionCode 1
|
||||
versionName "1.0"
|
||||
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
|
||||
}
|
||||
buildTypes {
|
||||
release {
|
||||
minifyEnabled false
|
||||
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
|
||||
}
|
||||
applicationVariants.all { variant ->
|
||||
variant.outputs.each { output ->
|
||||
output.outputFile = file("$project.buildDir/apk/${packageName}.apk")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
dependencies {
|
||||
compile fileTree(dir: 'libs', include: ['*.jar'])
|
||||
compile 'com.android.support.test:runner:0.5'
|
||||
compile 'com.android.support.test:rules:0.5'
|
||||
compile 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'
|
||||
compile(name: 'uiauto', ext:'aar')
|
||||
}
|
||||
|
||||
repositories {
|
||||
flatDir {
|
||||
dirs 'libs'
|
||||
}
|
||||
}
|
@ -0,0 +1,13 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
package="com.arm.wa.uiauto.motionmark"
|
||||
android:versionCode="1"
|
||||
android:versionName="1.0">
|
||||
|
||||
|
||||
<instrumentation
|
||||
android:name="android.support.test.runner.AndroidJUnitRunner"
|
||||
android:targetPackage="${applicationId}"/>
|
||||
|
||||
</manifest>
|
||||
|
wa/workloads/motionmark/uiauto/app/src/main/java/com/arm/wa/uiauto/motionmark/UiAutomation.java (new executable file, 188 lines)
@ -0,0 +1,188 @@
|
||||
/* Copyright 2014-2019 ARM Limited
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.arm.wa.uiauto.motionmark;
|
||||
|
||||
import android.os.Bundle;
|
||||
import android.support.test.runner.AndroidJUnit4;
|
||||
import android.support.test.uiautomator.UiObject;
|
||||
import android.support.test.uiautomator.UiObjectNotFoundException;
|
||||
import android.support.test.uiautomator.UiSelector;
|
||||
import android.support.test.uiautomator.UiScrollable;
|
||||
|
||||
import com.arm.wa.uiauto.BaseUiAutomation;
|
||||
import android.util.Log;
|
||||
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
@RunWith(AndroidJUnit4.class)
|
||||
public class UiAutomation extends BaseUiAutomation {
|
||||
|
||||
private int networkTimeoutSecs = 30;
|
||||
private long networkTimeout = TimeUnit.SECONDS.toMillis(networkTimeoutSecs);
|
||||
public static String TAG = "UXPERF";
|
||||
public boolean textenabled = false;
|
||||
|
||||
@Before
|
||||
public void initialize(){
|
||||
initialize_instrumentation();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void setup() throws Exception{
|
||||
clearFirstRun();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void runWorkload() throws Exception {
|
||||
runBenchmark();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void teardown() throws Exception{
|
||||
clearTabs();
|
||||
unsetScreenOrientation();
|
||||
}
|
||||
|
||||
public void clearFirstRun() throws Exception {
|
||||
UiObject accept =
|
||||
mDevice.findObject(new UiSelector().resourceId("com.android.chrome:id/terms_accept")
|
||||
.className("android.widget.Button"));
|
||||
if (accept.exists()){
|
||||
accept.click();
|
||||
UiObject negative =
|
||||
mDevice.findObject(new UiSelector().resourceId("com.android.chrome:id/negative_button")
|
||||
.className("android.widget.Button"));
|
||||
negative.waitForExists(100000);
|
||||
negative.click();
|
||||
}
|
||||
}
|
||||
|
||||
public void runBenchmark() throws Exception {
|
||||
setScreenOrientation(ScreenOrientation.LANDSCAPE);
|
||||
UiScrollable list = new UiScrollable(new UiSelector().scrollable(true));
|
||||
|
||||
UiObject start =
|
||||
mDevice.findObject(new UiSelector().text("Run Benchmark")
|
||||
.className("android.widget.Button"));
|
||||
list.swipeUp(10);
|
||||
if (start.exists()){
|
||||
start.click();
|
||||
} else {
|
||||
UiObject startDesc =
|
||||
mDevice.findObject(new UiSelector().description("Run Benchmark")
|
||||
.className("android.widget.Button"));
|
||||
startDesc.click();
|
||||
}
|
||||
|
||||
UiObject results =
|
||||
mDevice.findObject(new UiSelector().resourceId("results-score")
|
||||
.className("android.widget.GridView"));
|
||||
results.waitForExists(2100000);
|
||||
|
||||
setScreenOrientation(ScreenOrientation.PORTRAIT);
|
||||
|
||||
UiObject multiply =
|
||||
mDevice.findObject(new UiSelector().resourceId("results-score"))
|
||||
.getChild(new UiSelector().index(2))
|
||||
.getChild(new UiSelector().index(0));
|
||||
Log.d(TAG, "Multiply Score " + multiply.getText());
|
||||
|
||||
UiObject canvas =
|
||||
mDevice.findObject(new UiSelector().resourceId("results-score"))
|
||||
.getChild(new UiSelector().index(3))
|
||||
.getChild(new UiSelector().index(0));
|
||||
Log.d(TAG, "Canvas Score " + canvas.getText());
|
||||
|
||||
UiObject leaves =
|
||||
mDevice.findObject(new UiSelector().resourceId("results-score"))
|
||||
.getChild(new UiSelector().index(4))
|
||||
.getChild(new UiSelector().index(0));
|
||||
Log.d(TAG, "Leaves Score " + leaves.getText());
|
||||
|
||||
UiObject paths =
|
||||
mDevice.findObject(new UiSelector().resourceId("results-score"))
|
||||
.getChild(new UiSelector().index(5))
|
||||
.getChild(new UiSelector().index(0));
|
||||
Log.d(TAG, "Paths Score " + paths.getText());
|
||||
|
||||
UiObject canvaslines =
|
||||
mDevice.findObject(new UiSelector().resourceId("results-score"))
|
||||
.getChild(new UiSelector().index(6))
|
||||
.getChild(new UiSelector().index(0));
|
||||
if (!canvaslines.exists() && list.waitForExists(60)) {
|
||||
list.scrollIntoView(canvaslines);
|
||||
}
|
||||
Log.d(TAG, "Canvas Lines Score " + canvaslines.getText());
|
||||
|
||||
UiObject focus =
|
||||
mDevice.findObject(new UiSelector().resourceId("results-score"))
|
||||
.getChild(new UiSelector().index(7))
|
||||
.getChild(new UiSelector().index(0));
|
||||
if (!focus.exists() && list.waitForExists(60)) {
|
||||
list.scrollIntoView(focus);
|
||||
}
|
||||
Log.d(TAG, "Focus Score " + focus.getText());
|
||||
|
||||
UiObject images =
|
||||
mDevice.findObject(new UiSelector().resourceId("results-score"))
|
||||
.getChild(new UiSelector().index(8))
|
||||
.getChild(new UiSelector().index(0));
|
||||
if (!images.exists() && list.waitForExists(60)) {
|
||||
list.scrollIntoView(images);
|
||||
}
|
||||
Log.d(TAG, "Images Score " + images.getText());
|
||||
|
||||
UiObject design =
|
||||
mDevice.findObject(new UiSelector().resourceId("results-score"))
|
||||
.getChild(new UiSelector().index(9))
|
||||
.getChild(new UiSelector().index(0));
|
||||
if (!design.exists() && list.waitForExists(60)) {
|
||||
list.scrollIntoView(design);
|
||||
}
|
||||
Log.d(TAG, "Design Score " + design.getText());
|
||||
|
||||
UiObject suits =
|
||||
mDevice.findObject(new UiSelector().resourceId("results-score"))
|
||||
.getChild(new UiSelector().index(10))
|
||||
.getChild(new UiSelector().index(0));
|
||||
if (!suits.exists() && list.waitForExists(60)) {
|
||||
list.scrollIntoView(suits);
|
||||
}
|
||||
Log.d(TAG, "Suits Score " + suits.getText());
|
||||
}
|
||||
|
||||
public void clearTabs() throws Exception {
|
||||
UiObject tabselector =
|
||||
mDevice.findObject(new UiSelector().resourceId("com.android.chrome:id/tab_switcher_button")
|
||||
.className("android.widget.ImageButton"));
|
||||
if (!tabselector.exists()){
|
||||
return;
|
||||
}
|
||||
tabselector.click();
|
||||
UiObject menu =
|
||||
mDevice.findObject(new UiSelector().resourceId("com.android.chrome:id/menu_button")
|
||||
.className("android.widget.ImageButton"));
|
||||
menu.click();
|
||||
UiObject closetabs =
|
||||
mDevice.findObject(new UiSelector().textContains("Close all tabs"));
|
||||
closetabs.click();
|
||||
}
|
||||
}
|
wa/workloads/motionmark/uiauto/build.gradle (new file, 23 lines)
@ -0,0 +1,23 @@
|
||||
// Top-level build file where you can add configuration options common to all sub-projects/modules.
|
||||
|
||||
buildscript {
|
||||
repositories {
|
||||
jcenter()
|
||||
}
|
||||
dependencies {
|
||||
classpath 'com.android.tools.build:gradle:2.3.2'
|
||||
|
||||
// NOTE: Do not place your application dependencies here; they belong
|
||||
// in the individual module build.gradle files
|
||||
}
|
||||
}
|
||||
|
||||
allprojects {
|
||||
repositories {
|
||||
jcenter()
|
||||
}
|
||||
}
|
||||
|
||||
task clean(type: Delete) {
|
||||
delete rootProject.buildDir
|
||||
}
|
wa/workloads/motionmark/uiauto/build.sh (new executable file, 55 lines)
@ -0,0 +1,55 @@
|
||||
#!/bin/bash
|
||||
# Copyright 2018 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
|
||||
# CD into build dir if possible - allows building from any directory
|
||||
script_path='.'
|
||||
if `readlink -f $0 &>/dev/null`; then
|
||||
script_path=`readlink -f $0 2>/dev/null`
|
||||
fi
|
||||
script_dir=`dirname $script_path`
|
||||
cd $script_dir
|
||||
|
||||
# Ensure gradlew exists before starting
|
||||
if [[ ! -f gradlew ]]; then
|
||||
echo 'gradlew file not found! Check that you are in the right directory.'
|
||||
exit 9
|
||||
fi
|
||||
|
||||
# Copy base class library from wa dist
|
||||
libs_dir=app/libs
|
||||
base_class=`python -c "import os, wa; print os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar')"`
|
||||
mkdir -p $libs_dir
|
||||
cp $base_class $libs_dir
|
||||
|
||||
# Build and return appropriate exit code if failed
|
||||
# gradle build
|
||||
./gradlew clean :app:assembleDebug
|
||||
exit_code=$?
|
||||
if [[ $exit_code -ne 0 ]]; then
|
||||
echo "ERROR: 'gradle build' exited with code $exit_code"
|
||||
exit $exit_code
|
||||
fi
|
||||
|
||||
# If successful move APK file to workload folder (overwrite previous)
|
||||
package=com.arm.wa.uiauto.motionmark
|
||||
rm -f ../$package
|
||||
if [[ -f app/build/apk/$package.apk ]]; then
|
||||
cp app/build/apk/$package.apk ../$package.apk
|
||||
else
|
||||
echo 'ERROR: UiAutomator apk could not be found!'
|
||||
exit 9
|
||||
fi
|
wa/workloads/motionmark/uiauto/gradle/wrapper/gradle-wrapper.jar (vendored, new binary file, not shown)
wa/workloads/motionmark/uiauto/gradle/wrapper/gradle-wrapper.properties (vendored, new file, 6 lines)
@ -0,0 +1,6 @@
|
||||
#Wed May 03 15:42:44 BST 2017
|
||||
distributionBase=GRADLE_USER_HOME
|
||||
distributionPath=wrapper/dists
|
||||
zipStoreBase=GRADLE_USER_HOME
|
||||
zipStorePath=wrapper/dists
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-3.3-all.zip
|
wa/workloads/motionmark/uiauto/gradlew (vendored, new executable file, 160 lines)
@ -0,0 +1,160 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
##############################################################################
|
||||
##
|
||||
## Gradle start up script for UN*X
|
||||
##
|
||||
##############################################################################
|
||||
|
||||
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
DEFAULT_JVM_OPTS=""
|
||||
|
||||
APP_NAME="Gradle"
|
||||
APP_BASE_NAME=`basename "$0"`
|
||||
|
||||
# Use the maximum available, or set MAX_FD != -1 to use that value.
|
||||
MAX_FD="maximum"
|
||||
|
||||
warn ( ) {
|
||||
echo "$*"
|
||||
}
|
||||
|
||||
die ( ) {
|
||||
echo
|
||||
echo "$*"
|
||||
echo
|
||||
exit 1
|
||||
}
|
||||
|
||||
# OS specific support (must be 'true' or 'false').
|
||||
cygwin=false
|
||||
msys=false
|
||||
darwin=false
|
||||
case "`uname`" in
|
||||
CYGWIN* )
|
||||
cygwin=true
|
||||
;;
|
||||
Darwin* )
|
||||
darwin=true
|
||||
;;
|
||||
MINGW* )
|
||||
msys=true
|
||||
;;
|
||||
esac
|
||||
|
||||
# Attempt to set APP_HOME
|
||||
# Resolve links: $0 may be a link
|
||||
PRG="$0"
|
||||
# Need this for relative symlinks.
|
||||
while [ -h "$PRG" ] ; do
|
||||
ls=`ls -ld "$PRG"`
|
||||
link=`expr "$ls" : '.*-> \(.*\)$'`
|
||||
if expr "$link" : '/.*' > /dev/null; then
|
||||
PRG="$link"
|
||||
else
|
||||
PRG=`dirname "$PRG"`"/$link"
|
||||
fi
|
||||
done
|
||||
SAVED="`pwd`"
|
||||
cd "`dirname \"$PRG\"`/" >/dev/null
|
||||
APP_HOME="`pwd -P`"
|
||||
cd "$SAVED" >/dev/null
|
||||
|
||||
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
|
||||
|
||||
# Determine the Java command to use to start the JVM.
|
||||
if [ -n "$JAVA_HOME" ] ; then
|
||||
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
|
||||
# IBM's JDK on AIX uses strange locations for the executables
|
||||
JAVACMD="$JAVA_HOME/jre/sh/java"
|
||||
else
|
||||
JAVACMD="$JAVA_HOME/bin/java"
|
||||
fi
|
||||
if [ ! -x "$JAVACMD" ] ; then
|
||||
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
|
||||
|
||||
Please set the JAVA_HOME variable in your environment to match the
|
||||
location of your Java installation."
|
||||
fi
|
||||
else
|
||||
JAVACMD="java"
|
||||
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||
|
||||
Please set the JAVA_HOME variable in your environment to match the
|
||||
location of your Java installation."
|
||||
fi
|
||||
|
||||
# Increase the maximum file descriptors if we can.
|
||||
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
|
||||
MAX_FD_LIMIT=`ulimit -H -n`
|
||||
if [ $? -eq 0 ] ; then
|
||||
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
|
||||
MAX_FD="$MAX_FD_LIMIT"
|
||||
fi
|
||||
ulimit -n $MAX_FD
|
||||
if [ $? -ne 0 ] ; then
|
||||
warn "Could not set maximum file descriptor limit: $MAX_FD"
|
||||
fi
|
||||
else
|
||||
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
|
||||
fi
|
||||
fi
|
||||
|
||||
# For Darwin, add options to specify how the application appears in the dock
|
||||
if $darwin; then
|
||||
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
|
||||
fi
|
||||
|
||||
# For Cygwin, switch paths to Windows format before running java
|
||||
if $cygwin ; then
|
||||
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
|
||||
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
|
||||
JAVACMD=`cygpath --unix "$JAVACMD"`
|
||||
|
||||
# We build the pattern for arguments to be converted via cygpath
|
||||
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
|
||||
SEP=""
|
||||
for dir in $ROOTDIRSRAW ; do
|
||||
ROOTDIRS="$ROOTDIRS$SEP$dir"
|
||||
SEP="|"
|
||||
done
|
||||
OURCYGPATTERN="(^($ROOTDIRS))"
|
||||
# Add a user-defined pattern to the cygpath arguments
|
||||
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
|
||||
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
|
||||
fi
|
||||
# Now convert the arguments - kludge to limit ourselves to /bin/sh
|
||||
i=0
|
||||
for arg in "$@" ; do
|
||||
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
|
||||
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
|
||||
|
||||
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
|
||||
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
|
||||
else
|
||||
eval `echo args$i`="\"$arg\""
|
||||
fi
|
||||
i=$((i+1))
|
||||
done
|
||||
case $i in
|
||||
(0) set -- ;;
|
||||
(1) set -- "$args0" ;;
|
||||
(2) set -- "$args0" "$args1" ;;
|
||||
(3) set -- "$args0" "$args1" "$args2" ;;
|
||||
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
|
||||
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
|
||||
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
|
||||
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
|
||||
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
|
||||
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
|
||||
esac
|
||||
fi
|
||||
|
||||
# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
|
||||
function splitJvmOpts() {
|
||||
JVM_OPTS=("$@")
|
||||
}
|
||||
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
|
||||
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
|
||||
|
||||
exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
|
wa/workloads/motionmark/uiauto/gradlew.bat (vendored, new file, 90 lines)
@ -0,0 +1,90 @@
|
||||
@if "%DEBUG%" == "" @echo off
|
||||
@rem ##########################################################################
|
||||
@rem
|
||||
@rem Gradle startup script for Windows
|
||||
@rem
|
||||
@rem ##########################################################################
|
||||
|
||||
@rem Set local scope for the variables with windows NT shell
|
||||
if "%OS%"=="Windows_NT" setlocal
|
||||
|
||||
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
set DEFAULT_JVM_OPTS=
|
||||
|
||||
set DIRNAME=%~dp0
|
||||
if "%DIRNAME%" == "" set DIRNAME=.
|
||||
set APP_BASE_NAME=%~n0
|
||||
set APP_HOME=%DIRNAME%
|
||||
|
||||
@rem Find java.exe
|
||||
if defined JAVA_HOME goto findJavaFromJavaHome
|
||||
|
||||
set JAVA_EXE=java.exe
|
||||
%JAVA_EXE% -version >NUL 2>&1
|
||||
if "%ERRORLEVEL%" == "0" goto init
|
||||
|
||||
echo.
|
||||
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||
echo.
|
||||
echo Please set the JAVA_HOME variable in your environment to match the
|
||||
echo location of your Java installation.
|
||||
|
||||
goto fail
|
||||
|
||||
:findJavaFromJavaHome
|
||||
set JAVA_HOME=%JAVA_HOME:"=%
|
||||
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
|
||||
|
||||
if exist "%JAVA_EXE%" goto init
|
||||
|
||||
echo.
|
||||
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
|
||||
echo.
|
||||
echo Please set the JAVA_HOME variable in your environment to match the
|
||||
echo location of your Java installation.
|
||||
|
||||
goto fail
|
||||
|
||||
:init
|
||||
@rem Get command-line arguments, handling Windowz variants
|
||||
|
||||
if not "%OS%" == "Windows_NT" goto win9xME_args
|
||||
if "%@eval[2+2]" == "4" goto 4NT_args
|
||||
|
||||
:win9xME_args
|
||||
@rem Slurp the command line arguments.
|
||||
set CMD_LINE_ARGS=
|
||||
set _SKIP=2
|
||||
|
||||
:win9xME_args_slurp
|
||||
if "x%~1" == "x" goto execute
|
||||
|
||||
set CMD_LINE_ARGS=%*
|
||||
goto execute
|
||||
|
||||
:4NT_args
|
||||
@rem Get arguments from the 4NT Shell from JP Software
|
||||
set CMD_LINE_ARGS=%$
|
||||
|
||||
:execute
|
||||
@rem Setup the command line
|
||||
|
||||
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
|
||||
|
||||
@rem Execute Gradle
|
||||
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
|
||||
|
||||
:end
|
||||
@rem End local scope for the variables with windows NT shell
|
||||
if "%ERRORLEVEL%"=="0" goto mainEnd
|
||||
|
||||
:fail
|
||||
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
|
||||
rem the _cmd.exe /c_ return code!
|
||||
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
|
||||
exit /b 1
|
||||
|
||||
:mainEnd
|
||||
if "%OS%"=="Windows_NT" endlocal
|
||||
|
||||
:omega
|
wa/workloads/motionmark/uiauto/settings.gradle (new file, 1 line)
@ -0,0 +1 @@
|
||||
include ':app'
|
@ -156,5 +156,5 @@ class Openssl(Workload):
|
||||
|
||||
@once
|
||||
def finalize(self, context):
|
||||
if not self.use_system_binary:
|
||||
if not self.use_system_binary and self.uninstall:
|
||||
self.target.uninstall('openssl')
|
||||
|
@ -104,14 +104,14 @@ public class UiAutomation extends BaseUiAutomation {
|
||||
installtext.click();;
|
||||
}
|
||||
UiObject installed =
|
||||
mDevice.findObject(new UiSelector().description("RUN")
|
||||
mDevice.findObject(new UiSelector().text("RUN")
|
||||
.className("android.view.View"));
|
||||
installed.waitForExists(360000);
|
||||
installed.waitForExists(180000);
|
||||
if (!installed.exists()){
|
||||
UiObject installedtext =
|
||||
mDevice.findObject(new UiSelector().text("RUN")
|
||||
UiObject installeddesc =
|
||||
mDevice.findObject(new UiSelector().description("RUN")
|
||||
.className("android.view.View"));
|
||||
installedtext.waitForExists(1000);
|
||||
installeddesc.waitForExists(1000);
|
||||
}
|
||||
}
|
||||
|
||||
@ -120,7 +120,7 @@ public class UiAutomation extends BaseUiAutomation {
|
||||
UiObject run =
|
||||
mDevice.findObject(new UiSelector().resourceId("CONTROL_PCMA_WORK_V2_DEFAULT")
|
||||
.className("android.view.View")
|
||||
.childSelector(new UiSelector().index(1)
|
||||
.childSelector(new UiSelector().text("RUN")
|
||||
.className("android.view.View")));
|
||||
if (run.exists()) {
|
||||
run.click();
|
||||
|
@ -130,8 +130,8 @@ class RtApp(Workload):
|
||||
'''),
|
||||
Parameter('cpus', kind=cpu_mask, default=0, aliases=['taskset_mask'],
|
||||
description='Constrain execution to specific CPUs.'),
|
||||
Parameter('uninstall_on_exit', kind=bool, default=False,
|
||||
description="""
|
||||
Parameter('uninstall', aliases=['uninstall_on_exit'], kind=bool, default=False,
|
||||
override=True, description="""
|
||||
If set to ``True``, the rt-app binary will be uninstalled from the device
|
||||
at the end of the run.
|
||||
"""),
|
||||
@ -213,9 +213,10 @@ class RtApp(Workload):
|
||||
|
||||
@once
|
||||
def finalize(self, context):
|
||||
if self.uninstall_on_exit:
|
||||
if self.uninstall:
|
||||
self.target.uninstall(self.target_binary)
|
||||
self.target.execute('rm -rf {}'.format(self.target_working_directory))
|
||||
if self.cleanup_assets:
|
||||
self.target.execute('rm -rf {}'.format(self.target_working_directory))
|
||||
|
||||
def _deploy_rt_app_binary_if_necessary(self):
|
||||
# called from initialize() so gets invoked once per run
|
||||
|
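The rt-app hunks above also show the parameter-rename idiom used in this series: the workload-level `uninstall_on_exit` flag is folded into the standard `uninstall` parameter by overriding it and keeping the old name as an alias, so existing agendas keep working. A minimal sketch of that idiom with an invented workload name:

from wa import Workload, Parameter


class ExampleRenamedParam(Workload):

    name = 'example_renamed_param'
    description = 'Hypothetical workload illustrating an aliased parameter override.'

    parameters = [
        # 'uninstall' overrides the base parameter; 'uninstall_on_exit' remains
        # accepted in agendas as a backwards-compatible alias.
        Parameter('uninstall', aliases=['uninstall_on_exit'], kind=bool,
                  default=False, override=True,
                  description='Uninstall the deployed binary at the end of the run.'),
    ]

    def run(self, context):
        pass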
@ -67,4 +67,5 @@ class ShellScript(Workload):
|
||||
wfh.write(self.output)
|
||||
|
||||
def teardown(self, context):
|
||||
self.target.remove(self.on_target_script_file)
|
||||
if self.cleanup_assets:
|
||||
self.target.remove(self.on_target_script_file)
|
||||
|
@ -43,6 +43,8 @@ class Speedometer(UiautoWorkload):
|
||||
''')
|
||||
]
|
||||
|
||||
requires_network = True
|
||||
|
||||
def __init__(self, target, **kwargs):
|
||||
super(Speedometer, self).__init__(target, **kwargs)
|
||||
self.gui.timeout = 1500
|
||||
|
@ -15,11 +15,11 @@
|
||||
# pylint: disable=attribute-defined-outside-init
|
||||
|
||||
import os
|
||||
import yaml
|
||||
|
||||
from wa import Workload, Parameter, ConfigError, Executable
|
||||
from wa.framework.exception import WorkloadError
|
||||
from wa.utils.exec_control import once
|
||||
from wa.utils.serializer import yaml
|
||||
|
||||
|
||||
class StressNg(Workload):
|
||||
@ -131,4 +131,5 @@ class StressNg(Workload):
|
||||
|
||||
@once
|
||||
def finalize(self, context):
|
||||
self.target.uninstall('stress-ng')
|
||||
if self.uninstall:
|
||||
self.target.uninstall('stress-ng')
|
||||
|
@ -147,11 +147,13 @@ class Sysbench(Workload):
|
||||
extract_threads_fairness_metric('execution time', next(fh), context.output)
|
||||
|
||||
def teardown(self, context):
|
||||
self.target.remove(self.target_results_file)
|
||||
if self.cleanup_assets:
|
||||
self.target.remove(self.target_results_file)
|
||||
|
||||
@once
|
||||
def finalize(self, context):
|
||||
self.target.uninstall('sysbench')
|
||||
if self.uninstall:
|
||||
self.target.uninstall('sysbench')
|
||||
|
||||
def _build_command(self, **parameters):
|
||||
param_strings = ['--{}={}'.format(k.replace('_', '-'), v)
|
||||
|
wa/workloads/uibench/__init__.py (new file, 52 lines)
@ -0,0 +1,52 @@
|
||||
# Copyright 2013-2019 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from wa import Parameter, ApkWorkload
|
||||
|
||||
|
||||
class Uibench(ApkWorkload):
|
||||
|
||||
name = 'uibench'
|
||||
description = """
|
||||
Runs a particular activity of the UIBench_ workload suite. The suite
|
||||
is provided by Google as a testbench for the Android UI.
|
||||
|
||||
.. _UIBench: https://android.googlesource.com/platform/frameworks/base/+/refs/heads/master/tests/UiBench/
|
||||
"""
|
||||
package_names = ['com.android.test.uibench']
|
||||
loading_time = 1
|
||||
|
||||
parameters = [
|
||||
Parameter('activity', kind=str,
|
||||
description="""
|
||||
The UIBench activity to be run. Each activity corresponds to
|
||||
a test. If this parameter is not specified, the application is
|
||||
launched in its main menu. Please note that the available
|
||||
activities vary between versions of UIBench (which follow
|
||||
AOSP versioning) and the availability of the services under
|
||||
test may depend on the version of the target Android. We
|
||||
recommend using the APK of UIBench corresponding to the
|
||||
Android version, enforced through the ``version`` parameter to
|
||||
this workload.
|
||||
"""),
|
||||
Parameter('duration', kind=int, default=10,
|
||||
description="""
|
||||
As activities do not finish, this workload will terminate
|
||||
UIBench after the given duration.
|
||||
"""),
|
||||
]
|
||||
|
||||
def run(self, context):
|
||||
super(Uibench, self).run(context)
|
||||
self.target.sleep(self.duration)
|
wa/workloads/uibenchjanktests/__init__.py (new file, 132 lines)
@ -0,0 +1,132 @@
|
||||
# Copyright 2019 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
import re
|
||||
|
||||
from wa import Parameter, ApkWorkload, PackageHandler, TestPackageHandler
|
||||
|
||||
|
||||
class Uibenchjanktests(ApkWorkload):
|
||||
|
||||
name = 'uibenchjanktests'
|
||||
description = """
|
||||
Runs a particular test of the UIBench JankTests_ test suite. The suite
|
||||
is provided by Google as an automated version of the UIBench testbench
|
||||
for the Android UI.
|
||||
|
||||
.. _JankTests: https://android.googlesource.com/platform/platform_testing/+/master/tests/jank/uibench/src/com/android/uibench/janktests
|
||||
"""
|
||||
package_names = ['com.android.uibench.janktests']
|
||||
_DUT_PACKAGE = 'com.android.test.uibench'
|
||||
_DEFAULT_CLASS = 'UiBenchJankTests'
|
||||
_OUTPUT_SECTION_REGEX = re.compile(
|
||||
r'(\s*INSTRUMENTATION_STATUS: gfx-[\w-]+=[-+\d.]+\n)+'
|
||||
r'\s*INSTRUMENTATION_STATUS_CODE: (?P<code>[-+\d]+)\n?', re.M)
|
||||
_OUTPUT_GFXINFO_REGEX = re.compile(
|
||||
r'INSTRUMENTATION_STATUS: (?P<name>[\w-]+)=(?P<value>[-+\d.]+)')
|
||||
|
||||
parameters = [
|
||||
Parameter('test', kind=str,
|
||||
description='Test to be run. Defaults to full run.'),
|
||||
Parameter('wait', kind=bool, default=True,
|
||||
description='Forces am instrument to wait until the '
|
||||
'instrumentation terminates before terminating itself. The '
|
||||
'net effect is to keep the shell open until the tests have '
|
||||
'finished. This flag is not required, but if you do not use '
|
||||
'it, you will not see the results of your tests.'),
|
||||
Parameter('raw', kind=bool, default=False,
|
||||
description='Outputs results in raw format. Use this flag '
|
||||
'when you want to collect performance measurements, so that '
|
||||
'they are not formatted as test results. This flag is '
|
||||
'designed for use with the flag -e perf true.'),
|
||||
Parameter('instrument_args', kind=dict, default={},
|
||||
description='Extra arguments for am instrument.'),
|
||||
Parameter('no_hidden_api_checks', kind=bool, default=False,
|
||||
description='Disables restrictions on the use of hidden '
|
||||
'APIs.'),
|
||||
]
|
||||
|
||||
def __init__(self, target, **kwargs):
|
||||
super(Uibenchjanktests, self).__init__(target, **kwargs)
|
||||
|
||||
if 'iterations' not in self.instrument_args:
|
||||
self.instrument_args['iterations'] = 1
|
||||
|
||||
self.dut_apk = PackageHandler(
|
||||
self,
|
||||
package_name=self._DUT_PACKAGE,
|
||||
variant=self.variant,
|
||||
strict=self.strict,
|
||||
version=self.version,
|
||||
force_install=self.force_install,
|
||||
install_timeout=self.install_timeout,
|
||||
uninstall=self.uninstall,
|
||||
exact_abi=self.exact_abi,
|
||||
prefer_host_package=self.prefer_host_package,
|
||||
clear_data_on_reset=self.clear_data_on_reset)
|
||||
self.apk = TestPackageHandler(
|
||||
self,
|
||||
package_name=self.package_name,
|
||||
variant=self.variant,
|
||||
strict=self.strict,
|
||||
version=self.version,
|
||||
force_install=self.force_install,
|
||||
install_timeout=self.install_timeout,
|
||||
uninstall=self.uninstall,
|
||||
exact_abi=self.exact_abi,
|
||||
prefer_host_package=self.prefer_host_package,
|
||||
clear_data_on_reset=self.clear_data_on_reset,
|
||||
instrument_args=self.instrument_args,
|
||||
raw_output=self.raw,
|
||||
instrument_wait=self.wait,
|
||||
no_hidden_api_checks=self.no_hidden_api_checks)
|
||||
|
||||
def initialize(self, context):
|
||||
super(Uibenchjanktests, self).initialize(context)
|
||||
self.dut_apk.initialize(context)
|
||||
self.dut_apk.initialize_package(context)
|
||||
if 'class' not in self.apk.args:
|
||||
class_for_method = dict(self.apk.apk_info.methods)
|
||||
class_for_method[None] = self._DEFAULT_CLASS
|
||||
try:
|
||||
method = class_for_method[self.test]
|
||||
except KeyError as e:
|
||||
msg = 'Unknown test "{}". Known tests:\n\t{}'
|
||||
known_tests = '\n\t'.join(
|
||||
m for m in class_for_method.keys()
|
||||
if m is not None and m.startswith('test'))
|
||||
raise ValueError(msg.format(e, known_tests))
|
||||
klass = '{}.{}'.format(self.package_names[0], method)
|
||||
|
||||
if self.test:
|
||||
klass += '#{}'.format(self.test)
|
||||
self.apk.args['class'] = klass
|
||||
|
||||
def run(self, context):
|
||||
self.apk.start_activity()
|
||||
self.apk.wait_instrument_over()
|
||||
|
||||
def update_output(self, context):
|
||||
super(Uibenchjanktests, self).update_output(context)
|
||||
output = self.apk.instrument_output
|
||||
for section in self._OUTPUT_SECTION_REGEX.finditer(output):
|
||||
if int(section.group('code')) != -1:
|
||||
msg = 'Run failed (INSTRUMENTATION_STATUS_CODE: {}). See log.'
|
||||
raise RuntimeError(msg.format(section.group('code')))
|
||||
for metric in self._OUTPUT_GFXINFO_REGEX.finditer(section.group()):
|
||||
context.add_metric(metric.group('name'), metric.group('value'))
|
||||
|
||||
def teardown(self, context):
|
||||
super(Uibenchjanktests, self).teardown(context)
|
||||
self.dut_apk.teardown()
|
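For the uibenchjanktests addition above, the two module-level regexes do the heavy lifting in update_output(): sections of 'am instrument' output are matched as a block of gfx-* status lines terminated by a status code, and each gfx-* pair becomes a metric. A standalone illustration against invented sample output (the gfx-* metric names here are made up):

import re

SECTION = re.compile(
    r'(\s*INSTRUMENTATION_STATUS: gfx-[\w-]+=[-+\d.]+\n)+'
    r'\s*INSTRUMENTATION_STATUS_CODE: (?P<code>[-+\d]+)\n?', re.M)
GFXINFO = re.compile(
    r'INSTRUMENTATION_STATUS: (?P<name>[\w-]+)=(?P<value>[-+\d.]+)')

sample = ('INSTRUMENTATION_STATUS: gfx-avg-frame-time-95=7.0\n'
          'INSTRUMENTATION_STATUS: gfx-jank-rate=0.5\n'
          'INSTRUMENTATION_STATUS_CODE: -1\n')

for section in SECTION.finditer(sample):
    # A status code other than -1 indicates the instrumentation run failed.
    if int(section.group('code')) != -1:
        raise RuntimeError('instrumentation run failed')
    for metric in GFXINFO.finditer(section.group()):
        # In WA this becomes context.add_metric(name, value).
        print(metric.group('name'), metric.group('value'))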
@ -48,11 +48,11 @@ class Vellamo(ApkUiautoWorkload):
|
||||
'3.0': ['Browser', 'Metal', 'Multi'],
|
||||
'3.2.4': ['Browser', 'Metal', 'Multi'],
|
||||
}
|
||||
valid_versions = list(benchmark_types.keys())
|
||||
supported_versions = list(benchmark_types.keys())
|
||||
summary_metrics = None
|
||||
|
||||
parameters = [
|
||||
Parameter('version', kind=str, allowed_values=valid_versions, default=sorted(benchmark_types, reverse=True)[0], override=True,
|
||||
Parameter('version', kind=str, allowed_values=supported_versions, override=True,
|
||||
description=('Specify the version of Vellamo to be run. '
|
||||
'If not specified, the latest available version will be used.')),
|
||||
Parameter('benchmarks', kind=list_of_strs, allowed_values=benchmark_types['3.0'], default=benchmark_types['3.0'],
|
||||
@ -73,8 +73,8 @@ class Vellamo(ApkUiautoWorkload):
|
||||
self.gui.uiauto_params['multicore'] = 'Multi' in self.benchmarks
|
||||
super(Vellamo, self).setup(context)
|
||||
|
||||
def validate(self):
|
||||
super(Vellamo, self).validate()
|
||||
def initialize(self, context):
|
||||
super(Vellamo, self).initialize(context)
|
||||
if self.version == '2.0.3' or not self.benchmarks: # pylint: disable=access-member-before-definition
|
||||
self.benchmarks = self.benchmark_types[self.version] # pylint: disable=attribute-defined-outside-init
|
||||
else:
|
||||
|
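The Vellamo hunk above rounds off a third recurring change in this series (also visible in Geekbench and GLBenchmark): one-off configuration is moved out of __init__()/validate() and into initialize(context), which runs once per run against the configured target. A short sketch of the destination shape, with invented names:

from wa import ApkUiautoWorkload


class ExampleUiautoBenchmark(ApkUiautoWorkload):

    name = 'example_uiauto_benchmark'
    description = 'Hypothetical workload illustrating setup work done in initialize().'

    def initialize(self, context):
        super(ExampleUiautoBenchmark, self).initialize(context)
        # Push run configuration down to the UiAutomator APK once per run,
        # instead of doing it at construction time.
        self.gui.uiauto_params['version'] = self.version
        self.gui.uiauto_params['loops'] = 1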