Mirror of https://github.com/ARM-software/workload-automation.git (synced 2025-07-12 10:03:31 +01:00)

Compare commits: master ... dependabot (1 commit)
Commit: ce30d0726f
Changed paths:
    doc/source/user_information
    requirements.txt
    wa/commands
    wa/framework
    wa/instruments
    wa/utils
    wa/workloads/geekbench
    wa/workloads/honorofkings
    wa/workloads/speedometer
@@ -400,7 +400,6 @@ below:
    no_install: false
    report: true
    report_on_target: false
    mode: write-to-memory
csv:
    extra_columns: null
    use_all_classifiers: false
@@ -45,7 +45,6 @@ An example agenda can be seen here:
    no_install: false
    report: true
    report_on_target: false
    mode: write-to-disk
csv: # Provide config for the csv augmentation
    use_all_classifiers: true
@@ -3,7 +3,7 @@ certifi==2024.7.4
cffi==1.15.1
charset-normalizer==3.1.0
colorama==0.4.6
cryptography==43.0.1
cryptography==44.0.1
devlib==1.3.4
future==0.18.3
idna==3.7
@@ -19,7 +19,6 @@
# pylint: disable-all

import sys
import platform
from subprocess import call, Popen, PIPE

from devlib.utils.misc import escape_double_quotes

@@ -73,15 +72,6 @@ class ShowCommand(Command):
            raise NotFoundError('Could not find plugin or alias "{}"'.format(name))

        if which('pandoc'):
            if platform.system() == "Darwin":
                # The version of `man` shipped with macOS does not support `-l`. You need to use GNU man from:
                # https://formulae.brew.sh/formula/man-db
                if which("gman") is None:
                    print(rst_output)
                man = "gman"
            else:
                man = "man"

            p = Popen(['pandoc', '-f', 'rst', '-t', 'man'], stdin=PIPE, stdout=PIPE, stderr=PIPE)
            output, _ = p.communicate(rst_output.encode(sys.stdin.encoding))
            output = output.decode(sys.stdout.encoding)

@@ -94,7 +84,7 @@ class ShowCommand(Command):
            title = '.TH {}{} 7'.format(kind, plugin_name)
            output = '\n'.join([title, body])

            call('echo "{}" | {} -l -'.format(escape_double_quotes(output), man), shell=True)
            call('echo "{}" | man -l -'.format(escape_double_quotes(output)), shell=True)
        else:
            print(rst_output)  # pylint: disable=superfluous-parens
@@ -23,7 +23,7 @@ VersionTuple = namedtuple('Version', ['major', 'minor', 'revision', 'dev'])

version = VersionTuple(3, 4, 0, 'dev1')

required_devlib_version = VersionTuple(1, 4, 0, 'dev3')
required_devlib_version = VersionTuple(1, 4, 0, 'dev2')


def format_version(v):
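The hunk above only changes the devlib version that WA requires. For context, a minimal sketch (not WA's actual check, and format_version() here is a simplified stand-in) of how such a version tuple can be rendered and compared:

    from collections import namedtuple

    VersionTuple = namedtuple('Version', ['major', 'minor', 'revision', 'dev'])

    required_devlib_version = VersionTuple(1, 4, 0, 'dev3')
    installed_devlib_version = VersionTuple(1, 4, 0, 'dev2')


    def format_version(v):
        # e.g. (1, 4, 0, 'dev3') -> '1.4.0.dev3'
        base = '{}.{}.{}'.format(v.major, v.minor, v.revision)
        return '{}.{}'.format(base, v.dev) if v.dev else base


    print(format_version(required_devlib_version))              # 1.4.0.dev3
    # A plain tuple comparison is enough for the dev-suffix bump shown above.
    print(installed_devlib_version >= required_devlib_version)  # False -> devlib too old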
@@ -584,7 +584,6 @@ class ReventGUI(object):

    def __init__(self, workload, target, setup_timeout, run_timeout,
                 extract_results_timeout, teardown_timeout):
        self.logger = logging.getLogger(self.__class__.__name__)
        self.workload = workload
        self.target = target
        self.setup_timeout = setup_timeout

@@ -597,6 +596,7 @@ class ReventGUI(object):
        self.on_target_run_revent = self.target.get_workpath('{}.run.revent'.format(self.target.model))
        self.on_target_extract_results_revent = self.target.get_workpath('{}.extract_results.revent'.format(self.target.model))
        self.on_target_teardown_revent = self.target.get_workpath('{}.teardown.revent'.format(self.target.model))
        self.logger = logging.getLogger('revent')
        self.revent_setup_file = None
        self.revent_run_file = None
        self.revent_extract_results_file = None

@@ -629,9 +629,8 @@ class ReventGUI(object):
                                    timeout=self.setup_timeout)

    def run(self):
        self.logger.debug('Replaying "%s" with %d seconds timeout',
                          os.path.basename(self.on_target_run_revent),
                          self.run_timeout)
        msg = 'Replaying {}'
        self.logger.debug(msg.format(os.path.basename(self.on_target_run_revent)))
        self.revent_recorder.replay(self.on_target_run_revent,
                                    timeout=self.run_timeout)
        self.logger.debug('Replay completed.')
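The run() hunk above swaps an eager str.format() call for logging's lazy %-style arguments. A small standalone illustration of the difference (not part of the diff; the path and timeout are made up):

    import logging
    import os

    logging.basicConfig(level=logging.INFO)   # DEBUG records are dropped
    logger = logging.getLogger('revent')
    path = '/data/local/tmp/example.run.revent'
    timeout = 30

    # Eager: the message string is built even though the record is discarded.
    logger.debug('Replaying {}'.format(os.path.basename(path)))

    # Lazy: formatting is deferred until the record is actually emitted.
    logger.debug('Replaying "%s" with %d seconds timeout',
                 os.path.basename(path), timeout)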
@@ -59,12 +59,6 @@ class FilePoller(Instrument):
                  Whether or not the poller will be run as root. This should be
                  used when the file you need to poll can only be accessed by root.
                  """),
        Parameter('reopen', kind=bool, default=False,
                  description="""
                  When enabled files will be re-opened with each read. This is
                  useful for some sysfs/debugfs entries that only generate a
                  value when opened.
                  """),
    ]

    def validate(self):

@@ -97,17 +91,13 @@ class FilePoller(Instrument):
        if self.align_with_ftrace:
            marker_option = '-m'
            signal.connect(self._adjust_timestamps, signal.AFTER_JOB_OUTPUT_PROCESSED)
        reopen_option = ''
        if self.reopen:
            reopen_option = '-r'
        self.command = '{} {} -t {} {} -l {} {} > {} 2>{}'.format(target_poller,
                                                                  reopen_option,
                                                                  self.sample_interval * 1000,
                                                                  marker_option,
                                                                  ','.join(self.labels),
                                                                  ' '.join(self.files),
                                                                  self.target_output_path,
                                                                  self.target_log_path)
        self.command = '{} -t {} {} -l {} {} > {} 2>{}'.format(target_poller,
                                                               self.sample_interval * 1000,
                                                               marker_option,
                                                               ','.join(self.labels),
                                                               ' '.join(self.files),
                                                               self.target_output_path,
                                                               self.target_log_path)

    def start(self, context):
        self.target.kick_off(self.command, as_root=self.as_root)
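To make the reopen change concrete, a sketch (all values are made up) of the command string the instrument ends up building when reopen is enabled, using the same format string as the hunk above. It assumes sample_interval is in milliseconds, which the '* 1000' converts to the microseconds the poller binary expects.

    target_poller = '/data/local/tmp/wa/poller'      # assumed on-target binary path
    sample_interval = 1000                            # milliseconds
    labels = ['cpu_temp', 'gpu_temp']
    files = ['/sys/class/thermal/thermal_zone0/temp',
             '/sys/class/thermal/thermal_zone1/temp']
    reopen_option = '-r'    # '' when the reopen parameter is left at its default
    marker_option = '-m'    # '' unless align_with_ftrace is enabled

    command = '{} {} -t {} {} -l {} {} > {} 2>{}'.format(
        target_poller, reopen_option, sample_interval * 1000, marker_option,
        ','.join(labels), ' '.join(files),
        '/data/local/tmp/wa/poller.csv', '/data/local/tmp/wa/poller.log')
    print(command)
    # /data/local/tmp/wa/poller -r -t 1000000 -m -l cpu_temp,gpu_temp <files> > ... 2>...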
Binary files not shown.
@@ -77,10 +77,9 @@ int main(int argc, char ** argv) {
    char *labels;
    int labelCount = 0;
    int should_write_marker = 0;
    int reopen_files = 0;
    int ret;

    static char usage[] = "usage: %s [-h] [-m] [-r] [-t INTERVAL] FILE [FILE ...]\n"
    static char usage[] = "usage: %s [-h] [-m] [-t INTERVAL] FILE [FILE ...]\n"
                          "polls FILE(s) every INTERVAL microseconds and outputs\n"
                          "the results in CSV format including a timestamp to STDOUT\n"
                          "\n"

@@ -88,7 +87,6 @@ int main(int argc, char ** argv) {
                          " -m Insert a marker into ftrace at the time of the first\n"
                          "    sample. This marker may be used to align the timestamps\n"
                          "    produced by the poller with those of ftrace events.\n"
                          " -r Reopen files on each read (needed for some sysfs/debugfs files)\n"
                          " -t The polling sample interval in microseconds\n"
                          "    Defaults to 1000000 (1 second)\n"
                          " -l Comma separated list of labels to use in the CSV\n"

@@ -96,7 +94,7 @@ int main(int argc, char ** argv) {

    //Handling command line arguments
    while ((c = getopt(argc, argv, "hmrt:l:")) != -1)
    while ((c = getopt(argc, argv, "hmt:l:")) != -1)
    {
        switch(c) {
            case 'h':

@@ -106,10 +104,7 @@ int main(int argc, char ** argv) {
                break;
            case 'm':
                should_write_marker = 1;
                break;
            case 'r':
                reopen_files = 1;
                break;
            case 't':
                interval = (useconds_t)atoi(optarg);
                break;

@@ -189,20 +184,7 @@ int main(int argc, char ** argv) {
        time_float += ((double)current_time.tv_nsec)/1000/1000/1000;
        printf("%f", time_float);
        for (i = 0; i < num_files; i++) {
            if (reopen_files) {
                // Close and reopen the file to get fresh data
                close(files_to_poll[i].fd);
                files_to_poll[i].fd = open(files_to_poll[i].path, O_RDONLY);
                if (files_to_poll[i].fd == -1) {
                    fprintf(stderr, "WARNING: Could not reopen \"%s\", got: %s\n",
                            files_to_poll[i].path, strerror(errno));
                    printf(",");
                    continue;
                }
            } else {
                lseek(files_to_poll[i].fd, 0, SEEK_SET);
            }

            lseek(files_to_poll[i].fd, 0, SEEK_SET);
            bytes_read = read(files_to_poll[i].fd, buf, 1024);

            if (bytes_read < 0) {
@@ -162,13 +162,6 @@ class TraceCmdInstrument(Instrument):
                  installed on the host (the one in your
                  distribution's repos may be too old).
                  """),
        Parameter('mode', kind=str, default='write-to-memory',
                  allowed_values=['write-to-disk', 'write-to-memory'],
                  description="""
                  Specifies whether collected traces should be saved in memory or disk.
                  Extensive workloads may hit out of memory issue. Hence, write-to-disk
                  mode can help in such cases.
                  """),
    ]

    def __init__(self, target, **kwargs):

@@ -190,7 +183,6 @@ class TraceCmdInstrument(Instrument):
            no_install=self.no_install,
            strict=False,
            report_on_target=False,
            mode=self.mode,
        )
        if self.report and self.report_on_target:
            collector_params['autoreport'] = True

@@ -223,14 +215,12 @@ class TraceCmdInstrument(Instrument):
        if not self.collector:
            return
        self.logger.info('Extracting trace from target...')
        outfile = os.path.join(context.output_directory, OUTPUT_TRACE_FILE)
        outfile = os.path.join(context.output_directory, 'trace.dat')
        self.collector.set_output(outfile)
        self.collector.get_data()
        context.add_artifact('trace-cmd-bin', outfile, 'data')
        if self.report:
            textfile = os.path.join(context.output_directory, OUTPUT_TEXT_FILE)
            textfile = os.path.join(context.output_directory, 'trace.txt')
            if not self.report_on_target:
                self.collector.report(outfile, textfile)
            context.add_artifact('trace-cmd-txt', textfile, 'export')
@@ -14,7 +14,6 @@
#


import logging
import os
import struct
import signal

@@ -118,22 +117,22 @@ class ReventRecording(object):
    Represents a parsed revent recording. This contains input events and device
    descriptions recorded by revent. Two parsing modes are supported. By
    default, the recording will be parsed in the "streaming" mode. In this
    mode, initial headers and device descriptions are parsed on creation and an
    mode, initial headers and device descritions are parsed on creation and an
    open file handle to the recording is saved. Events will be read from the
    file as they are being iterated over. In this mode, the entire recording is
    never loaded into memory at once. The underlying file may be "released" by
    calling ``close`` on the recording, after which further iteration over the
    calling ``close`` on the recroding, after which further iteration over the
    events will not be possible (but would still be possible to access the file
    description and header information).

    The alternative is to load the entire recording on creation (in which case
    the file handle will be closed once the recording is loaded). This can be
    the file handle will be closed once the recroding is loaded). This can be
    enabled by specifying ``streaming=False``. This will make it faster to
    subsequently iterate over the events, and also will not "hold" the file
    open.

    .. note:: When starting a new iteration over the events in streaming mode,
              the position in the open file will be automatically reset to the
              the postion in the open file will be automatically reset to the
              beginning of the event stream. This means it's possible to iterate
              over the events multiple times without having to re-open the
              recording, however it is not possible to do so in parallel. If

@@ -275,11 +274,10 @@ def get_revent_binary(abi):

class ReventRecorder(object):

    # Share location of target executable across all instances
    # Share location of target excutable across all instances
    target_executable = None

    def __init__(self, target):
        self.logger = logging.getLogger(self.__class__.__name__)
        self.target = target
        if not ReventRecorder.target_executable:
            ReventRecorder.target_executable = self._get_target_path(self.target)

@@ -297,8 +295,7 @@ class ReventRecorder(object):
        self.target.uninstall('revent')

    def start_record(self, revent_file):
        command = f'{ReventRecorder.target_executable} record -s {revent_file}'
        self.logger.debug('Executing record command "%s"...', command)
        command = '{} record -s {}'.format(ReventRecorder.target_executable, revent_file)
        self.target.kick_off(command, self.target.is_rooted)

    def stop_record(self):

@@ -306,8 +303,7 @@ class ReventRecorder(object):

    def replay(self, revent_file, timeout=None):
        self.target.killall('revent')
        command = f'{ReventRecorder.target_executable} replay {revent_file}'
        self.logger.debug('Executing replay command "%s" with %d seconds timeout...', command, timeout)
        command = "{} replay {}".format(ReventRecorder.target_executable, revent_file)
        self.target.execute(command, timeout=timeout)

    @memoized
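Based on the ReventRecording docstring above, a hedged usage sketch. The import path follows this file's location in the tree, the recording path is hypothetical, and the `events` attribute name is an assumption taken from the docstring's description of event iteration.

    from wa.utils.revent import ReventRecording

    path = '/path/to/device.run.revent'           # hypothetical recording

    # Streaming (default): headers and device descriptions are parsed up front,
    # events are read lazily from the open file; close() releases the handle.
    recording = ReventRecording(path)
    for event in recording.events:
        pass                                      # process each input event here
    recording.close()

    # Non-streaming: the whole recording is loaded on creation and the file is
    # closed immediately; repeated iteration is then cheaper.
    loaded = ReventRecording(path, streaming=False)
    first_pass = list(loaded.events)
    second_pass = list(loaded.events)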
@@ -1,4 +1,4 @@
# Copyright 2013-2025 ARM Limited
# Copyright 2013-2018 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -20,11 +20,10 @@ import tempfile
import json
from collections import defaultdict

from wa import Workload, ApkUiautoWorkload, Parameter
from wa import ApkUiautoWorkload, Parameter
from wa.framework.exception import ConfigError, WorkloadError
from wa.utils.misc import capitalize
from wa.utils.types import version_tuple, list_or_integer
from wa.utils.exec_control import once
from wa.utils.types import version_tuple


class Geekbench(ApkUiautoWorkload):

@@ -371,233 +370,3 @@ class GeekbenchCorproate(Geekbench):  # pylint: disable=too-many-ancestors

def namemify(basename, i):
    return basename + (' {}'.format(i) if i else '')


class GeekbenchCmdline(Workload):

    name = "geekbench_cli"
    description = "Workload for running command line version Geekbench"

    gb6_workloads = {
        # Single-Core and Multi-Core
        101: 'File Compression',
        102: 'Navigation',
        103: 'HTML5 Browser',
        104: 'PDF Renderer',
        105: 'Photo Library',
        201: 'Clang',
        202: 'Text Processing',
        203: 'Asset Compression',
        301: 'Object Detection',
        402: 'Object Remover',
        403: 'HDR',
        404: 'Photo Filter',
        501: 'Ray Tracer',
        502: 'Structure from Motion',
        # OpenCL and Vulkan
        303: 'Face Detection',
        406: 'Edge Detection',
        407: 'Gaussian Blur',
        503: 'Feature Matching',
        504: 'Stereo Matching',
        601: 'Particle Physics',
        # Single-Core, Multi-Core, OpenCL, and Vulkan
        302: 'Background Blur',
        401: 'Horizon Detection',
    }

    gb5_workloads = {
        # Single-Core and Multi-Core
        101: 'AES-XTS',
        201: 'Text Compression',
        202: 'Image Compression',
        203: 'Navigation',
        204: 'HTML5',
        205: 'SQLite',
        206: 'PDF Rendering',
        207: 'Text Rendering',
        208: 'Clang',
        209: 'Camera',
        301: 'N-Body Physics',
        302: 'Rigid Body Physics',
        307: 'Image Inpainting',
        308: 'HDR',
        309: 'Ray Tracing',
        310: 'Structure from Motion',
        312: 'Speech Recognition',
        313: 'Machine Learning',
        # OpenCL and Vulkan
        220: 'Sobel',
        221: 'Canny',
        222: 'Stereo Matching',
        230: 'Histogram Equalization',
        304: 'Depth of Field',
        311: 'Feature Matching',
        320: 'Particle Physics',
        321: 'SFFT',
        # Single-Core, Multi-Core, OpenCL, and Vulkan
        303: 'Gaussian Blur',
        305: 'Face Detection',
        306: 'Horizon Detection',
    }

    binary_name = 'geekbench_aarch64'

    allowed_extensions = ['json', 'csv', 'xml', 'html', 'text']

    parameters = [
        Parameter('cpumask', kind=str, default='',
                  description='CPU mask used by taskset.'),
        Parameter('section', kind=int, default=1, allowed_values=[1, 4, 9],
                  description="""Run the specified sections. It should be 1 for CPU benchmarks,
                  4 for OpenCL benchmarks and 9 for Vulkan benchmarks."""),
        Parameter('upload', kind=bool, default=False,
                  description='Upload results to Geekbench Browser'),
        Parameter('is_single_core', kind=bool, default=True,
                  description='Run workload in single-core or multi-core mode.'),
        Parameter('workload', kind=list_or_integer, default=301,
                  description='Specify workload to run'),
        Parameter('iterations', kind=int, default=5,
                  description='Number of iterations'),
        Parameter('workload_gap', kind=int, default=2000,
                  description='N milliseconds gap between workloads'),
        Parameter('output_file', kind=str, default='gb_cli.json',
                  description=f"""Specify the name of the output results file.
                  If it is not specified, the output file will be generated as a JSON file.
                  It can be {', '.join(allowed_extensions)} files."""),
        Parameter('timeout', kind=int, default=2000,
                  description='The test timeout in ms. It should be long for 1000 iterations.'),
        Parameter('version', kind=str, default='6.3.0',
                  description='Specifies which version of the Geekbench should run.'),
    ]

    def __init__(self, target, **kwargs):
        super(GeekbenchCmdline, self).__init__(target, **kwargs)
        self.target_result_json = None
        self.host_result_json = None
        self.workloads = self.gb6_workloads
        self.params = ''
        self.output = ''
        self.target_exec_directory = ''
        self.tar_file_src = ''
        self.tar_file_dst = ''
        self.file_exists = False

    def init_resources(self, context):
        """
        Retrieves necessary files to run the benchmark in TAR format.
        WA will look for `gb_cli_artifacts_<version>.tar` file to deploy them to the
        working directory. If there is no specified version, it will look for version
        6.3.0 by default.
        """
        self.deployable_assets = [''.join(['gb_cli_artifacts', '_', self.version, '.tar'])]

        # Create an executables directory
        self.target_exec_directory = self.target.path.join(self.target.executables_directory, f'gb_cli-{self.version}')
        self.target.execute("mkdir -p {}".format(self.target_exec_directory))

        # Source and Destination paths for the artifacts tar file
        self.tar_file_src = self.target.path.join(self.target.working_directory, self.deployable_assets[0])
        self.tar_file_dst = self.target.path.join(self.target_exec_directory, self.deployable_assets[0])
        # Check the tar file if it already exists
        if self.target.file_exists(self.tar_file_dst):
            self.file_exists = True
        else:
            # Get the assets file
            super(GeekbenchCmdline, self).init_resources(context)

    @once
    def initialize(self, context):
        if self.version[0] == '5':
            self.workloads = self.gb5_workloads
        # If the tar file does not exist in the target, deploy the assets
        if not self.file_exists:
            super(GeekbenchCmdline, self).initialize(context)
            # Move the tar file to the executables directory
            self.target.execute(
                '{} mv {} {}'.format(
                    self.target.busybox, self.tar_file_src, self.tar_file_dst))
            # Extract the tar file
            self.target.execute(
                '{} tar -xf {} -C {}'.format(
                    self.target.busybox, self.tar_file_dst, self.target_exec_directory))

    def setup(self, context):
        super(GeekbenchCmdline, self).setup(context)

        self.params = ''

        self.params += '--section {} '.format(self.section)
        if self.section == 1:
            self.params += '--single-core ' if self.is_single_core else '--multi-core '

        self.params += '--upload ' if self.upload else '--no-upload '

        known_workloads = '\n'.join("{}: {}".format(k, v) for k, v in self.workloads.items())
        if any([t not in self.workloads.keys() for t in self.workload]):
            msg = 'Unknown workload(s) specified. Known workloads: {}'
            raise ValueError(msg.format(known_workloads))

        self.params += '--workload {} '.format(''.join("{},".format(i) for i in self.workload))

        if self.iterations:
            self.params += '--iterations {} '.format(self.iterations)

        if self.workload_gap:
            self.params += '--workload-gap {} '.format(self.workload_gap)

        extension = os.path.splitext(self.output_file)[1][1:]
        if self.output_file and extension not in self.allowed_extensions:
            msg = f"No allowed extension specified. Allowed extensions: {', '.join(self.allowed_extensions)}"
            raise ValueError(msg)
        elif self.output_file:
            # Output results file with the given name and extension
            self.target_result_json = os.path.join(self.target_exec_directory, self.output_file)
            self.params += '--export-{} {}'.format(extension, self.target_result_json)
            self.host_result_json = os.path.join(context.output_directory, self.output_file)
        else:
            # The output file is not specified
            self.target_result_json = os.path.join(self.target_exec_directory, self.output_file)
            self.params += '--save {}'.format(self.target_result_json)
            self.host_result_json = os.path.join(context.output_directory, self.output_file)

    def run(self, context):
        super(GeekbenchCmdline, self).run(context)
        taskset = f"taskset {self.cpumask}" if self.cpumask else ""
        binary = self.target.path.join(self.target_exec_directory, self.binary_name)
        cmd = '{} {} {}'.format(taskset, binary, self.params)

        try:
            self.output = self.target.execute(cmd, timeout=self.timeout, as_root=True)
        except KeyboardInterrupt:
            self.target.killall(self.binary_name)
            raise

    def update_output(self, context):
        super(GeekbenchCmdline, self).update_output(context)
        if not self.output:
            return
        for workload in self.workload:
            scores = []
            matches = re.findall(self.workloads[workload] + '(.+\d)', self.output)
            for match in matches:
                scores.append(int(re.search(r'\d+', match).group(0)))
            if self.section == 4:
                context.add_metric("OpenCL Score " + self.workloads[workload], scores[0])
            elif self.section == 9:
                context.add_metric("Vulkan Score " + self.workloads[workload], scores[0])
            else:
                context.add_metric("Single-Core Score " + self.workloads[workload], scores[0])
                if not self.is_single_core:
                    context.add_metric("Multi-Core Score " + self.workloads[workload], scores[1])

    def extract_results(self, context):
        # Extract results on the target
        super(GeekbenchCmdline, self).extract_results(context)
        self.target.pull(self.target_result_json, self.host_result_json)
        context.add_artifact('GeekbenchCmdline_results', self.host_result_json, kind='raw')

    @once
    def finalize(self, context):
        if self.cleanup_assets:
            self.target.remove(self.target_exec_directory)
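A sketch of the command line that setup() and run() above assemble for a typical single-core GB6 run. The mask, directory and file names are examples only; the flags mirror the parameter handling shown in the class (section 1, single-core, workload 301, JSON export).

    cpumask = '80'
    exec_dir = '/data/local/tmp/wa/bin/gb_cli-6.3.0'      # assumed executables directory
    binary = exec_dir + '/geekbench_aarch64'
    target_result = exec_dir + '/gb_cli.json'

    params = ('--section 1 --single-core --no-upload --workload 301, '
              '--iterations 5 --workload-gap 2000 '
              '--export-json {}'.format(target_result))
    cmd = 'taskset {} {} {}'.format(cpumask, binary, params)
    print(cmd)
    # taskset 80 .../geekbench_aarch64 --section 1 --single-core --no-upload
    #   --workload 301, --iterations 5 --workload-gap 2000 --export-json .../gb_cli.json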
@@ -1,61 +0,0 @@
# Copyright 2025 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import os

from wa import ApkReventWorkload, Parameter


class HoK(ApkReventWorkload):
    name = 'honorofkings'
    uninstall = False
    clear_data_on_reset = False  # Don't clear assets on exit
    requires_network = True  # The game requires network connection
    description = (
        'Launch a match replay in Honor of Kings.\n\n'
        'The game must already have a user logged in and the plugins downloaded.'
    )
    package_names = [
        'com.levelinfinite.sgameGlobal',
        'com.tencent.tmgp.sgame',
    ]

    parameters = [
        Parameter(
            'activity',
            kind=str,
            default='.SGameGlobalActivity',
            description='Activity name of Honor of Kings game.',
        ),
        Parameter(
            'replay_file',
            kind=str,
            default='replay.abc',
            description='Honor of Kings Replay file name.',
        ),
    ]

    def setup(self, context):
        upload_dir = self.target.path.join(
            self.target.external_storage_app_dir,
            self.apk.apk_info.package,
            'files',
            'Replay'
        )
        replay_file = os.path.join(self.dependencies_directory, self.replay_file)
        self.logger.debug('Uploading "%s" to "%s"...', replay_file, upload_dir)
        self.target.push(replay_file, upload_dir)

        super().setup(context)
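For clarity, the on-device destination that setup() above constructs, with an assumed external-storage root (the real value comes from the target at run time):

    import posixpath

    external_storage_app_dir = '/sdcard/Android/data'   # assumed target value
    package = 'com.levelinfinite.sgameGlobal'            # first entry in package_names
    upload_dir = posixpath.join(external_storage_app_dir, package, 'files', 'Replay')
    print(upload_dir)  # /sdcard/Android/data/com.levelinfinite.sgameGlobal/files/Replay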
@@ -1,10 +1,7 @@
The speedometer_archive.tgz file is a tarball containing the following archives from WebKit:

the PerformanceTests/Speedometer directory state taken from https://github.com/WebKit/webkit as of:
2.0:
    commit 5f402692d5f3406527dc107b5d20cc47dac929e8 Tue Jul 14 14:06:17 2020 +0000
3.0:
    commit 734c49b3d075dcc33f56becf3bde8aca5245b719 Mon Feb 24 09:00:53 2025 -0800

WebKit is open source software with portions licensed under the LGPL and BSD
licenses available at https://webkit.org/licensing-webkit/
@@ -1,4 +1,4 @@
# Copyright 2014-2025 ARM Limited
# Copyright 2014-2018 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -14,10 +14,10 @@
#
from collections import defaultdict
from http.server import SimpleHTTPRequestHandler, HTTPServer
from urllib.parse import urlencode
import lzma
import logging
import os
import re
import subprocess
import tarfile
import tempfile
import threading

@@ -36,7 +36,7 @@ class Speedometer(Workload):

    name = "speedometer"
    description = """
    A workload to execute the speedometer web based benchmark. Requires device to be rooted.
    A workload to execute the speedometer 2.0 web based benchmark. Requires device to be rooted.
    This workload will only work with Android 9+ devices if connected via TCP, or Android 5+ if
    connected via USB.

@@ -54,15 +54,14 @@ class Speedometer(Workload):

    1. Run 'git clone https://github.com/WebKit/webkit'

    2. Copy PerformanceTests/Speedometer to a directory called document_root, renaming Speedometer
       to Speedometer<version>. For example, Speedometer2.0.
    2. Copy PerformanceTests/Speedometer to a directory called document_root, renaming Speedometer to Speedometer2.0

    3. Modify document_root/Speedometer<version>/index.html:
    3. Modify document_root/Speedometer2.0/index.html:

    3a. (Skip for v3.0) Remove the 'defer' attribute from the <script> tags within the <head> section.
    3a. Remove the 'defer' attribute from the <script> tags within the <head> section.
    3b. Add '<script>startTest();</script>' to the very end of the <body> section.

    4. Modify document_root/Speedometer<version>/resources/main.js (it's main.mjs for 3.0):
    4. Modify document_root/Speedometer2.0/resources/main.js:

    4a. Add the listed code after this line:

@@ -82,7 +81,7 @@ class Speedometer(Workload):
        }
    }

    5. Run 'tar -cpzf speedometer_archive-<version>.tar document_root; xz --format=lzma -9 -e speedometer_archive-<version>.tar'
    5. Run 'tar -cpzf speedometer_archive.tgz document_root'

    6. Copy the tarball into the workloads/speedometer directory

@@ -121,15 +120,6 @@ class Speedometer(Workload):
                The app package for the browser that will be launched.
            """,
        ),
        Parameter(
            "version",
            allowed_values=["2.0", "3.0"],
            kind=str,
            default="2.0",
            description="""
                Speedometer version to run. Currently supports 2.0 and 3.0.
            """,
        ),
    ]

    def __init__(self, target, **kwargs):

@@ -164,16 +154,13 @@ class Speedometer(Workload):
        Speedometer.document_root = os.path.join(self.temp_dir.name, "document_root")

        # Host a copy of Speedometer locally
        tarball = context.get_resource(File(self, f"speedometer_archive-{self.version}.tar.lzma"))
        with lzma.open(tarball) as lzma_handle:
            with tarfile.open(fileobj=lzma_handle) as handle:
                safe_extract(handle, self.temp_dir.name)
        tarball = context.get_resource(File(self, "speedometer_archive.tgz"))
        with tarfile.open(name=tarball) as handle:
            safe_extract(handle, self.temp_dir.name)
        self.archive_server.start(self.document_root)

        Speedometer.speedometer_url = "http://localhost:{}/Speedometer{}/index.html".format(
            self.archive_server.get_port(),
            self.version,
        Speedometer.speedometer_url = "http://localhost:{}/Speedometer2.0/index.html".format(
            self.archive_server.get_port()
        )

    def setup(self, context):

@@ -253,14 +240,10 @@ class Speedometer(Workload):
        # Generate a UUID to search for in the browser's local storage to find out
        # when the workload has ended.
        report_end_id = uuid.uuid4().hex
        url_with_unique_id = "{}?reportEndId={}".format(
            self.speedometer_url, report_end_id
        )

        query_params = {"reportEndId": report_end_id}
        # Speedometer 3.0 does not start the test automatically, so we need to
        # pass the "startAutomatically=true" parameter.
        if self.version == "3.0":
            query_params["startAutomatically"] = "true"

        url_with_unique_id = f"{self.speedometer_url}?{urlencode(query_params)}"
        browser_launch_cmd = "am start -a android.intent.action.VIEW -d '{}' {}".format(
            url_with_unique_id, self.chrome_package
        )

@@ -292,7 +275,6 @@ class Speedometer(Workload):
        benchmark_complete = False
        while not benchmark_complete:
            if self.target_file_was_created(local_storage):
                candidate_files = []
                if (
                    iterations % (find_period_s // sleep_period_s) == 0
                    or not local_storage_seen

@@ -326,12 +308,12 @@ class Speedometer(Workload):
            iterations += 1

            if iterations > ((timeout_period_m * 60) // sleep_period_s):
                # We've been waiting <timeout_period_m> minutes for Speedometer to finish running - give up.
                # We've been waiting 15 minutes for Speedometer to finish running - give up.
                if not local_storage_seen:
                    raise WorkloadError(
                        f"Speedometer did not complete within {timeout_period_m} minutes - Local Storage wasn't found"
                        "Speedometer did not complete within 15m - Local Storage wasn't found"
                    )
                raise WorkloadError(f"Speedometer did not complete within {timeout_period_m} minutes.")
                raise WorkloadError("Speedometer did not complete within 15 minutes.")

            time.sleep(sleep_period_s)
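A small standalone sketch of the URL construction shown above for a 3.0 run; the host and port are examples (the real port comes from the local archive server):

    from urllib.parse import urlencode
    import uuid

    version = '3.0'
    speedometer_url = 'http://localhost:8000/Speedometer3.0/index.html'
    report_end_id = uuid.uuid4().hex

    query_params = {'reportEndId': report_end_id}
    if version == '3.0':
        # Speedometer 3.0 does not start on page load unless told to.
        query_params['startAutomatically'] = 'true'

    url_with_unique_id = f'{speedometer_url}?{urlencode(query_params)}'
    print(url_with_unique_id)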
Binary file not shown: wa/workloads/speedometer/speedometer_archive.tgz (new file)