mirror of
https://github.com/ARM-software/workload-automation.git
synced 2025-09-02 03:12:34 +01:00
Initial commit of open source Workload Automation.
This commit is contained in:
231
wlauto/workloads/bbench/__init__.py
Normal file
231
wlauto/workloads/bbench/__init__.py
Normal file
@@ -0,0 +1,231 @@
|
||||
# Copyright 2012-2015 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# pylint: disable=E1101,W0201
|
||||
import os
|
||||
import time
|
||||
import urllib
|
||||
import tarfile
|
||||
import shutil
|
||||
import json
|
||||
import re
|
||||
|
||||
from collections import defaultdict
|
||||
|
||||
from wlauto import settings, Workload, Parameter, Alias, Executable
|
||||
from wlauto.exceptions import ConfigError
|
||||
from wlauto.utils.types import boolean
|
||||
|
||||
# URL of the BBench 2.0 site-set tarball, fetched when not already cached locally.
DEFAULT_BBENCH_FILE = "http://bbench.eecs.umich.edu/bbench/bbench_2.0.tgz"
# File name the tarball is saved under in the dependencies directory.
DOWNLOADED_FILE_NAME = "bbench_2.0.tgz"
# Name of the native helper binary the browser redirects to on completion.
BBENCH_SERVER_NAME = 'bbench_server'
# Directory (alongside this module) containing the patched bbench.js/html files.
PATCH_FILES = os.path.join(os.path.dirname(__file__), "patches")
# Audio file downloaded and played in the background when ``with_audio`` is set.
DEFAULT_AUDIO_FILE = "http://archive.org/download/PachelbelsCanoninD/Canon_in_D_Piano.mp3"
DEFAULT_AUDIO_FILE_NAME = 'Canon_in_D_Piano.mp3'
|
||||
|
||||
class BBench(Workload):
    """BBench browser benchmark workload, optionally with background audio playback."""

    name = 'bbench'
    description = """
    BBench workload opens the built-in browser and navigates to, and
    scrolls through, some preloaded web pages and ends the workload by trying to
    connect to a local server it runs after it starts. It can also play the
    workload while it plays an audio file in the background.

    """

    summary_metrics = ['Mean Latency']

    parameters = [
        Parameter('with_audio', kind=boolean, default=False,
                  description=('Specifies whether an MP3 should be played in the background during '
                               'workload execution.')),
        Parameter('server_timeout', kind=int, default=300,
                  description='Specifies the timeout (in seconds) before the server is stopped.'),
        Parameter('force_dependency_push', kind=boolean, default=False,
                  # Fixed duplicated "to the device to the device" in the help text.
                  description=('Specifies whether to push dependency files to the device '
                               'if they are already on it.')),
        Parameter('audio_file', default=os.path.join(settings.dependencies_directory, 'Canon_in_D_Piano.mp3'),
                  description=('The (on-host) path to the audio file to be played. This is only used if '
                               '``with_audio`` is ``True``.')),
        Parameter('perform_cleanup', kind=boolean, default=False,
                  description='If ``True``, workload files on the device will be deleted after execution.'),
        Parameter('clear_file_cache', kind=boolean, default=True,
                  # Fixed duplicated "the the" in the help text.
                  description='Clear the file cache on the target device prior to running the workload.'),
        Parameter('browser_package', default='com.android.browser',
                  description='Specifies the package name of the device\'s browser app.'),
        Parameter('browser_activity', default='.BrowserActivity',
                  description='Specifies the startup activity name of the device\'s browser app.'),
    ]

    aliases = [
        Alias('bbench_with_audio', with_audio=True),
    ]

    def setup(self, context):  # NOQA
        """Deploy the site set, server binary and (optional) audio file to the
        device, then prime the browser and clear caches/logs for the run."""
        self.bbench_on_device = '/'.join([self.device.working_directory, 'bbench'])
        self.bbench_server_on_device = os.path.join(self.device.working_directory, BBENCH_SERVER_NAME)
        self.audio_on_device = os.path.join(self.device.working_directory, DEFAULT_AUDIO_FILE_NAME)
        self.index_noinput = 'file:///{}'.format(self.bbench_on_device) + '/index_noinput.html'
        # Fixed attribute-name typo: was ``luanch_server_command``.
        self.launch_server_command = '{} {}'.format(BBENCH_SERVER_NAME, self.server_timeout)

        # Fetch host-side dependencies if they are not already cached.
        if not os.path.isdir(os.path.join(self.dependencies_directory, "sites")):
            self._download_bbench_file()
        if self.with_audio and not os.path.isfile(self.audio_file):
            self._download_audio_file()

        if not os.path.isdir(self.dependencies_directory):
            raise ConfigError('Bbench directory does not exist: {}'.format(self.dependencies_directory))
        self._apply_patches()

        if self.with_audio:
            if self.force_dependency_push or not self.device.file_exists(self.audio_on_device):
                self.device.push_file(self.audio_file, self.audio_on_device, timeout=120)

        # Push the bbench site pages to the target device.
        if self.force_dependency_push or not self.device.file_exists(self.bbench_on_device):
            self.logger.debug('Copying bbench sites to device.')
            self.device.push_file(self.dependencies_directory, self.bbench_on_device, timeout=300)

        # Push and install the bbench completion server binary.
        host_binary = context.resolver.get(Executable(self, self.device.abi, 'bbench_server'))
        self.device.install(host_binary)

        # Open the browser with a blank page so it is warm before the run.
        self.device.execute('am start -n {}/{} about:blank'.format(self.browser_package, self.browser_activity))
        time.sleep(5)

        # Stop the browser if already running and wait for it to stop.
        self.device.execute('am force-stop {}'.format(self.browser_package))
        time.sleep(5)

        # Clear the logs so update_result() only sees this run's output.
        self.device.clear_logcat()

        # Clear browser cache/state.
        self.device.execute('pm clear {}'.format(self.browser_package))
        if self.clear_file_cache:
            self.device.execute('sync')
            self.device.set_sysfile_value('/proc/sys/vm/drop_caches', 3)

        # Launch the background music, if requested.
        if self.with_audio:
            self.device.execute('am start -W -S -n com.android.music/.MediaPlaybackActivity -d {}'.format(self.audio_on_device))

    def run(self, context):
        """Start BBench in the browser and block on the on-device server until
        the benchmark's results page redirects to it (or the timeout expires)."""
        self.device.execute('am start -n {}/{} {}'.format(self.browser_package, self.browser_activity, self.index_noinput))
        time.sleep(5)  # WA1 parity
        # The server exits once results.html redirects to localhost:3030.
        self.device.execute(self.launch_server_command, self.server_timeout)

    def update_result(self, context):
        """Stop the browser (and music), pull the results page and logcat, and
        extract per-site and mean latency metrics."""
        self.device.execute('am force-stop {}'.format(self.browser_package))

        if self.with_audio:
            self.device.execute('am force-stop com.android.music')

        # Pull index_noinput.html; it encodes the run settings in its onload call.
        indexfile = os.path.join(self.device.working_directory, 'bbench/index_noinput.html')
        self.device.pull_file(indexfile, context.output_directory)

        # Dump and pull the logcat containing the "metrics:" console lines.
        output_file = os.path.join(self.device.working_directory, 'browser_bbench_logcat.txt')
        self.device.execute('logcat -v time -d > {}'.format(output_file))
        self.device.pull_file(output_file, context.output_directory)

        metrics = _parse_metrics(os.path.join(context.output_directory, 'browser_bbench_logcat.txt'),
                                 os.path.join(context.output_directory, 'index_noinput.html'),
                                 context.output_directory)
        for key, values in metrics:
            for i, value in enumerate(values):
                # First value keeps the bare name; repeats get a numeric suffix.
                metric = '{}_{}'.format(key, i) if i else key
                context.result.add_metric(metric, value, units='ms', lower_is_better=True)

    def teardown(self, context):
        """Optionally remove the workload files pushed to the device."""
        if self.perform_cleanup:
            self.device.execute('rm -r {}'.format(self.bbench_on_device))
            self.device.execute('rm {}'.format(self.audio_on_device))

    def _download_audio_file(self):
        """Fetch the background audio file to the configured host path."""
        self.logger.debug('Downloading audio file.')  # fixed "Downloadling" typo
        urllib.urlretrieve(DEFAULT_AUDIO_FILE, self.audio_file)

    def _download_bbench_file(self):
        """Download and unpack the BBench site set into the dependencies
        directory, pruning files the workload does not need."""
        self.logger.debug('Downloading bbench dependencies.')
        full_file_path = os.path.join(self.dependencies_directory, DOWNLOADED_FILE_NAME)
        urllib.urlretrieve(DEFAULT_BBENCH_FILE, full_file_path)

        self.logger.debug('Extracting bbench dependencies.')
        tar = tarfile.open(full_file_path)
        tar.extractall(os.path.dirname(self.dependencies_directory))

        # Remove the tarball and the large flash videos (youtube is not visited).
        os.remove(full_file_path)
        youtube_dir = os.path.join(self.dependencies_directory, 'sites', 'youtube')
        os.remove(os.path.join(youtube_dir, 'www.youtube.com', 'kp.flv'))
        os.remove(os.path.join(youtube_dir, 'kp.flv'))

    def _apply_patches(self):
        """Overlay the WA-patched driver/results pages onto the extracted site set."""
        self.logger.debug('Applying patches.')
        shutil.copy(os.path.join(PATCH_FILES, "bbench.js"), self.dependencies_directory)
        shutil.copy(os.path.join(PATCH_FILES, "results.html"), self.dependencies_directory)
        shutil.copy(os.path.join(PATCH_FILES, "index_noinput.html"), self.dependencies_directory)
|
||||
|
||||
def _parse_metrics(logfile, indexfile, output_directory): # pylint: disable=R0914
|
||||
regex_bbscore = re.compile(r'(?P<head>\w+)=(?P<val>\w+)')
|
||||
regex_bbmean = re.compile(r'Mean = (?P<mean>[0-9\.]+)')
|
||||
regex_pagescore_head = re.compile(r'metrics:(\w+),(\d+)')
|
||||
regex_pagescore_tail = re.compile(r',(\d+.\d+)')
|
||||
regex_indexfile = re.compile(r'<body onload="startTest\((.*)\)">')
|
||||
settings_dict = defaultdict()
|
||||
|
||||
with open(indexfile) as fh:
|
||||
for line in fh:
|
||||
match = regex_indexfile.search(line)
|
||||
if match:
|
||||
settings_dict['iterations'], settings_dict['scrollDelay'], settings_dict['scrollSize'] = match.group(1).split(',')
|
||||
with open(logfile) as fh:
|
||||
results_dict = defaultdict(list)
|
||||
for line in fh:
|
||||
if 'metrics:Mean' in line:
|
||||
results_list = regex_bbscore.findall(line)
|
||||
results_dict['Mean Latency'].append(regex_bbmean.search(line).group('mean'))
|
||||
if results_list:
|
||||
break
|
||||
elif 'metrics:' in line:
|
||||
page_results = [0]
|
||||
match = regex_pagescore_head.search(line)
|
||||
name, page_results[0] = match.groups()
|
||||
page_results.extend(regex_pagescore_tail.findall(line[match.end():]))
|
||||
for val in page_results[:-2]:
|
||||
results_list.append((name, int(float(val))))
|
||||
|
||||
setting_names = ['siteIndex', 'CGTPreviousTime', 'scrollDelay', 'scrollSize', 'iterations']
|
||||
for k, v in results_list:
|
||||
if k not in setting_names:
|
||||
results_dict[k].append(v)
|
||||
|
||||
sorted_results = sorted(results_dict.items())
|
||||
|
||||
with open(os.path.join(output_directory, 'settings.json'), 'w') as wfh:
|
||||
json.dump(settings_dict, wfh)
|
||||
|
||||
return sorted_results
|
BIN
wlauto/workloads/bbench/bin/arm64/bbench_server
Executable file
BIN
wlauto/workloads/bbench/bin/arm64/bbench_server
Executable file
Binary file not shown.
BIN
wlauto/workloads/bbench/bin/armeabi/bbench_server
Executable file
BIN
wlauto/workloads/bbench/bin/armeabi/bbench_server
Executable file
Binary file not shown.
177
wlauto/workloads/bbench/patches/bbench.js
Normal file
177
wlauto/workloads/bbench/patches/bbench.js
Normal file
@@ -0,0 +1,177 @@
|
||||
//Author: Anthony Gutierrez
|
||||
|
||||
// Ordered list of page URLs to visit; filled in by generateSiteArray().
var bb_site = [];
// Declared but never used in this file.
var bb_results = [];
// Index of the page currently being visited.
var globalSiteIndex = 0;
// Number of distinct sites in one iteration of the benchmark.
var numWebsites = 9;
// Derive the bbench install root from the current page's path.
var bb_path = document.location.pathname;
var bb_home = "file:///" + bb_path.substr(1, bb_path.lastIndexOf("bbench") + 5);
var num_iters = 0;
// True only when the benchmark is started from the index page via startTest();
// on subsequent site pages it stays false and siteTest() recovers state from the URL.
var init = false;
|
||||
|
||||
/* Populate bb_site with numTimesToExecute passes over the 9-site set,
   followed by the results page as the final entry. */
function generateSiteArray(numTimesToExecute) {
    for (i = 0; i < numTimesToExecute * numWebsites; i += numWebsites) {
        bb_site[i+0] = bb_home + "/sites/amazon/www.amazon.com/index.html";
        bb_site[i+1] = bb_home + "/sites/bbc/www.bbc.co.uk/index.html";
        bb_site[i+2] = bb_home + "/sites/cnn/www.cnn.com/index.html";
        bb_site[i+3] = bb_home + "/sites/craigslist/newyork.craigslist.org/index.html";
        bb_site[i+4] = bb_home + "/sites/ebay/www.ebay.com/index.html";
        bb_site[i+5] = bb_home + "/sites/google/www.google.com/index.html";
//        bb_site[i+6] = bb_home + "/sites/youtube/www.youtube.com/index.html";
        bb_site[i+6] = bb_home + "/sites/msn/www.msn.com/index.html";
        bb_site[i+7] = bb_home + "/sites/slashdot/slashdot.org/index.html";
        bb_site[i+8] = bb_home + "/sites/twitter/twitter.com/index.html";
//        bb_site[i+10] = bb_home + "/sites/espn/espn.go.com/index.html";
    }

    // NOTE: relies on the (implicitly global) loop variable i still holding
    // numTimesToExecute * numWebsites here, placing results.html last.
    bb_site[i] = bb_home + "/results.html";
}
|
||||
|
||||
|
||||
/* gets the URL parameters and removes from window href */
|
||||
/* Extract the numeric value of `param` from windowURL.value and strip the
   matched "?param=value&" fragment out of it (destructive read).
   Returns "" when the parameter is absent. */
function getAndRemoveURLParams(windowURL, param) {
    var pattern = new RegExp("(.*)(\\?)" + param + "(=)([0-9]+)(&)(.*)");
    var pieces = pattern.exec(windowURL.value);

    if (pieces == null) {
        return "";
    }

    // Rejoin what surrounded the matched "?param=value&" chunk.
    windowURL.value = pieces[1] + pieces[6];
    return pieces[4];
}
|
||||
|
||||
/* gets the URL parameters */
|
||||
/* Return the numeric value of `param` from the current window's URL,
   or "" when it is not present. Unlike getAndRemoveURLParams, this
   does not modify anything. */
function getURLParams(param) {
    var regex_string = "(.*)(\\?)" + param + "(=)([0-9]+)(&)(.*)";
    var regex = new RegExp(regex_string);
    var results = regex.exec(window.location.href);

    if (results == null)
        return "";
    else
        return results[4];
}
|
||||
|
||||
/* gets all the parameters */
|
||||
/* Return the whole accumulated parameter string of the current URL up to
   (but excluding) the trailing "?siteIndex=N&", or "" when absent. Used to
   forward the accumulated timings to the next page. */
function getAllParams() {
    var regex_string = "(\\?.*)(\\?siteIndex=)([0-9]+)(&)";
    var regex = new RegExp(regex_string);
    var results = regex.exec(window.location.href);
    /*alert(" Result is 1: " + results[1] + " 2: " + results[2] + " 3: " + results[3]);*/

    if (results == null)
        return "";
    else
        return results[1];
}
|
||||
|
||||
/* sets a cookie */
|
||||
/* Store `value` under `c_name` as a document cookie with path=/. */
function setCookie(c_name, value) {
    var c_value = escape(value) + ";";
    document.cookie = c_name + "=" + c_value + " path=/";
}
|
||||
|
||||
/* gets a cookie */
|
||||
/* Return the (unescaped) value of cookie `c_name`, or undefined when no
   cookie of that name exists. */
function getCookie(c_name) {
    var cookies = document.cookie.split(";");
    var i, x, y;

    for (i = 0; i < cookies.length; ++i) {
        x = cookies[i].substr(0, cookies[i].indexOf("="));
        y = cookies[i].substr(cookies[i].indexOf("=") + 1);
        // Trim surrounding whitespace from the name before comparing.
        x = x.replace(/^\s+|\s+$/g,"");

        if (x == c_name)
            return unescape(y);
    }
}
|
||||
|
||||
/* start the test, simply go to site 1. */
|
||||
/* Benchmark entry point, invoked from the index page's onload.
   n = number of iterations, del = scroll delay (ms), y = scroll size (px). */
function startTest(n, del, y) {
    //var start_time = (new Date()).getTime();
    //setCookie("PreviousTime", start_time);

    // Mark that we are on the start page, so siteTest() uses these arguments
    // instead of recovering settings from URL parameters.
    init = true;

    generateSiteArray(n);
    // Seed the first navigation, encoding the run settings as URL parameters
    // so subsequent pages can recover them.
    siteTest(bb_site[0], globalSiteIndex, new Date().getTime(), "scrollSize=" + y + "&?scrollDelay=" + del + "&?iterations=" + n + "&?" + "StartPage");
    //siteTest(bb_site[0], globalSiteIndex, new Date().getTime(), "scrollDelay=" + del + "&?iterations=" + n + "&?" + "StartPage");
    //goToSite(bb_site[0], new Date().getTime());
}
|
||||
|
||||
/* jump to the next site */
|
||||
/* Navigate to `site`, recording the departure time both in a cookie and as
   a CGTPreviousTime URL parameter so the next page can compute load time. */
function goToSite(site) {
    curr_time = new Date().getTime();
    setCookie("CGTPreviousTime", curr_time);
    site+="?CGTPreviousTime="+curr_time+"&";
    window.location.href = site;
}
|
||||
|
||||
/*
|
||||
the test we want to run on the site.
|
||||
for now, simply scroll to the bottom
|
||||
and jump to the next site. in the
|
||||
future we will want to do some more
|
||||
realistic browsing tests.
|
||||
*/
|
||||
/* Per-page test driver: scroll the current page to the bottom, record its
   load time, then chain to the next site via goToSite(). `init` is true
   only on the start page (startTest supplies arguments directly); on site
   pages the run settings are recovered from URL parameters. */
function siteTest(nextSite, siteIndex, startTime, siteName) {
    if (!init) {
        // Re-entering on a site page: recover settings and accumulated results.
        var iterations = getURLParams("iterations");
        var params = getAllParams();
        var delay = getURLParams("scrollDelay");
        var verticalScroll = getURLParams("scrollSize");
        generateSiteArray(iterations);
        nextSite = bb_site[siteIndex] + params;
    }
    else {
        // Start page: fixed scroll settings for the initial page.
        var delay = 500;
        var verticalScroll = 500;
    }
    var cgtPreviousTime = getURLParams("CGTPreviousTime");
    var load_time = 0;
    siteIndex++;
    if (siteIndex > 1) {
        cur_time = new Date().getTime();
        // alert("previous " + cgtPreviousTime + " foo " + getCookie("CGTPreviousTime"));
        // Elapsed time since the previous page handed off to this one.
        load_time = (cur_time - cgtPreviousTime);
        setCookie("CGTLoadTime", load_time);
        // diff = cur_time-startTime;
        // alert("starttime "+startTime+" currtime "+ cur_time + " diff " + diff + "load_time " + load_time );
    }
    // Pause 1s on site pages (0 on the start page) before scrolling; the
    // continuation appends this page's load time to the URL and navigates on.
    setTimeout(function() {
        scrollToBottom(0, verticalScroll, delay,load_time,
            function(load_time_param){
                cur_time = new Date().getTime();
                load_time = (cur_time - startTime);
                //load_time = (cur_time - getCookie("PreviousTime"));
                // alert("Done with this site! " + window.cur_time + " " + startTime + " " + window.load_time);
                //alert("Done with this site! " + window.cur_time + " " + getCookie("PreviousTime") + " " + window.load_time);
                //goToSite(nextSite + "?iterations=" + iterations + "&?" + siteName + "=" + load_time + "&" + "?siteIndex=" + siteIndex + "&" );
                // alert("loadtime in cookie="+ getCookie("CGTLoadTime")+" loadtime in var="+load_time_param);
                goToSite(nextSite + "?" + siteName + "=" + load_time_param + "&" + "?siteIndex=" + siteIndex + "&" );
            }
        );},(siteIndex > 1) ? 1000 : 0);
}
|
||||
|
||||
/*
|
||||
scroll to the bottom of the page in
|
||||
num_y pixel increments. may want to
|
||||
do some horizontal scrolling in the
|
||||
future as well.
|
||||
*/
|
||||
/* Scroll the page toward the bottom in num_y-pixel steps, del ms apart,
   then invoke continuation k(load_time). num_iters is a script global;
   each page navigation reloads the script, which resets it. */
function scrollToBottom(num_x, num_y, del, load_time, k) {
    ++num_iters;
    // Remaining distance, assuming num_y pixels were covered per prior call.
    var diff = document.body.scrollHeight - num_y * num_iters;
    //var num_scrolls = 0;

    if (diff > num_y) {
        //self.scrollBy(num_x, num_y);
        //setTimeout(function(){self.scrollBy(num_x, num_y); /*diff -= 100;*/ scrollToBottom(num_x, num_y, k);}, 2);
        setTimeout(function(){self.scrollBy(num_x, num_y); /*diff -= 100;*/ scrollToBottom(num_x, num_y, del, load_time,k);}, del);
    }
    else{
        // Reached (near) the bottom: hand the measured load time onward.
        k(load_time);
    }
}
|
56
wlauto/workloads/bbench/patches/index_noinput.html
Normal file
56
wlauto/workloads/bbench/patches/index_noinput.html
Normal file
@@ -0,0 +1,56 @@
|
||||
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1.dtd">
|
||||
<!--
|
||||
Author: Anthony Gutierrez
|
||||
-->
|
||||
|
||||
<html xmlns="http://www.w3.org/1999/xhtml">
|
||||
|
||||
<head>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
|
||||
<title>University of Michigan - BBench 2.0</title>
|
||||
<script type="text/javascript" src="bbench.js"></script>
|
||||
<script type="text/javascript" src="forms.js"></script>
|
||||
</head>
|
||||
|
||||
<body onload="startTest(2,2000,500)">
|
||||
<!--
|
||||
<body>
|
||||
<img src="mich_engin.png" width="35%"/>
|
||||
<h2>University of Michigan BBench version 2.0</h2>
|
||||
|
||||
<form name="config_form">
|
||||
<b>Number of iterations:</b> <input type="text" name="numIterations" value="5" size="4" onchange="setIters();">
|
||||
<input type="button" value="-" name="iterPlusButton" onClick="document.config_form.numIterations.value=numItersDec(); return true;">
|
||||
<input type="button" value="+" name="iterMinusButton" onClick="document.config_form.numIterations.value=numItersInc(); return true;">
|
||||
(Number of times the page set is iterated through.)
|
||||
<br/><br/>
|
||||
|
||||
<b>Scroll Delay (ms):</b> <input type="text" name="scrollDelay" value="0" size="8" onchange="setScrollDelay();">
|
||||
<input type="button" value="-" name="scrollDelayPlusButton" onClick="document.config_form.scrollDelay.value=scrollDelayDec(); return true;">
|
||||
<input type="button" value="+" name="scrollDelayMinusButton" onClick="document.config_form.scrollDelay.value=scrollDelayInc(); return true;">
|
||||
(Number of milliseconds to pause before scrolling.)
|
||||
<br/><br/>
|
||||
|
||||
<b>Scroll Size:</b> <input type="text" name="scrollSize" value="500" size="8" onchange="setScrollSize();">
|
||||
<input type="button" value="-" name="scrollSizePlusButton" onClick="document.config_form.scrollSize.value=scrollSizeDec(); return true;">
|
||||
<input type="button" value="+" name="scrollSizeMinusButton" onClick="document.config_form.scrollSize.value=scrollSizeInc(); return true;">
|
||||
(Number of pixels to scroll.)
|
||||
<br/><br/>
|
||||
</form>
|
||||
|
||||
<p>
|
||||
<b>Click on the start button to begin the benchmark.</b>
|
||||
</p>
|
||||
<button onclick="startTest(numIters, scrollDelay, scrollSize)">start</button>
|
||||
|
||||
<p>
|
||||
If you use BBench in your work please cite our <a href="http://www.eecs.umich.edu/~atgutier/iiswc_2011.pdf">2011 IISWC paper</a>:<br/><br/>
|
||||
|
||||
A. Gutierrez, R.G. Dreslinksi, T.F. Wenisch, T. Mudge, A. Saidi, C. Emmons, and N. Paver. Full-System Analysis and Characterization
|
||||
of Interactive Smartphone Applications. <i>IEEE International Symposium on Workload Characterization</i>, 2011.
|
||||
</p>
|
||||
-->
|
||||
</body>
|
||||
|
||||
</html>
|
||||
|
158
wlauto/workloads/bbench/patches/results.html
Normal file
158
wlauto/workloads/bbench/patches/results.html
Normal file
@@ -0,0 +1,158 @@
|
||||
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1.dtd">
|
||||
<!--
|
||||
Author: Anthony Gutierrez
|
||||
-->
|
||||
|
||||
<html xmlns="http://www.w3.org/1999/xhtml">
|
||||
|
||||
<head>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
|
||||
<title>University of Michigan - BBench 2.0</title>
|
||||
<script type="text/javascript" src="bbench.js"></script>
|
||||
|
||||
<script type="text/javascript">
|
||||
var numTimesToExecute = getURLParams("iterations");
|
||||
|
||||
/* Close this window: the self-open trick lets a script close a window
   it did not originally open. */
function closeWindow() {
    window.open('','_self','');
    window.close();
}
|
||||
|
||||
/* Average of the "warm" runs: all entries of siteTimes except the last
   (the excluded entry is used as the cold-start time elsewhere on this
   page). With a single iteration the lone time is returned as-is. */
function averageWarm(siteTimes) {
    var sum = 0;

    if (numTimesToExecute == 1)
        return siteTimes[0];

    for (i = 0; i < numTimesToExecute - 1; ++i)
        sum = eval(sum + siteTimes[i]);

    return (sum / (numTimesToExecute - 1));
}
|
||||
|
||||
/* Population standard deviation of the warm runs (all but the last entry
   of siteTimes). Returns 0 when there was only a single iteration. */
function stdDevWarm(siteTimes) {
    var avg = averageWarm(siteTimes)
    var tmpArray = [];

    if (numTimesToExecute == 1)
        return 0;

    for (i = 0; i < numTimesToExecute - 1; ++i)
        tmpArray[i] = Math.pow((siteTimes[i] - avg), 2);

    // averageWarm over squared deviations gives the variance.
    avg = averageWarm(tmpArray);

    return Math.sqrt(avg);
}
|
||||
|
||||
/* Geometric mean of the per-site average warm times. */
function geoMean(avgTimes) {
    var prod = 1;

    for (i = 0; i < numWebsites; ++i)
        prod = eval(prod * avgTimes[i]);

    return Math.pow(prod, (1/numWebsites));
}
|
||||
</script>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<img src="mich_engin.png" width="35%"/>
|
||||
<h2>University of Michigan BBench version 2.0</h2>
|
||||
<h3>Results</h3>
|
||||
|
||||
<script type="text/javascript">
|
||||
// Recover each site's recorded load times from this page's URL parameters
// and compute cold/warm statistics per site.
var bbSiteColdTimes = [];
var bbSiteTimes = [];
var bbSiteAvgRunTime = [];
var bbSiteStdDev = [];
var bbSiteCoeffVar = [];
var bbSiteNames = ["amazon",
                   "bbc",
                   "cnn",
                   "craigslist",
                   "ebay",
//                 "espn",
                   "google",
                   "msn",
                   "slashdot",
                   "twitter"];
//                 "youtube"];

// Two independent copies of the URL: getAndRemoveURLParams consumes matches,
// and the "Individual Site Times" section below re-reads them from windowURL2.
var windowURL = new Object();
var windowURL2 = new Object();
windowURL.value = window.location.href;
windowURL2.value = window.location.href;

for (j = 0; j < numWebsites; ++j) {

    for (i = 0; i < numTimesToExecute; ++i) {
        // "- 0" coerces the extracted string to a number.
        var site_time = getAndRemoveURLParams(windowURL, bbSiteNames[j]) - 0;
        bbSiteTimes[i] = site_time;
    }

    // Last extracted entry is treated as the cold-start time (extraction
    // order follows the greedy regex in getAndRemoveURLParams -- presumably
    // the earliest, cold, visit comes out last; verify against bbench.js).
    bbSiteColdTimes[j] = bbSiteTimes[i - 1];
    bbSiteAvgRunTime[j] = averageWarm(bbSiteTimes);
    bbSiteStdDev[j] = stdDevWarm(bbSiteTimes);
    bbSiteCoeffVar[j] = (bbSiteStdDev[j] / bbSiteAvgRunTime[j]) * 100;
}

var bbSiteAvgGeoMean = geoMean(bbSiteAvgRunTime);
||||
</script>
|
||||
|
||||
<table border="1">
|
||||
<script type="text/javascript">
|
||||
// Render the per-site statistics as an HTML table.
document.write("<tr align=\"right\"><td>Site Name</td><td>Cold Start Time</td><td>Avg Warm Page Rendering Time (ms)</td><td>Std Dev of Warm Runs</td><td>%Coeff Var of Warm Runs</td>");
for (i = 0; i < numWebsites; ++i) {
    document.write("<tr align=\"right\">");
    document.write("<td>" + bbSiteNames[i] + "</td>");
    document.write("<td>" + bbSiteColdTimes[i] + "</td>");
    document.write("<td>" + bbSiteAvgRunTime[i].toFixed(2) + "</td>");
    document.write("<td>" + bbSiteStdDev[i].toFixed(2) + "</td>");
    document.write("<td>" + bbSiteCoeffVar[i].toFixed(2) + "</td>");
    document.write("</tr>");
}
|
||||
</script>
|
||||
</table>
|
||||
|
||||
<br />
|
||||
|
||||
<table border="1">
|
||||
<script type="text/javascript">
|
||||
// Overall score row. The console.log line emits the "metrics:Mean = ..."
// marker that the WA workload greps out of logcat.
document.write("<tr><td>Geometric Mean of Average Warm Runs</td><td>" + bbSiteAvgGeoMean.toFixed(2) + "</td></tr>");
console.log("metrics:" + "Mean = " + bbSiteAvgGeoMean.toFixed(2) + ":")
||||
</script>
|
||||
</table>
|
||||
|
||||
<h3>CSV version of the table:</h3>
|
||||
|
||||
<script type="text/javascript">
|
||||
// CSV rendering of the same table; each console.log "metrics:<site>,..."
// line is parsed out of logcat by the WA workload.
document.write("Site Name,Cold Start Time, Avg Warm Page Rendering Time (ms),Std Dev of Warm Runs,%Coeff Var of Warm Runs<br />");
for (i = 0; i < numWebsites; ++i) {
    document.write(bbSiteNames[i] + ",");
    document.write(bbSiteColdTimes[i] + ",");
    document.write(bbSiteAvgRunTime[i].toFixed(2) + ",");
    document.write(bbSiteStdDev[i].toFixed(2) + ",");
    document.write(bbSiteCoeffVar[i].toFixed(2) + "<br />");
    console.log("metrics:" + bbSiteNames[i] + "," + bbSiteColdTimes[i] + "," + bbSiteAvgRunTime[i].toFixed(2) + "," + bbSiteStdDev[i].toFixed(2) + "," + bbSiteCoeffVar[i].toFixed(2) + ":");
}

// List every raw load time, re-read from the second URL copy.
document.write("<h3>Individual Site Times:</h3>");
for (j = 0; j < numWebsites; ++j) {
    for (i = 0; i < numTimesToExecute; ++i) {
        var site_time = getAndRemoveURLParams(windowURL2, bbSiteNames[j]) - 0;
        bbSiteTimes[i] = site_time;
        document.write(bbSiteNames[j] + " load time: " + site_time + "<br />");
    }
    document.write("<br />");
}
// Redirect to the on-device bbench_server (localhost:3030) to signal that
// the benchmark run has completed.
setTimeout("window.location.href='http://localhost:3030/'", 1);
||||
</script>
|
||||
|
||||
<p>
|
||||
<b>Click the return button to go to the start page.</b>
|
||||
</p>
|
||||
<button onclick="window.location.href='index.html'">return</button>
|
||||
</body>
|
||||
|
||||
</html>
|
Reference in New Issue
Block a user