1
0
mirror of https://github.com/ARM-software/workload-automation.git synced 2025-01-18 20:11:20 +00:00

geekbench: Port Geekbench workload from WA2

Differences from original:
- Standard renaming and rearrangement of imports
- UiAutomation.java now uses the setup() and runWorkload() phases which weren't
  present before.
This commit is contained in:
Brendan Jackman 2017-09-28 16:03:35 +01:00
parent e5f13076ac
commit 84404da471
12 changed files with 970 additions and 0 deletions

View File

@ -0,0 +1,412 @@
# Copyright 2013-2017 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pylint: disable=E1101
import os
import re
import tempfile
import json
from wa import ApkUiautoWorkload, Parameter
from wa.framework.exception import ConfigError, WorkloadError
from wa.framework.plugin import Artifact
from wa.utils.misc import capitalize
class Geekbench(ApkUiautoWorkload):
    # Drives the Geekbench benchmark app via UI automation and extracts
    # scores either from logcat (version 2) or from the result files the app
    # writes into its package data directory (versions 3 and 4).
    name = 'geekbench'
    description = """
    Geekbench provides a comprehensive set of benchmarks engineered to quickly
    and accurately measure processor and memory performance.

    http://www.primatelabs.com/geekbench/

    From the website:

    Designed to make benchmarks easy to run and easy to understand, Geekbench
    takes the guesswork out of producing robust and reliable benchmark results.

    Geekbench scores are calibrated against a baseline score of 1,000 (which is
    the score of a single-processor Power Mac G5 @ 1.6GHz). Higher scores are
    better, with double the score indicating double the performance.

    The benchmarks fall into one of four categories:

        - integer performance.
        - floating point performance.
        - memory performance.
        - stream performance.

    Geekbench benchmarks: http://www.primatelabs.com/geekbench/doc/benchmarks.html

    Geekbench scoring methedology:
    http://support.primatelabs.com/kb/geekbench/interpreting-geekbench-scores
    """
    summary_metrics = ['score', 'multicore_score']

    # Supported APK versions; maps the version string onto the Android
    # package/activity used to launch that build of the app.
    versions = {
        '4.0.1': {
            'package': 'com.primatelabs.geekbench',
            'activity': '.HomeActivity',
        },
        # Version 3.4.1 was the final version 3 variant
        '3.4.1': {
            'package': 'com.primatelabs.geekbench',
            'activity': '.HomeActivity',
        },
        '3.0.0': {
            'package': 'com.primatelabs.geekbench3',
            'activity': '.HomeActivity',
        },
        '2': {
            'package': 'ca.primatelabs.geekbench2',
            'activity': '.HomeActivity',
        },
    }

    # Matches the logcat lines produced when v2 dumps its results WebView;
    # the HTML payload is captured in the 'content' group.
    begin_regex = re.compile(r'^\s*D/WebViewClassic.loadDataWithBaseURL\(\s*\d+\s*\)'
                             r'\s*:\s*(?P<content>\<.*)\s*$')
    # Used to strip HTML tags out of captured WebView content.
    replace_regex = re.compile(r'<[^>]*>')

    parameters = [
        Parameter('version', default=sorted(versions.keys())[-1], allowed_values=sorted(versions.keys()),
                  description='Specifies which version of the workload should be run.',
                  override=True),
        Parameter('times', kind=int, default=1,
                  description=('Specfies the number of times the benchmark will be run in a "tight '
                               'loop", i.e. without performaing setup/teardown inbetween.')),
        Parameter('timeout', kind=int, default=900,
                  description=('Timeout for a single iteration of the benchmark. This value is '
                               'multiplied by ``times`` to calculate the overall run timeout. ')),
        Parameter('disable_update_result', kind=bool, default=False,
                  description=('If ``True`` the results file will not be pulled from the targets '
                               '/data/data/com.primatelabs.geekbench folder. This allows the '
                               'workload to be run on unrooted targets and the results extracted '
                               'manually later.')),
    ]

    # Overridden to True by the corporate-edition subclass below.
    is_corporate = False

    @property
    def activity(self):
        # Launch activity for the currently selected APK version.
        return self.versions[self.version]['activity']

    @property
    def package(self):
        # Android package name for the currently selected APK version.
        return self.versions[self.version]['package']

    def __init__(self, *args, **kwargs):
        super(Geekbench, self).__init__(*args, **kwargs)
        # Forward run configuration to the UiAutomator APK side.
        self.gui.uiauto_params['version'] = self.version
        self.gui.uiauto_params['times'] = self.times
        self.gui.uiauto_params['is_corporate'] = self.is_corporate

    def setup(self, context):
        super(Geekbench, self).setup(context)
        # The overall timeout scales with the number of back-to-back runs.
        self.run_timeout = self.timeout * self.times
        self.exact_apk_version = self.version

    def update_output(self, context):
        """Dispatch to the extractor matching the major Geekbench version."""
        super(Geekbench, self).update_output(context)
        if not self.disable_update_result:
            # versiontuple() is a module-level helper defined below.
            major_version = versiontuple(self.version)[0]
            update_method = getattr(self, 'update_result_{}'.format(major_version))
            update_method(context)

    def validate(self):
        if (self.times > 1) and (self.version == '2'):
            raise ConfigError('times parameter is not supported for version 2 of Geekbench.')

    def update_result_2(self, context):
        # v2 results are scraped out of logcat and re-scored locally.
        score_calculator = GBScoreCalculator()
        score_calculator.parse(self.logcat_log)
        score_calculator.update_results(context)

    def update_result_3(self, context):
        # Pull every *gb3 result file from the app's data directory, pretty-print
        # it into the output directory, and extract per-section metrics.
        # NOTE(review): this method uses the WA2-style API
        # (context.result.add_metric / context.iteration_artifacts) while
        # update_result_4 below uses the WA3 context.add_metric/add_artifact
        # API — confirm against the framework which is correct here.
        outfile_glob = self.target.path.join(self.target.package_data_directory, self.package, 'files', '*gb3')
        on_target_output_files = [f.strip() for f in self.target.execute('ls {}'.format(outfile_glob),
                                                                         as_root=True).split('\n') if f]
        for i, on_target_output_file in enumerate(on_target_output_files):
            host_temp_file = tempfile.mktemp()
            self.target.pull(on_target_output_file, host_temp_file)
            host_output_file = os.path.join(context.output_directory, os.path.basename(on_target_output_file))
            with open(host_temp_file) as fh:
                data = json.load(fh)
            os.remove(host_temp_file)
            with open(host_output_file, 'w') as wfh:
                json.dump(data, wfh, indent=4)
            context.iteration_artifacts.append(Artifact('geekout', path=os.path.basename(on_target_output_file),
                                                        kind='data',
                                                        description='Geekbench 3 output from target.'))
            context.result.add_metric(namemify('score', i), data['score'])
            context.result.add_metric(namemify('multicore_score', i), data['multicore_score'])
            for section in data['sections']:
                context.result.add_metric(namemify(section['name'] + '_score', i), section['score'])
                context.result.add_metric(namemify(section['name'] + '_multicore_score', i),
                                          section['multicore_score'])

    def update_result_4(self, context):
        # Same flow as update_result_3, but for *gb4 files and with
        # per-workload scores inside each section.
        outfile_glob = self.target.path.join(self.target.package_data_directory, self.package, 'files', '*gb4')
        on_target_output_files = [f.strip() for f in self.target.execute('ls {}'.format(outfile_glob),
                                                                         as_root=True).split('\n') if f]
        for i, on_target_output_file in enumerate(on_target_output_files):
            host_temp_file = tempfile.mktemp()
            self.target.pull(on_target_output_file, host_temp_file)
            host_output_file = os.path.join(context.output_directory, os.path.basename(on_target_output_file))
            with open(host_temp_file) as fh:
                data = json.load(fh)
            os.remove(host_temp_file)
            with open(host_output_file, 'w') as wfh:
                json.dump(data, wfh, indent=4)
            context.add_artifact('geekout', host_output_file, kind='data',
                                 description='Geekbench 4 output from target.')
            context.add_metric(namemify('score', i), data['score'])
            context.add_metric(namemify('multicore_score', i), data['multicore_score'])
            for section in data['sections']:
                context.add_metric(namemify(section['name'] + '_score', i), section['score'])
                for workloads in section['workloads']:
                    workload_name = workloads['name'].replace(" ", "-")
                    context.add_metric(namemify(section['name'] + '_' + workload_name + '_score', i),
                                       workloads['score'])
class GBWorkload(object):
    """
    A single test run by the Geekbench app (not to be confused with WA's own
    workloads), e.g. performing compression or generating a Mandelbrot set.
    """

    # The hundreds digit of a workload ID selects its category.
    categories = [None, 'integer', 'float', 'memory', 'stream']

    # The 2003 entry-level Power Mac G5 is defined to score 1000 in every
    # category; all scores are scaled relative to it.
    pmac_g5_base_score = 1000

    # Multipliers that normalise reported units to kilo-units.
    units_conversion_map = {
        'K': 1,
        'M': 1000,
        'G': 1000000,
    }

    def __init__(self, wlid, name, pmac_g5_st_score, pmac_g5_mt_score):
        """
        :param wlid: Three-digit workload ID; uniquely identifies the workload
                     and also determines its category.
        :param name: Human-readable name of the workload.
        :param pmac_g5_st_score: Baseline single-threaded score on the 2003
                                 entry-level Power Mac G5.
        :param pmac_g5_mt_score: Baseline multi-threaded score on the 2003
                                 entry-level Power Mac G5.
        """
        self.wlid = wlid
        self.name = name
        self.pmac_g5_st_score = pmac_g5_st_score
        self.pmac_g5_mt_score = pmac_g5_mt_score
        self.category = self.categories[int(wlid) // 100]
        self.collected_results = []

    def add_result(self, value, units):
        """Record one raw result, normalised to kilo-units."""
        self.collected_results.append(self.convert_to_kilo(value, units))

    def convert_to_kilo(self, value, units):
        # Only the leading letter of the units string ('K', 'M' or 'G') matters.
        scale = self.units_conversion_map[units[0]]
        return value * scale

    def clear(self):
        """Discard all results collected so far (used between iterations)."""
        self.collected_results = []

    def get_scores(self):
        """
        Return a ``(single_threaded_score, multi_threaded_score)`` tuple.

        Geekbench performs four single-threaded iterations first and, for some
        workloads, four multi-threaded iterations afterwards. Four collected
        results therefore mean single-threaded only (the multi-threaded score
        is ``None``); eight mean single-threaded followed by multi-threaded.
        Only call this once result collection has finished.
        """
        count = len(self.collected_results)
        if count not in (4, 8):
            raise WorkloadError('Collected {} results for Geekbench {} workload;'
                                ' expecting either 4 or 8.'.format(count, self.name))
        st_score = self._calculate(self.collected_results[:4], self.pmac_g5_st_score)
        mt_score = None
        if count == 8:
            mt_score = self._calculate(self.collected_results[4:], self.pmac_g5_mt_score)
        return (st_score, mt_score)

    def _calculate(self, values, scale_factor):
        # Best of the four iterations, scaled against the G5 baseline.
        return max(values) * self.pmac_g5_base_score / scale_factor

    def __str__(self):
        return self.name

    __repr__ = __str__
class GBScoreCalculator(object):
    """
    Parses logcat output to extract raw Geekbench workload values and converts them into
    category and overall scores.
    """

    # Matches a single raw workload result line emitted by Geekbench 2.
    result_regex = re.compile(r'workload (?P<id>\d+) (?P<value>[0-9.]+) '
                              r'(?P<units>[a-zA-Z/]+) (?P<time>[0-9.]+)s')

    # Indicates contribution to the overall score.
    category_weights = {
        'integer': 0.3357231,
        'float': 0.3594,
        'memory': 0.1926489,
        'stream': 0.1054738,
    }

    #pylint: disable=C0326
    workloads = [
        #          ID    Name                 Power Mac ST  Power Mac MT
        GBWorkload(101, 'Blowfish',           43971,        40979),
        GBWorkload(102, 'Text Compress',      3202,         3280),
        GBWorkload(103, 'Text Decompress',    4112,         3986),
        GBWorkload(104, 'Image Compress',     8272,         8412),
        GBWorkload(105, 'Image Decompress',   16800,        16330),
        GBWorkload(107, 'Lua',                385,          385),

        GBWorkload(201, 'Mandelbrot',         665589,       653746),
        GBWorkload(202, 'Dot Product',        481449,       455422),
        GBWorkload(203, 'LU Decomposition',   889933,       877657),
        GBWorkload(204, 'Primality Test',     149394,       185502),
        GBWorkload(205, 'Sharpen Image',      2340,         2304),
        GBWorkload(206, 'Blur Image',         791,          787),

        GBWorkload(302, 'Read Sequential',    1226708,      None),
        GBWorkload(304, 'Write Sequential',   683782,       None),
        GBWorkload(306, 'Stdlib Allocate',    3739,         None),
        GBWorkload(307, 'Stdlib Write',       2070681,      None),
        GBWorkload(308, 'Stdlib Copy',        1030360,      None),

        GBWorkload(401, 'Stream Copy',        1367892,      None),
        GBWorkload(402, 'Stream Scale',       1296053,      None),
        GBWorkload(403, 'Stream Add',         1507115,      None),
        GBWorkload(404, 'Stream Triad',       1384526,      None),
    ]

    def __init__(self):
        # Map workload ID -> GBWorkload for quick lookup while parsing.
        self.workload_map = {wl.wlid: wl for wl in self.workloads}

    def parse(self, filepath):
        """
        Extract results from the specified file. The file should contain a logcat log of Geekbench execution.
        Iteration results in the log appear as 'I/geekbench' category entries in the following format::

        |     workload ID   value      units   timing
        |          \------------- |  ----/     ---/
        |                  |      |  |         |
        | I/geekbench(29026): [....] workload 101 132.9 MB/sec 0.0300939s
        |     |               |
        |     |                -----\
        |   label        random crap we don't care about

        """
        # Reset any results left over from a previous parse.
        for wl in self.workloads:
            wl.clear()
        with open(filepath) as fh:
            for line in fh:
                match = self.result_regex.search(line)
                if match:
                    wkload = self.workload_map[int(match.group('id'))]
                    wkload.add_result(float(match.group('value')), match.group('units'))

    def update_results(self, context):
        """
        Aggregate the parsed raw values into workload, section (category) and
        overall Geekbench scores, and add them as metrics on ``context``.

        http://support.primatelabs.com/kb/geekbench/interpreting-geekbench-2-scores

        From the website:

        Each workload's performance is compared against a baseline to determine a score. These
        scores are averaged together to determine an overall, or Geekbench, score for the system.

        Geekbench uses the 2003 entry-level Power Mac G5 as the baseline with a score of 1,000
        points. Higher scores are better, with double the score indicating double the performance.

        Geekbench provides three different kinds of scores:

            :Workload Scores: Each time a workload is executed Geekbench calculates a score based
                              on the computer's performance compared to the baseline
                              performance. There can be multiple workload scores for the
                              same workload as Geekbench can execute each workload multiple
                              times with different settings. For example, the "Dot Product"
                              workload is executed four times (single-threaded scalar code,
                              multi-threaded scalar code, single-threaded vector code, and
                              multi-threaded vector code) producing four "Dot Product" scores.

            :Section Scores: A section score is the average of all the workload scores for
                             workloads that are part of the section. These scores are useful
                             for determining the performance of the computer in a particular
                             area. See the section descriptions above for a summary on what
                             each section measures.

            :Geekbench Score: The Geekbench score is the weighted average of the four section
                              scores. The Geekbench score provides a way to quickly compare
                              performance across different computers and different platforms
                              without getting bogged down in details.

        """
        # FIX: ``defaultdict`` was used without ``collections`` being imported
        # anywhere in this module, so this method raised NameError as soon as
        # it ran; import it locally so the fix is self-contained.
        from collections import defaultdict
        scores_by_category = defaultdict(list)
        for wkload in self.workloads:
            st_score, mt_score = wkload.get_scores()
            scores_by_category[wkload.category].append(st_score)
            context.result.add_metric(wkload.name + ' (single-threaded)', int(st_score))
            if mt_score is not None:
                scores_by_category[wkload.category].append(mt_score)
                context.result.add_metric(wkload.name + ' (multi-threaded)', int(mt_score))
        overall_score = 0
        for category in scores_by_category:
            scores = scores_by_category[category]
            category_score = sum(scores) / len(scores)
            # Each category contributes a fixed weight to the overall score.
            overall_score += category_score * self.category_weights[category]
            context.result.add_metric(capitalize(category) + ' Score', int(category_score))
        context.result.add_metric('Geekbench Score', int(overall_score))
class GeekbenchCorproate(Geekbench):
    """Geekbench workload variant for the corporate edition of the app."""
    # NOTE(review): the class name misspells "Corporate". It is kept as-is
    # because renaming a public class is an interface change; the user-facing
    # plugin name below is spelled correctly.
    name = "geekbench-corporate"
    is_corporate = True
    # Only a single corporate APK version is supported.
    versions = ['4.1.0']
    # The activity name for this version doesn't match the package name
    activity = 'com.primatelabs.geekbench.HomeActivity'
    package = 'com.primatelabs.geekbench4.corporate'
    parameters = [
        Parameter('version',
                  default=sorted(versions)[-1], allowed_values=versions,
                  override=True)
    ]
def namemify(basename, i):
    """Append an index suffix to *basename*; index 0 (or falsy) gets no suffix."""
    if not i:
        return basename
    return '{} {}'.format(basename, i)
def versiontuple(v):
    """Convert a dotted version string (e.g. '4.0.1') to a tuple of ints."""
    return tuple(int(part) for part in v.split('.'))

Binary file not shown.

View File

@ -0,0 +1,35 @@
// Gradle build for the geekbench workload's UiAutomator instrumentation APK.
apply plugin: 'com.android.application'

// Reused both as the applicationId and as the output APK file name.
def packageName = "com.arm.wa.uiauto.geekbench"

android {
    compileSdkVersion 25
    buildToolsVersion "25.0.3"
    defaultConfig {
        applicationId "${packageName}"
        minSdkVersion 18
        targetSdkVersion 25
        testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
    }
    buildTypes {
        applicationVariants.all { variant ->
            variant.outputs.each { output ->
                // Emit the APK as <package>.apk under build/apk so the
                // sibling build.sh script can copy it from a fixed path.
                output.outputFile = file("$project.buildDir/apk/${packageName}.apk")
            }
        }
    }
}

dependencies {
    compile fileTree(dir: 'libs', include: ['*.jar'])
    compile 'com.android.support.test:runner:0.5'
    compile 'com.android.support.test:rules:0.5'
    compile 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'
    // WA's base UiAutomation classes; copied into libs/ by build.sh.
    compile(name: 'uiauto', ext:'aar')
}

repositories {
    // Resolves the local 'uiauto' AAR from the libs/ directory.
    flatDir {
        dirs 'libs'
    }
}

View File

@ -0,0 +1,13 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Manifest for the geekbench UiAutomator instrumentation APK. -->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
          package="com.arm.wa.uiauto.geekbench"
          android:versionCode="1"
          android:versionName="1.0">
    <!-- Registers the JUnit4 instrumentation runner; ${applicationId} is
         substituted by the Gradle build with the applicationId above. -->
    <instrumentation
        android:name="android.support.test.runner.AndroidJUnitRunner"
        android:targetPackage="${applicationId}"/>
</manifest>

View File

@ -0,0 +1,187 @@
/* Copyright 2013-2015 ARM Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.arm.wa.uiauto.geekbench;
import android.app.Activity;
import android.os.Bundle;
import android.support.test.runner.AndroidJUnit4;
import android.support.test.uiautomator.UiObject;
import android.support.test.uiautomator.UiObjectNotFoundException;
import android.support.test.uiautomator.UiSelector;
import android.view.KeyEvent;
import com.arm.wa.uiauto.BaseUiAutomation;
// import com.arm.wa.uiauto.UxPerfUiAutomation;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.concurrent.TimeUnit;
/**
 * UiAutomator-side driver for the Geekbench workload.
 *
 * Dismisses the EULA (non-corporate builds only), starts the benchmark the
 * requested number of times, and waits for the results screen. For version 2
 * the results are additionally scrolled through so that they are dumped to
 * logcat, from which the Python side scrapes them; for version 3 (< 3.4) the
 * Share action is triggered to make the app write its .gb3 result file.
 */
@RunWith(AndroidJUnit4.class)
public class UiAutomation extends BaseUiAutomation {

    public static String TAG = "geekbench";
    public static final long WAIT_TIMEOUT_5SEC = TimeUnit.SECONDS.toMillis(5);
    public static final long WAIT_TIMEOUT_20MIN = TimeUnit.SECONDS.toMillis(20 * 60);

    // Populated from the instrumentation parameters in initialize().
    Bundle params;
    String[] version;
    Integer majorVersion;
    Integer minorVersion;
    Boolean isCorporate;
    Integer times;

    @Before
    public void initialize() {
        params = getParams();
        version = params.getString("version").split("\\.");
        majorVersion = Integer.parseInt(version[0]);
        // NOTE(review): the Python side accepts version '2' (no dot), in which
        // case version[1] throws ArrayIndexOutOfBoundsException here — confirm.
        minorVersion = Integer.parseInt(version[1]);
        isCorporate = params.getBoolean("is_corporate");
        times = params.getInt("times");
    }

    /** Setup phase: dismiss the first-launch EULA (non-corporate builds). */
    @Test
    @Override
    public void setup() throws Exception {
        initialize_instrumentation();
        if (!isCorporate)
            dismissEula();
    }

    /** Run the benchmark {@code times} times back to back, per-version. */
    @Test
    @Override
    public void runWorkload() throws Exception {
        for (int i = 0; i < times; i++) {
            switch (majorVersion) {
            case 2:
                // In version 2, we scroll through the results WebView to make sure
                // all results appear on the screen, which causes them to be dumped into
                // logcat by the Linaro hacks.
                runBenchmarks();
                waitForResultsv2();
                scrollThroughResults();
                break;
            case 3:
                runBenchmarks();
                waitForResultsv3onwards();
                if (minorVersion < 4) {
                    // Attempting to share the results will generate the .gb3 file with
                    // results that can then be pulled from the device. This is not possible
                    // in version 2 of Geekbench (Share option was added later).
                    // Sharing is not necessary from 3.4.1 onwards as the .gb3 files are always
                    // created.
                    shareResults();
                }
                break;
            case 4:
                runCpuBenchmarks(isCorporate);
                waitForResultsv3onwards();
                break;
            default :
                throw new RuntimeException("Invalid version of Geekbench requested");
            }

            // Navigate back to the start screen before the next iteration.
            if (i < (times - 1)) {
                mDevice.pressBack();
                if (majorVersion < 4)
                    mDevice.pressBack(); // twice
            }
        }

        Bundle status = new Bundle();
        mInstrumentation.sendStatus(Activity.RESULT_OK, status);
    }

    /** Accept the EULA dialog shown on first launch. */
    public void dismissEula() throws Exception {
        UiObject acceptButton =
            //mDevice.findObject(new UiSelector().textContains("Accept")
            mDevice.findObject(new UiSelector().resourceId("android:id/button1")
                                               .className("android.widget.Button"));
        if (!acceptButton.waitForExists(WAIT_TIMEOUT_5SEC)) {
            throw new UiObjectNotFoundException("Could not find Accept button");
        }
        acceptButton.click();
    }

    /** Start a benchmark run via the "Run Benchmarks" button (v2/v3). */
    public void runBenchmarks() throws Exception {
        UiObject runButton =
            mDevice.findObject(new UiSelector().textContains("Run Benchmarks")
                                               .className("android.widget.Button"));
        if (!runButton.waitForExists(WAIT_TIMEOUT_5SEC)) {
            throw new UiObjectNotFoundException("Could not find Run button");
        }
        runButton.click();
    }

    /** Start a CPU benchmark run via the version 4 run button. */
    public void runCpuBenchmarks(boolean isCorporate) throws Exception {
        // The run button is at the bottom of the view and may be off the screen so swipe to be sure
        // NOTE(review): Direction is not imported in this file — presumably
        // inherited from BaseUiAutomation's scope; confirm it resolves.
        uiDeviceSwipe(Direction.DOWN, 50);

        String packageName = isCorporate ? "com.primatelabs.geekbench4.corporate"
                                         : "com.primatelabs.geekbench";
        UiObject runButton =
            mDevice.findObject(new UiSelector().resourceId(packageName + ":id/runCpuBenchmarks")
                                               .className("android.widget.Button"));
        if (!runButton.waitForExists(WAIT_TIMEOUT_5SEC)) {
            throw new UiObjectNotFoundException("Could not find Run button");
        }
        runButton.click();
    }

    /** Block until the v2 results WebView appears (up to 20 minutes). */
    public void waitForResultsv2() throws Exception {
        UiSelector selector = new UiSelector();
        UiObject resultsWebview = mDevice.findObject(selector.className("android.webkit.WebView"));
        if (!resultsWebview.waitForExists(WAIT_TIMEOUT_20MIN)) {
            throw new UiObjectNotFoundException("Did not see Geekbench results screen.");
        }
    }

    /** Block until the "Running..." text disappears (v3 and later). */
    public void waitForResultsv3onwards() throws Exception {
        UiSelector selector = new UiSelector();
        UiObject runningTextView = mDevice.findObject(selector.textContains("Running")
                                                              .className("android.widget.TextView"));
        if (!runningTextView.waitUntilGone(WAIT_TIMEOUT_20MIN)) {
            throw new UiObjectNotFoundException("Did not get to Geekbench results screen.");
        }
    }

    /** Page through the v2 results so every entry gets rendered (and logged). */
    public void scrollThroughResults() throws Exception {
        UiSelector selector = new UiSelector();
        mDevice.pressKeyCode(KeyEvent.KEYCODE_PAGE_DOWN);
        sleep(1);
        mDevice.pressKeyCode(KeyEvent.KEYCODE_PAGE_DOWN);
        sleep(1);
        mDevice.pressKeyCode(KeyEvent.KEYCODE_PAGE_DOWN);
        sleep(1);
        mDevice.pressKeyCode(KeyEvent.KEYCODE_PAGE_DOWN);
    }

    /** Trigger the Share menu item so the app writes its result file (v3 < 3.4). */
    public void shareResults() throws Exception {
        sleep(2); // transition
        UiSelector selector = new UiSelector();
        mDevice.pressMenu();
        UiObject shareButton = mDevice.findObject(selector.text("Share")
                                                          .className("android.widget.TextView"));
        shareButton.waitForExists(WAIT_TIMEOUT_5SEC);
        shareButton.click();
    }
}

View File

@ -0,0 +1,23 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
    repositories {
        jcenter()
    }
    dependencies {
        // Android Gradle plugin used to build the :app module.
        classpath 'com.android.tools.build:gradle:2.3.2'

        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}

allprojects {
    repositories {
        jcenter()
    }
}

// Removes all build output.
task clean(type: Delete) {
    delete rootProject.buildDir
}

View File

@ -0,0 +1,43 @@
#!/bin/bash
# Builds the geekbench UiAutomator APK with Gradle and copies it into the
# workload directory so WA can install it on the target.
set -eux

# CD into build dir if possible - allows building from any directory
script_path='.'
if `readlink -f $0 &>/dev/null`; then
    script_path=`readlink -f $0 2>/dev/null`
fi
script_dir=`dirname $script_path`
cd $script_dir

# Ensure gradlew exists before starting
if [[ ! -f gradlew ]]; then
    echo 'gradlew file not found! Check that you are in the right directory.'
    exit 9
fi

# Copy base class library from wa dist
libs_dir=app/libs
# NOTE(review): the embedded snippet uses a Python 2 print *statement*; it is
# a SyntaxError under a Python 3 interpreter — confirm the supported version.
base_class=`python -c "import os, wa; print os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar')"`
mkdir -p $libs_dir
cp $base_class $libs_dir

# Build and return appropriate exit code if failed
# gradle build
# NOTE(review): with 'set -e' above, a gradlew failure aborts the script
# immediately, so the $exit_code check below can only ever see 0 (dead code).
./gradlew clean :app:assembleDebug
exit_code=$?
if [[ $exit_code -ne 0 ]]; then
    echo "ERROR: 'gradle build' exited with code $exit_code"
    exit $exit_code
fi

# If successful move APK file to workload folder (overwrite previous)
package=com.arm.wa.uiauto.geekbench
rm -f ../$package
if [[ -f app/build/apk/$package.apk ]]; then
    cp app/build/apk/$package.apk ../$package.apk
else
    echo 'ERROR: UiAutomator apk could not be found!'
    exit 9
fi

Binary file not shown.

View File

@ -0,0 +1,6 @@
#Wed May 03 15:42:44 BST 2017
# Auto-generated Gradle wrapper configuration: where the wrapper caches and
# unpacks distributions, and which Gradle distribution it downloads.
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-3.3-all.zip

160
wa/workloads/geekbench/uiauto/gradlew vendored Executable file
View File

@ -0,0 +1,160 @@
#!/usr/bin/env bash

##############################################################################
##
##  Gradle start up script for UN*X
##
##############################################################################
# NOTE(review): vendored, auto-generated Gradle wrapper script (Gradle 3.3
# era). Comments below are annotations only; do not hand-edit the logic —
# regenerate via 'gradle wrapper' instead.

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""

APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"

warn ( ) {
    echo "$*"
}

die ( ) {
    echo
    echo "$*"
    echo
    exit 1
}

# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
case "`uname`" in
  CYGWIN* )
    cygwin=true
    ;;
  Darwin* )
    darwin=true
    ;;
  MINGW* )
    msys=true
    ;;
esac

# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
    ls=`ls -ld "$PRG"`
    link=`expr "$ls" : '.*-> \(.*\)$'`
    if expr "$link" : '/.*' > /dev/null; then
        PRG="$link"
    else
        PRG=`dirname "$PRG"`"/$link"
    fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null

CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar

# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
        # IBM's JDK on AIX uses strange locations for the executables
        JAVACMD="$JAVA_HOME/jre/sh/java"
    else
        JAVACMD="$JAVA_HOME/bin/java"
    fi
    if [ ! -x "$JAVACMD" ] ; then
        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
    fi
else
    JAVACMD="java"
    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi

# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
    MAX_FD_LIMIT=`ulimit -H -n`
    if [ $? -eq 0 ] ; then
        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
            MAX_FD="$MAX_FD_LIMIT"
        fi
        ulimit -n $MAX_FD
        if [ $? -ne 0 ] ; then
            warn "Could not set maximum file descriptor limit: $MAX_FD"
        fi
    else
        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
    fi
fi

# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi

# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
    JAVACMD=`cygpath --unix "$JAVACMD"`

    # We build the pattern for arguments to be converted via cygpath
    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
    SEP=""
    for dir in $ROOTDIRSRAW ; do
        ROOTDIRS="$ROOTDIRS$SEP$dir"
        SEP="|"
    done
    OURCYGPATTERN="(^($ROOTDIRS))"
    # Add a user-defined pattern to the cygpath arguments
    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
    fi
    # Now convert the arguments - kludge to limit ourselves to /bin/sh
    i=0
    for arg in "$@" ; do
        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
        CHECK2=`echo "$arg"|egrep -c "^-"`                                 ### Determine if an option

        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition
            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
        else
            eval `echo args$i`="\"$arg\""
        fi
        i=$((i+1))
    done
    case $i in
        (0) set -- ;;
        (1) set -- "$args0" ;;
        (2) set -- "$args0" "$args1" ;;
        (3) set -- "$args0" "$args1" "$args2" ;;
        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
    esac
fi

# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
function splitJvmOpts() {
    JVM_OPTS=("$@")
}
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"

exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"

View File

@ -0,0 +1,90 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem  Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem NOTE(review): vendored, auto-generated Gradle wrapper script. Annotations
@rem only; do not hand-edit the logic - regenerate via 'gradle wrapper'.

@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal

@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=

set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%

@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome

set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init

echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe

if exist "%JAVA_EXE%" goto init

echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:init
@rem Get command-line arguments, handling Windowz variants

if not "%OS%" == "Windows_NT" goto win9xME_args
if "%@eval[2+2]" == "4" goto 4NT_args

:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2

:win9xME_args_slurp
if "x%~1" == "x" goto execute

set CMD_LINE_ARGS=%*

goto execute

:4NT_args
@rem Get arguments from the 4NT Shell from JP Software
set CMD_LINE_ARGS=%$

:execute
@rem Setup the command line

set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar

@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%

:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd

:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1

:mainEnd
if "%OS%"=="Windows_NT" endlocal

:omega

View File

@ -0,0 +1 @@
// Declares the single Gradle sub-project (the UiAutomator APK) in this build.
include ':app'