From ab48dcadc4eb74a126a8d76bece720ab000b1452 Mon Sep 17 00:00:00 2001
From: Milosz Wasilewski
Date: Thu, 30 Mar 2017 18:54:46 +0100
Subject: automated: android: add refactored apk-automation

Change-Id: I5a81b3c4317dc56f90df37c5a8f83d2547542fab
Signed-off-by: Milosz Wasilewski
---
 automated/android/apk-automation/.gitignore | 3 +
 automated/android/apk-automation/antutu6.py | 141 ++++++++++++
 automated/android/apk-automation/apk-automation.sh | 56 +++++
 .../android/apk-automation/apk-automation.yaml | 43 ++++
 automated/android/apk-automation/benchmarkpi.py | 53 +++++
 automated/android/apk-automation/caffeinemark.py | 74 ++++++
 automated/android/apk-automation/cf-bench.py | 92 ++++++++
 .../android/apk-automation/common/__init__.py | 256 +++++++++++++++++++++
 automated/android/apk-automation/gearses2eclair.py | 52 +++++
 automated/android/apk-automation/geekbench3.py | 126 ++++++++++
 .../apk-automation/glbenchmark25-preferences.xml | 14 ++
 automated/android/apk-automation/glbenchmark25.py | 122 ++++++++++
 automated/android/apk-automation/javawhetstone.py | 63 +++++
 automated/android/apk-automation/jbench.py | 40 ++++
 automated/android/apk-automation/linpack.py | 54 +++++
 automated/android/apk-automation/main.py | 21 ++
 automated/android/apk-automation/quadrantpro.py | 47 ++++
 automated/android/apk-automation/rl-sqlite.py | 61 +++++
 automated/android/apk-automation/scimark.py | 46 ++++
 automated/android/apk-automation/vellamo3.py | 141 ++++++++++++
 20 files changed, 1505 insertions(+)
 create mode 100644 automated/android/apk-automation/.gitignore
 create mode 100755 automated/android/apk-automation/antutu6.py
 create mode 100755 automated/android/apk-automation/apk-automation.sh
 create mode 100644 automated/android/apk-automation/apk-automation.yaml
 create mode 100755 automated/android/apk-automation/benchmarkpi.py
 create mode 100755 automated/android/apk-automation/caffeinemark.py
 create mode 100755 automated/android/apk-automation/cf-bench.py
 create mode 100755 automated/android/apk-automation/common/__init__.py
 create mode 100755 automated/android/apk-automation/gearses2eclair.py
 create mode 100755 automated/android/apk-automation/geekbench3.py
 create mode 100644 automated/android/apk-automation/glbenchmark25-preferences.xml
 create mode 100755 automated/android/apk-automation/glbenchmark25.py
 create mode 100755 automated/android/apk-automation/javawhetstone.py
 create mode 100755 automated/android/apk-automation/jbench.py
 create mode 100755 automated/android/apk-automation/linpack.py
 create mode 100755 automated/android/apk-automation/main.py
 create mode 100755 automated/android/apk-automation/quadrantpro.py
 create mode 100755 automated/android/apk-automation/rl-sqlite.py
 create mode 100755 automated/android/apk-automation/scimark.py
 create mode 100755 automated/android/apk-automation/vellamo3.py

diff --git a/automated/android/apk-automation/.gitignore b/automated/android/apk-automation/.gitignore
new file mode 100644
index 0000000..dbfc7be
--- /dev/null
+++ b/automated/android/apk-automation/.gitignore
@@ -0,0 +1,3 @@
+*.pyc
+apks
+output
diff --git a/automated/android/apk-automation/antutu6.py b/automated/android/apk-automation/antutu6.py
new file mode 100755
index 0000000..381bc4a
--- /dev/null
+++ b/automated/android/apk-automation/antutu6.py
@@ -0,0 +1,141 @@
+import time
+from common import ApkTestRunner
+
+
+class ApkRunnerImpl(ApkTestRunner):
+    def __init__(self, config):
+        self.config = config
+        self.apk_3d_name = "antutu_benchmark_v6_3d_f1.apk"
+        self.apk_3d_pkg =
"com.antutu.benchmark.full" + self.config['apk_file_name'] = "AnTuTu6.0.4.apk" + self.config['apk_package'] = "com.antutu.ABenchMark" + self.config['activity'] = "com.antutu.ABenchMark/.ABenchMarkStart" + super(ApkRunnerImpl, self).__init__(self.config) + + def setUp(self): + self.download_apk(self.apk_3d_name) + self.uninstall_apk(self.apk_3d_pkg) + self.install_apk(self.apk_3d_name) + super(ApkRunnerImpl, self).setUp() + + def tearDown(self): + super(ApkRunnerImpl, self).tearDown() + self.uninstall_apk(self.apk_3d_pkg) + + def parseResult(self): + test_items = [u'3D', u'UX', u'CPU', u'RAM'] + test_subitems = { + u'3D': [u'3D [Garden]', u'3D [Marooned]'], + u'UX': [u'UX Data Secure', u'UX Data process', u'UX Strategy games', u'UX Image process', u'UX I/O performance'], + u'CPU': [u'CPU Mathematics', u'CPU Common Use', u'CPU Multi-Core'], + u'RAM': [] + } + antutu_sum = 0 + for item in test_items: + self.logger.info("Trying to find result id_root for test suite: %s" % item) + found_view = False + while not found_view: + self.dump_always() + id_root = self.vc.findViewWithText(item) + if id_root: + self.logger.info("Found result id_root for test suite: %s" % item) + found_view = True + else: + self.dump_always() + self.logger.info("Press DPAD_DOWN to find %s item" % item) + self.device.press('DPAD_DOWN') + time.sleep(2) + + self.logger.info("Trying to find the score value for test suite: %s" % item) + found_view = False + while not found_view: + self.dump_always() + id_root = self.vc.findViewWithText(item) + score_view = self.vc.findViewById("com.antutu.ABenchMark:id/tv_score_value", + id_root.getParent()) + if score_view: + score = score_view.getText().strip() + self.logger.info("Found %s score: %s" % (item, score)) + try: + score = int(score) + self.report_result('antutu6-%s' % item.lower(), 'pass', score, 'points') + antutu_sum = antutu_sum + int(score) + except ValueError: + self.report_result('antutu6-%s' % item.lower(), 'fail') + + found_view = True + arrow_icon = self.vc.findViewById("com.antutu.ABenchMark:id/iv_arrow", id_root.getParent()) + if arrow_icon: + arrow_icon.touch() + else: + self.logger.info("Press DPAD_DOWN to find %s item value" % item.lower()) + self.device.press('DPAD_DOWN') + time.sleep(2) + + for sub_item in test_subitems[item]: + self.logger.info("Trying to find score value for sub item: %s" % sub_item) + found_view = False + while not found_view: + self.dump_always() + subitem_obj = self.vc.findViewWithText(sub_item) + if subitem_obj: + subitem_value_obj = self.vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/tv_value", subitem_obj.getParent()) + subitem_key = sub_item.replace("[", '').replace("]", '') + subitem_key = subitem_key.replace("/", '') + subitem_key = subitem_key.replace(' ', '-') + subitem_score = subitem_value_obj.getText().strip() + self.logger.info("Found %s score: %s" % (subitem_key, subitem_score)) + try: + subitem_score = int(subitem_score) + self.report_result('antutu6-%s' % subitem_key.lower(), 'pass', subitem_score, 'points') + except ValueError: + self.report_result('antutu6-%s' % subitem_key.lower(), 'fail') + found_view = True + else: + self.logger.info("Press DPAD_DOWN to find sub item: %s" % sub_item) + self.device.press('DPAD_DOWN') + time.sleep(2) + self.report_result('antutu6-sum', 'pass', antutu_sum, 'points') + + def execute(self): + # Enable 64-bit + time.sleep(10) + + finished = False + while not finished: + self.dump_always() + test_region = self.vc.findViewById("com.antutu.ABenchMark:" + "id/start_test_region") + if test_region: 
+ test_region.touch() + + time.sleep(30) + self.dump_always() + text_qr_code = self.vc.findViewWithText(u'QRCode of result') + if text_qr_code: + finished = True + self.logger.info("Benchmark test finished!") + + stop_msg = 'Unfortunately, AnTuTu 3DBench has stopped.' + msg_stopped = self.vc.findViewWithText(stop_msg) + if msg_stopped: + btn_ok = self.vc.findViewWithTextOrRaise(u'OK') # nopep8 + btn_ok.touch() + + # cancel the update + update_msg = "New update available" + update_window = self.vc.findViewWithText(update_msg) + if update_window: + btn_cancel = self.vc.findViewWithTextOrRaise(u'Cancel') + btn_cancel.touch() + + msg = "Please allow the permissions we need for test" + need_permission_msg = self.vc.findViewWithText(msg) + if need_permission_msg: + btn_ok = self.vc.findViewWithTextOrRaise(u'OK') + btn_ok.touch() + + allow_permission_btn = self.vc.findViewById('com.android.packageinstaller' + ':id/permission_allow_button') + if allow_permission_btn: + allow_permission_btn.touch() diff --git a/automated/android/apk-automation/apk-automation.sh b/automated/android/apk-automation/apk-automation.sh new file mode 100755 index 0000000..5ecdba8 --- /dev/null +++ b/automated/android/apk-automation/apk-automation.sh @@ -0,0 +1,56 @@ +#!/bin/sh +# shellcheck disable=SC1091 + +. ./../../lib/sh-test-lib +. ./../../lib/android-test-lib + +SKIP_INSTALL="false" +ANDROID_SERIAL="" +BOOT_TIMEOUT="300" +LOOPS="1" +TEST_NAME="linpack" +APK_DIR="./apks" +BASE_URL="http://testdata.validation.linaro.org/apks/" + +usage() { + echo "Usage: $0 [-S ] [-s ] [-t ] [-l ] [-n ] [-d ] ['-u ']" 1>&2 + exit 1 +} + +while getopts ":S:s:t:l:n:d:u:" opt; do + case "${opt}" in + S) SKIP_INSTALL="${OPTARG}" ;; + s) ANDROID_SERIAL="${OPTARG}" ;; + t) BOOT_TIMEOUT="${OPTARG}" ;; + l) LOOPS="${OPTARG}" ;; + n) TEST_NAME="${OPTARG}" ;; + d) APK_DIR="${OPTARG}" ;; + u) BASE_URL="${OPTARG}" ;; + *) usage ;; + esac +done + +OUTPUT="$(pwd)/output/${TEST_NAME}" +export OUTPUT +RESULT_FILE="${OUTPUT}/result.txt" +export RESULT_FILE + +! check_root && error_msg "Please run this script as superuser!" +if [ "${SKIP_INSTALL}" = "true" ] || [ "${SKIP_INSTALL}" = "True" ]; then + info_msg "Package installation skipped" +else + install_deps "git python python-lxml python-pil python-setuptools curl tar xz-utils" "${SKIP_INSTALL}" + git clone https://github.com/dtmilano/AndroidViewClient + ( + cd AndroidViewClient/ || exit + python setup.py install + ) +fi + +initialize_adb +adb_root +wait_boot_completed "${BOOT_TIMEOUT}" +disable_suspend + +info_msg "device-${ANDROID_SERIAL}: About to run ${TEST_NAME}..." +python main.py -l "${LOOPS}" -n "${TEST_NAME}" -d "${APK_DIR}" -u "${BASE_URL}" diff --git a/automated/android/apk-automation/apk-automation.yaml b/automated/android/apk-automation/apk-automation.yaml new file mode 100644 index 0000000..6fcafe7 --- /dev/null +++ b/automated/android/apk-automation/apk-automation.yaml @@ -0,0 +1,43 @@ +metadata: + name: apk-automation + format: "Lava-Test-Shell Test Definition 1.0" + description: "Automate testing with Android APK." + maintainer: + - milosz.wasilewski@linaro.org + - chase.qi@linaro.org + os: + - android + devices: + - juno + - hi6220-hikey + - x15 + scope: + - performance + +params: + SKIP_INSTALL: "false" + # Timeout for wait_boot_completed in seconds. + BOOT_TIMEOUT: "300" + # Specify adb device SN if more then one device connected. 
+ ANDROID_SERIAL: "" + TEST_NAME: "linpack" + LOOPS: "13" + APK_DIR: "./apks" + BASE_URL: "http://testdata.validation.linaro.org/apks/" + # Specify url and token for publishing artifacts. + ARTIFACTORIAL_URL: "https://archive.validation.linaro.org/artifacts/team/qa/" + # For safety reasons, please set 'ARTIFACTORIAL_TOKEN' variable in job definition with + # 'secrets' dictionary, and set job visibility to personal or group. + # Refer to https://validation.linaro.org/static/docs/v2/publishing-artifacts.html + ARTIFACTORIAL_TOKEN: "" + +run: + steps: + - cd ./automated/android/apk-automation + - ./apk-automation.sh -S "${SKIP_INSTALL}" -t "${BOOT_TIMEOUT}" -s "${ANDROID_SERIAL}" -n "${TEST_NAME}" -l "${LOOPS}" -d "${APK_DIR}" -u "${BASE_URL}" + # Upload test output to artifactorial. + - cp "./output/${TEST_NAME}/result.txt" "./output/result.txt" + - tar caf "output-${TEST_NAME}.tar.xz" "./output" + - ../../utils/upload-to-artifactorial.sh -a "output-${TEST_NAME}.tar.xz" -u "${ARTIFACTORIAL_URL}" -t "${ARTIFACTORIAL_TOKEN}" + # Send test result to LAVA. + - ../../utils/send-to-lava.sh "./output/result.txt" diff --git a/automated/android/apk-automation/benchmarkpi.py b/automated/android/apk-automation/benchmarkpi.py new file mode 100755 index 0000000..94e198d --- /dev/null +++ b/automated/android/apk-automation/benchmarkpi.py @@ -0,0 +1,53 @@ +import sys +import time +from common import ApkTestRunner +from com.dtmilano.android.viewclient import ViewNotFoundException + + +class ApkRunnerImpl(ApkTestRunner): + def __init__(self, config): + self.config = config + self.config['apk_file_name'] = "gr.androiddev.BenchmarkPi-1.apk" + self.config['apk_package'] = "gr.androiddev.BenchmarkPi" + self.config['activity'] = "gr.androiddev.BenchmarkPi/.BenchmarkPi" + super(ApkRunnerImpl, self).__init__(self.config) + + def execute(self): + time.sleep(2) + self.dump_always() + start_button = self.vc.findViewByIdOrRaise("gr.androiddev.BenchmarkPi:id/Button01") + start_button.touch() + + finished = False + while not finished: + time.sleep(1) + try: + self.vc.dump(window='-1') + self.vc.findViewByIdOrRaise("android:id/message") + finished = True + except ViewNotFoundException: + pass + except RuntimeError as e: + self.logger.error(e) + self.logger.info('benchmark pi finished') + + def parseResult(self): + return_text = self.vc.findViewByIdOrRaise("android:id/message").getText().split(" ") + + flagwordA = "calculated" + flagwordB = "Pi" + + if flagwordA in return_text and flagwordB in return_text: + if return_text.index(flagwordB) == return_text.index(flagwordA) + 1: + score_number = return_text[return_text.index(flagwordA) + 3] + score_unit = return_text[return_text.index(flagwordA) + 4].split("!")[0] + self.logger.info('Valid test result found: %s %s' % (score_number, score_unit)) + run_result = "pass" + else: + self.logger.error("Output string changed, parser need to be updated!") + sys.exit(1) + else: + self.logger.error("Can not find keyword which is supposed to show up!") + sys.exit(1) + + self.report_result('benchmarkpi', run_result, score_number, score_unit) diff --git a/automated/android/apk-automation/caffeinemark.py b/automated/android/apk-automation/caffeinemark.py new file mode 100755 index 0000000..246abe9 --- /dev/null +++ b/automated/android/apk-automation/caffeinemark.py @@ -0,0 +1,74 @@ +import time +from common import ApkTestRunner +from com.dtmilano.android.viewclient import ViewNotFoundException + + +class ApkRunnerImpl(ApkTestRunner): + def __init__(self, config): + self.config = config + 
self.config['apk_file_name'] = 'com.flexycore.caffeinemark-1.apk' + self.config['apk_package'] = 'com.flexycore.caffeinemark' + self.config['activity'] = 'com.flexycore.caffeinemark/.Application' + super(ApkRunnerImpl, self).__init__(self.config) + + def setUp(self): + self.call_adb('shell setenforce 0') + super(ApkRunnerImpl, self).setUp() + + def tearDown(self): + self.call_adb('shell setenforce 1') + super(ApkRunnerImpl, self).tearDown() + + def execute(self): + time.sleep(2) + self.dump_always() + start_button = self.vc.findViewByIdOrRaise("com.flexycore.caffeinemark:id/startButton") + start_button.touch() + + finished = False + while not finished: + try: + self.dump_always() + self.vc.findViewByIdOrRaise("com.flexycore.caffeinemark:id/testResultsCellOneTitle") + finished = True + except ViewNotFoundException: + self.logger.info("ViewNotFoundException when tried to find com.flexycore.caffeinemark:id/testResultsCellOneTitle") + pass + except RuntimeError: + self.logger.info("RuntimeError when tried to find com.flexycore.caffeinemark:id/testResultsCellOneTitle") + pass + self.logger.info("benchmark finished") + + def parseResult(self): + total_score = self.vc.findViewByIdOrRaise("com.flexycore.caffeinemark:id/testResultEntryOverAllScore").getText() + self.report_result("Caffeinemark-score", 'pass', total_score, 'points') + + details_button = self.vc.findViewByIdOrRaise("com.flexycore.caffeinemark:id/testResultsDetailsButton") + details_button.touch() + + time.sleep(2) + self.dump_always() + + sieve_name = self.vc.findViewByIdOrRaise("id/no_id/9").getText() + sieve_score = self.vc.findViewByIdOrRaise("id/no_id/10").getText() + self.report_result("Caffeinemark-Sieve-score", 'pass', sieve_score, 'points') + + loop_name = self.vc.findViewByIdOrRaise("id/no_id/13").getText() + loop_score = self.vc.findViewByIdOrRaise("id/no_id/14").getText() + self.report_result("Caffeinemark-Loop-score", 'pass', loop_score, 'points') + + logic_name = self.vc.findViewByIdOrRaise("id/no_id/17").getText() + logic_score = self.vc.findViewByIdOrRaise("id/no_id/18").getText() + self.report_result("Caffeinemark-Collect-score", 'pass', logic_score, 'points') + + string_name = self.vc.findViewByIdOrRaise("id/no_id/21").getText() + string_score = self.vc.findViewByIdOrRaise("id/no_id/22").getText() + self.report_result("Caffeinemark-String-score", 'pass', string_score, 'points') + + float_name = self.vc.findViewByIdOrRaise("id/no_id/25").getText() + float_score = self.vc.findViewByIdOrRaise("id/no_id/26").getText() + self.report_result("Caffeinemark-Float-score", 'pass', float_score, 'points') + + method_name = self.vc.findViewByIdOrRaise("id/no_id/29").getText() + method_score = self.vc.findViewByIdOrRaise("id/no_id/30").getText() + self.report_result("Caffeinemark-Method-score", 'pass', method_score, 'points') diff --git a/automated/android/apk-automation/cf-bench.py b/automated/android/apk-automation/cf-bench.py new file mode 100755 index 0000000..afe6bab --- /dev/null +++ b/automated/android/apk-automation/cf-bench.py @@ -0,0 +1,92 @@ +import re +import time +from common import ApkTestRunner +from com.dtmilano.android.viewclient import ViewNotFoundException + + +class ApkRunnerImpl(ApkTestRunner): + def __init__(self, config): + self.config = config + self.config['apk_file_name'] = 'CF-Bench-Pro-1.3.apk' + self.config['apk_package'] = 'eu.chainfire.cfbench' + self.config['activity'] = 'eu.chainfire.cfbench/.MainActivity' + super(ApkRunnerImpl, self).__init__(self.config) + + def execute(self): + time.sleep(2) + 
self.dump_always()
+
+        # Start test button
+        start_button = self.vc.findViewWithTextOrRaise("Full Benchmark")
+        start_button.touch()
+
+        # Wait while cf-bench running
+        finished = False
+        while not finished:
+            try:
+                time.sleep(5)
+                self.dump_always()
+                self.vc.findViewByIdOrRaise("eu.chainfire.cfbench:id/admob_preference_layout")
+                finished = True
+            except ViewNotFoundException:
+                pass
+            except RuntimeError:
+                pass
+            except ValueError:
+                pass
+        print("Benchmark Finished")
+
+    def __get_score_with_content_desc(self, content_desc, offset=1):
+        try:
+            found_score_view = False
+            while not found_score_view:
+                score_view = self.vc.findViewWithText(content_desc)
+                if not score_view:
+                    self.device.press('DPAD_DOWN')
+                    time.sleep(2)
+                    try:
+                        self.dump_always()
+                    except RuntimeError:
+                        pass
+                    except ValueError:
+                        pass
+                else:
+                    found_score_view = True
+
+            score_uid = score_view.getUniqueId()
+            uid = int(re.search("id/no_id/(?P<uid>\d+)", score_uid).group('uid'))
+            score = self.vc.findViewByIdOrRaise("id/no_id/%s" % (uid + offset))
+            score_text = score.getText()
+            if score_text.find("%") > 0:
+                score_value, units = score_text.split(" ")
+                self.report_result("cfbench-" + content_desc.replace(" ", "-"), 'pass', score_value, units)
+
+            else:
+                self.report_result("cfbench-" + content_desc.replace(" ", "-"), 'pass', score_text, 'points')
+        except ViewNotFoundException:
+            self.logger.error("%s not found" % content_desc)
+            pass
+
+    def parseResult(self):
+        # Fetch Scores
+        self.__get_score_with_content_desc("Native MIPS")
+        self.__get_score_with_content_desc("Java MIPS")
+        self.__get_score_with_content_desc("Native MSFLOPS")
+        self.__get_score_with_content_desc("Java MSFLOPS")
+        self.__get_score_with_content_desc("Native MDFLOPS")
+        self.__get_score_with_content_desc("Java MDFLOPS")
+        self.__get_score_with_content_desc("Native MALLOCS")
+        self.__get_score_with_content_desc("Native Memory Read")
+        self.__get_score_with_content_desc("Java Memory Read")
+        self.__get_score_with_content_desc("Native Memory Write")
+        self.__get_score_with_content_desc("Java Memory Write")
+        self.__get_score_with_content_desc("Native Disk Read")
+        self.__get_score_with_content_desc("Native Disk Write")
+        self.__get_score_with_content_desc("Java Efficiency MIPS")
+        self.__get_score_with_content_desc("Java Efficiency MSFLOPS")
+        self.__get_score_with_content_desc("Java Efficiency MDFLOPS")
+        self.__get_score_with_content_desc("Java Efficiency Memory Read")
+        self.__get_score_with_content_desc("Java Efficiency Memory Write")
+        self.__get_score_with_content_desc("Native Score")
+        self.__get_score_with_content_desc("Java Score")
+        self.__get_score_with_content_desc("Overall Score")
diff --git a/automated/android/apk-automation/common/__init__.py b/automated/android/apk-automation/common/__init__.py
new file mode 100755
index 0000000..963463e
--- /dev/null
+++ b/automated/android/apk-automation/common/__init__.py
@@ -0,0 +1,256 @@
+import csv
+import datetime
+import json
+import logging
+import math
+import os
+import requests
+import shutil
+import subprocess
+import sys
+import time
+import urlparse
+from com.dtmilano.android.viewclient import ViewClient
+
+
+class ApkTestRunner(object):
+    def __init__(self, config):
+        self.config = config
+
+        self.logger = logging.getLogger(self.config['name'])
+        self.logger.setLevel(logging.INFO)
+        if self.config['verbose']:
+            self.logger.setLevel(logging.DEBUG)
+        ch = logging.StreamHandler()
+        ch.setLevel(logging.DEBUG)
+        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s -
%(message)s') + ch.setFormatter(formatter) + self.logger.addHandler(ch) + + self.config['output'] = os.getenv("OUTPUT", "./output/%s" % config['name']) + if os.path.exists(self.config['output']): + suffix = datetime.datetime.now().strftime('%Y%m%d%H%M%S') + shutil.move(self.config['output'], '%s-%s' % (self.config['output'], suffix)) + os.makedirs(self.config['output']) + self.results = [] + + serialno = os.getenv('ANDROID_SERIAL') + if serialno is None: + serialno = '.*' + kwargs1 = { + 'serialno': serialno, + 'verbose': True, + 'ignoresecuredevice': False} + self.logger.debug('VC kwargs1: %s' % kwargs1) + self.device, self.serialno = ViewClient.connectToDeviceOrExit(**kwargs1) + kwargs2 = { + 'startviewserver': True, + 'forceviewserveruse': False, + 'autodump': False, + 'ignoreuiautomatorkilled': True, + 'compresseddump': False} + self.logger.debug('VC kwargs2: %s' % kwargs2) + self.vc = ViewClient(self.device, self.serialno, **kwargs2) + + def run(self): + self.validate() + + for i in range(1, self.config['loops'] + 1): + try: + self.logger.info('Running iteration [%s/%s]' % (i, self.config['loops'])) + self.config['itr'] = i + self.logger.info('Test config: %s' % self.config) + self.setUp() + self.execute() + self.parseResult() + self.take_screencap() + self.tearDown() + except Exception as e: + self.take_screencap() + self.report_result(self.config['name'], 'fail') + self.logger.error(e) + sys.exit(1) + + self.collect_log() + self.result_post_processing() + + def report_result(self, name, result, score=None, units=None): + if score is not None: + score = float(score) + if units is not None: + units = str(units) + + tc_name = str(name) + if self.config['loops'] > 1 and self.config['itr'] != 'stats': + tc_name = '%s-itr%s' % (name, self.config['itr']) + + result_string = '%s %s %s %s' % (tc_name, result, score, units) + if score is None: + result_string = '%s %s' % (tc_name, result) + if score is not None and units is None: + result_string = '%s %s %s' % (tc_name, result, score) + + self.logger.info('TestResult: %s' % result_string) + with open('%s/result.txt' % self.config['output'], 'a') as f: + f.write('%s\n' % result_string) + + # Save result to results for post processing. + result = {'itr': self.config['itr'], + 'test_case_id': str(name), + 'result': str(result), + 'measurement': score, + 'units': units} + self.results.append(result) + + def statistics_result(self): + if self.config['loops'] == 1: + return + + self.config['itr'] = 'stats' + + tc_list = [] + for result in self.results: + if result['measurement'] is not None: + tc_list.append(result['test_case_id']) + tc_list = set(tc_list) + + for tc in tc_list: + ms_list = [] + for result in self.results: + if result['test_case_id'] == tc: + ms_list.append(result['measurement']) + + units = '' + for result in self.results: + if result['test_case_id'] == tc: + units = result['units'] + break + + # Calculate and report population standard deviation and standard error. 
+ mean = sum(ms_list) / len(ms_list) + variance = sum([(e - mean) ** 2 for e in ms_list]) / len(ms_list) + pstdev = math.sqrt(variance) + pstderr = pstdev / math.sqrt(len(ms_list)) + self.report_result('%s-min' % tc, 'pass', min(ms_list), units) + self.report_result('%s-max' % tc, 'pass', max(ms_list), units) + self.report_result('%s-mean' % tc, 'pass', mean, units) + self.report_result('%s-sigma' % tc, 'pass', pstdev, units) + self.report_result('%s-stderr' % tc, 'pass', pstderr, units) + + def result_post_processing(self): + self.statistics_result() + + # Save results to output/name/name-result.csv. + fieldnames = ['itr', 'test_case_id', 'result', 'measurement', 'units'] + with open('%s/result.csv' % self.config['output'], 'w') as f: + writer = csv.DictWriter(f, fieldnames=fieldnames) + writer.writeheader() + for result in self.results: + writer.writerow(result) + self.logger.info('Result saved to %s/result.csv' % self.config['output']) + + # Save results to output/name/name-result.json + with open('%s/result.json' % self.config['output'], 'w') as f: + json.dump([self.results], f, indent=4) + self.logger.info('Result saved to %s/result.json' % self.config['output']) + + def dump_always(self): + success = False + while not success: + try: + self.vc.dump() + success = True + except RuntimeError: + print("Got RuntimeError when call vc.dump()") + time.sleep(5) + except ValueError: + print("Got ValueError when call vc.dump()") + time.sleep(5) + + def call_adb(self, args): + self.logger.debug("calling") + self.logger.debug("adb %s" % args) + try: + # Need to set shell=True to save output to host directly. + subprocess.check_call("adb %s" % args, shell=True) + except (OSError, subprocess.CalledProcessError) as e: + print(e) + sys.exit(1) + + def validate(self): + if self.config['apk_file_name'] is None: + self.logger.error("APK file name not set") + sys.exit(1) + + if self.config['apk_package'] is None: + self.logger.error("APK package name not set") + sys.exit(1) + + if self.config['activity'] is None: + self.logger.error("Activity name not set") + sys.exit(1) + + def download_apk(self, apk_name): + # create directory for downloaded files + if not os.path.isdir(os.path.abspath(self.config['apk_dir'])): + os.makedirs(os.path.abspath(self.config['apk_dir'])) + + # download APK if not already downloaded + apk_path = os.path.join(os.path.abspath(self.config['apk_dir']), apk_name) + if not os.path.isfile(apk_path): + apk_url = urlparse.urljoin(self.config['base_url'], apk_name) + r = requests.get(apk_url, stream=True) + if r.status_code == 200: + with open(apk_path, 'wb') as f: + r.raw.decode_content = True + shutil.copyfileobj(r.raw, f) + + def install_apk(self, apk_name): + apk_path = os.path.join(os.path.abspath(self.config['apk_dir']), apk_name) + self.logger.info('Installing %s' % os.path.basename(apk_path)) + self.call_adb("install %s" % apk_path) + + def uninstall_apk(self, package): + install_packages = subprocess.check_output(['adb', 'shell', 'pm', 'list', 'packages']) + if package in install_packages: + self.logger.info('Stopping %s' % package) + self.call_adb("shell am force-stop %s" % package) + + self.logger.info('Uninstalling %s' % package) + self.call_adb("shell pm uninstall %s" % package) + + def take_screencap(self): + screencap_file = '/data/local/tmp/%s-itr%s.png' % (self.config['name'], self.config['itr']) + self.call_adb('shell screencap %s' % screencap_file) + self.logger.info('Pulling %s to output directory...' 
% screencap_file) + self.call_adb('pull %s %s' % (screencap_file, self.config['output'])) + + def collect_log(self): + self.logger.info("Saving logcat.log, logcat-events.log and dmesg.log to output directory...") + self.call_adb('logcat -d -v time > %s/logcat.log' % self.config['output']) + self.call_adb('logcat -d -b events -v time > %s/logcat-events.log' % self.config['output']) + self.call_adb('shell dmesg > %s/dmesg.log' % self.config['output']) + + def setUp(self): + # Install APK. + self.download_apk(self.config['apk_file_name']) + self.uninstall_apk(self.config['apk_package']) + self.install_apk(self.config['apk_file_name']) + + # Clear logcat buffer. + self.call_adb("logcat -c") + self.call_adb("logcat -b events -c") + time.sleep(3) + + # Start intent. + self.logger.info('Starting %s' % self.config['apk_package']) + self.call_adb("shell am start -W -S %s" % self.config['activity']) + + def execute(self): + raise NotImplementedError + + def parseResult(self): + raise NotImplementedError + + def tearDown(self): + self.uninstall_apk(self.config['apk_package']) diff --git a/automated/android/apk-automation/gearses2eclair.py b/automated/android/apk-automation/gearses2eclair.py new file mode 100755 index 0000000..4390f13 --- /dev/null +++ b/automated/android/apk-automation/gearses2eclair.py @@ -0,0 +1,52 @@ +import sys +import time +from common import ApkTestRunner + + +class ApkRunnerImpl(ApkTestRunner): + def __init__(self, config): + self.config = config + self.config['apk_file_name'] = "GearsES2eclair-20141021.apk" + self.config['apk_package'] = "com.jeffboody.GearsES2eclair" + self.config['activity'] = "com.jeffboody.GearsES2eclair/.GearsES2eclair" + super(ApkRunnerImpl, self).__init__(self.config) + + def execute(self): + self.logger.info('Running GearsES2eclair for 60 seconds...') + time.sleep(60) + + def parseResult(self): + raw_output_file = "%s/logcat-gearses2eclair-itr%s.log" % (self.config['output'], self.config['itr']) + self.call_adb('logcat -d > %s' % raw_output_file) + + flagwordA = "a3d_GLES_dump" + flagwordB = "fps" + result_collector = [] + + logfile = open(raw_output_file, "r") + for line in logfile: + linelist = line.strip("\n").strip("\r").split(" ") + linelist = filter(None, linelist) + for itemA in linelist: + if itemA.find(flagwordA) != -1: + for itemB in linelist: + if itemB.find(flagwordB) != -1: + self.logger.info('linelist: %s' % linelist) + for i in range(0, len(linelist)): + grouplist = linelist[i].split("=") + if len(grouplist) == 2 and grouplist[0] == flagwordB: + result_collector.append(grouplist[1]) + logfile.close() + + self.logger.info('result_collector: %s' % result_collector) + if len(result_collector) > 0: + average_fps = sum(float(element) for element in result_collector) / len(result_collector) + score_number = average_fps + run_result = "pass" + score_unit = flagwordB + self.logger.info("The average FPS in this test run is %s" % str(score_number)) + else: + self.logger.error("The collector is empty, no actual result received!") + sys.exit(1) + + self.report_result('gearses2eclair', run_result, score_number, score_unit) diff --git a/automated/android/apk-automation/geekbench3.py b/automated/android/apk-automation/geekbench3.py new file mode 100755 index 0000000..211107d --- /dev/null +++ b/automated/android/apk-automation/geekbench3.py @@ -0,0 +1,126 @@ +import glob +import os +import sys +import shutil +import time +from common import ApkTestRunner +from com.dtmilano.android.viewclient import ViewNotFoundException + + +class 
ApkRunnerImpl(ApkTestRunner): + def __init__(self, config): + self.config = config + self.config['apk_file_name'] = "com.primatelabs.geekbench3.apk" + self.config['apk_package'] = "com.primatelabs.geekbench3" + self.config['activity'] = "com.primatelabs.geekbench3/.HomeActivity" + super(ApkRunnerImpl, self).__init__(self.config) + + def all_fail(self): + self.report_result('geekbench-run', 'fail') + self.report_result('geekbench-single-core', 'skip') + self.report_result('geekbench-multi-core', 'skip') + + def execute(self): + try: + time.sleep(2) + self.dump_always() + trigger = self.vc.findViewByIdOrRaise(self.config['apk_package'] + ":id/runBenchmarks") + trigger.touch() + self.logger.info("Geekbench 3 Test Started!") + except ViewNotFoundException: + self.logger.error("Can not find the start button! Please check the screen!") + self.all_fail() + sys.exit(1) + + time.sleep(10) + self.dump_always() + + try: + self.vc.findViewByIdOrRaise("android:id/progress") + except ViewNotFoundException: + self.logger.error("Something goes wrong! It is unusual that the test has not been started after 10+ seconds! Please manually check it!") + self.all_fail() + sys.exit(1) + + finished = False + while (not finished): + time.sleep(45) + self.dump_always() + flag = self.vc.findViewWithText("RESULT") + if flag is not None: + self.logger.info("Geekbench 3 Test Finished!") + finished = True + else: + self.logger.info("Geekbench 3 Test is still in progress...") + + # Generate the .gb3 file + self.device.press('KEYCODE_MENU') + time.sleep(1) + self.dump_always() + share_button = self.vc.findViewWithText("Share") + if share_button is not None: + share_button.touch() + time.sleep(5) + else: + self.logger.error("Can not find the Share button to generate .gb3 file! Please check the screen!") + sys.exit(1) + + def parseResult(self): + raw_output_file = '%s/geekbench3-result-itr%s.gb3' % (self.config['output'], self.config['itr']) + self.logger.info('Pulling /data/user/0/com.primatelabs.geekbench3/files to output directory...') + self.call_adb('pull /data/user/0/com.primatelabs.geekbench3/files %s/files' % self.config['output']) + db_file_list = glob.glob('%s/files/*.gb3' % self.config['output']) + if len(db_file_list) > 1: + self.logger.error('More then one db file found...') + sys.exit(1) + db_file = db_file_list[0] + os.rename(db_file, raw_output_file) + + singlecore_keyword = "score" + singlecore_result = {} + multicore_keyword = "multicore_score" + multicore_result = {} + endpoint_keyword = "multicore_rate" + + if os.path.exists(raw_output_file): + logfile = open(raw_output_file, "r") + for line in logfile: + # Can't believe this is an one line file! + # Find the ending point with the information we want + endpoint = line.find(endpoint_keyword) + if endpoint == -1: + self.logger.error("Can not find %s in log file! Please manually check it!" % endpoint_keyword) + self.all_fail() + sys.exit(1) + else: + self.report_result("geekbench-run", "pass") + result_cut = line[0:endpoint].split(",") + result_cut = [element.replace('"', '').replace(' ', '') for element in result_cut] + for item in result_cut: + if singlecore_keyword == item.split(":")[0]: + singlecore_result[singlecore_keyword] = item.split(":")[1] + if multicore_keyword == item.split(":")[0]: + multicore_result[multicore_keyword] = item.split(":")[1] + if len(singlecore_result) != 1: + run_result = "fail" + self.logger.error("Incorrect value for single core test result! 
Please check the test result file!") + self.report_result('geekbench-single-core', run_result) + else: + run_result = "pass" + self.report_result('geekbench-single-core', run_result, singlecore_result[singlecore_keyword]) + if len(multicore_result) != 1: + run_result = "fail" + self.logger.error("Incorrect value for multi core test result! Please check the test result file!") + self.report_result('geekbench-multi-core', run_result) + else: + run_result = "pass" + self.report_result('geekbench-multi-core', run_result, multicore_result[multicore_keyword]) + + logfile.close() + else: + self.logger.error("Result file does not exist: %s" % raw_output_file) + sys.exit(1) + + def tearDown(self): + super(ApkRunnerImpl, self).tearDown() + shutil.rmtree('%s/files/' % self.config['output']) diff --git a/automated/android/apk-automation/glbenchmark25-preferences.xml b/automated/android/apk-automation/glbenchmark25-preferences.xml new file mode 100644 index 0000000..1315e4c --- /dev/null +++ b/automated/android/apk-automation/glbenchmark25-preferences.xml @@ -0,0 +1,14 @@ + + +COMMUNITY +androidcommunityuser +1920x1080 + + +0x0 + +pandaboard + +asdojavmdthjwejgasogaouhgaufiuadxfoafgaosdcjaocaod + + diff --git a/automated/android/apk-automation/glbenchmark25.py b/automated/android/apk-automation/glbenchmark25.py new file mode 100755 index 0000000..657cd01 --- /dev/null +++ b/automated/android/apk-automation/glbenchmark25.py @@ -0,0 +1,122 @@ +import time +import xml.dom.minidom +from common import ApkTestRunner + + +class ApkRunnerImpl(ApkTestRunner): + def __init__(self, config): + self.config = config + self.config['apk_file_name'] = 'GLBenchmark_2.5.1.apk' + self.config['apk_package'] = 'com.glbenchmark.glbenchmark25' + self.config['activity'] = 'com.glbenchmark.glbenchmark25/com.glbenchmark.activities.GLBenchmarkDownloaderActivity' + super(ApkRunnerImpl, self).__init__(self.config) + + def setUp(self): + self.download_apk('main.1.com.glbenchmark.glbenchmark25.obb') + self.uninstall_apk(self.config['apk_package']) + self.install_apk(self.config['apk_file_name']) + + # Push data and config files. + self.logger.info('Pushing main.1.com.glbenchmark.glbenchmark25.obb to target...') + self.call_adb('push %s/main.1.com.glbenchmark.glbenchmark25.obb /sdcard/Android/obb/com.glbenchmark.glbenchmark25/main.1.com.glbenchmark.glbenchmark25.obb' % self.config['apk_dir']) + self.logger.info('Pushing glbenchmark25-preferences.xml to target...') + self.call_adb('push ./glbenchmark25-preferences.xml /data/data/com.glbenchmark.glbenchmark25/shared_prefs/com.glbenchmark.glbenchmark25_preferences.xml') + + # Clear logcat buffer. + self.call_adb("logcat -c") + self.call_adb("logcat -b events -c") + time.sleep(3) + + # Start intent. 
+ self.logger.info('Starting %s' % self.config['apk_package']) + self.call_adb("shell am start -W -S %s" % self.config['activity']) + + def execute(self): + time.sleep(2) + + self.vc.dump(window='-1') + test_type = self.vc.findViewWithText("Performance Tests") + if test_type: + test_type.touch() + time.sleep(2) + + # By some reason in order to select all test, a back step is required + self.dump_always() + test_selection = self.vc.findViewByIdOrRaise("com.glbenchmark.glbenchmark25:id/buttonAll") + self.device.press('KEYCODE_BACK') + time.sleep(3) + + test_type.touch() + time.sleep(2) + test_selection.touch() + self.logger.info("All selected!") + time.sleep(3) + + # Disable crashed test suites + self.vc.dump(window='-1') + crashed_test_name = "C24Z24MS4" + self.logger.info('Test suite %s is going to be disabled!' % crashed_test_name) + crashed_test = self.vc.findViewWithText(crashed_test_name) + if crashed_test is not None: + crashed_test.touch() + self.logger.info('Test suite %s has been excluded!' % crashed_test_name) + time.sleep(2) + else: + self.logger.info('Can not find test suite %s, please check the screen!' % crashed_test_name) + + # Start selected test suites + start_button = self.vc.findViewByIdOrRaise("com.glbenchmark.glbenchmark25:id/buttonStart") + start_button.touch() + time.sleep(2) + + finished = False + while not finished: + time.sleep(120) + self.dump_always() + flag = self.vc.findViewWithText("Result processing") + if flag is not None: + self.logger.info('GLBenchmark Test Finished.') + finished = True + # Give up the result upload + cancel_button = self.vc.findViewWithText("Cancel") + if cancel_button is not None: + cancel_button.touch() + time.sleep(5) + else: + self.logger.error('Can not find cancel button! Please check the pop up window!') + else: + self.logger.info('GLBenchmark Test is still in progress...') + + def getText(self, node): + children = node.childNodes + rc = [] + for node in children: + if node.nodeType == node.TEXT_NODE: + rc.append(node.data) + return ''.join(rc) + + def logparser(self, cached_result_file): + run_result = 'pass' + dom = xml.dom.minidom.parse(cached_result_file) + results = dom.getElementsByTagName('test_result') + + for test in results: + title = self.getText(test.getElementsByTagName('title')[0]) + test_type = self.getText(test.getElementsByTagName('type')[0]) + score_number = self.getText(test.getElementsByTagName('score')[0]) + fps = self.getText(test.getElementsByTagName('fps')[0]) + score_unit = self.getText(test.getElementsByTagName('uom')[0]) + benchmark_name = title.replace(" ", "-").replace(":", "") + "-" + test_type.replace(" ", "-").replace(":", "") + self.report_result(benchmark_name, run_result, score_number, score_unit) + + if fps != "": + score_number = fps.split(" ")[0] + score_unit = fps.split(" ")[1] + self.report_result(benchmark_name, run_result, score_number, score_unit) + + def parseResult(self): + cached_result_file = '%s/last-results-2.5.1-itr%s.xml' % (self.config['output'], self.config['itr']) + self.logger.info('pull /data/data/com.glbenchmark.glbenchmark25/cache/last_results_2.5.1.xml to output directory...') + self.call_adb('pull /data/data/com.glbenchmark.glbenchmark25/cache/last_results_2.5.1.xml %s' % cached_result_file) + + self.logparser(cached_result_file) diff --git a/automated/android/apk-automation/javawhetstone.py b/automated/android/apk-automation/javawhetstone.py new file mode 100755 index 0000000..a5d974d --- /dev/null +++ b/automated/android/apk-automation/javawhetstone.py @@ -0,0 +1,63 @@ 
+import re +import time +from common import ApkTestRunner +from com.dtmilano.android.viewclient import ViewNotFoundException + + +class ApkRunnerImpl(ApkTestRunner): + def __init__(self, config): + self.config = config + self.config['apk_file_name'] = 'JavaBenchmark/pure-java-benchmarks/01-Java_Whetstone.apk' + self.config['apk_package'] = 'com.roywhet' + self.config['activity'] = 'com.roywhet/.JavaWhetstoneActivity' + super(ApkRunnerImpl, self).__init__(self.config) + + def execute(self): + self.dump_always() + btn_run = self.vc.findViewByIdOrRaise("com.roywhet:id/startButton") + btn_run.touch() + time.sleep(2) + + finished = False + while not finished: + try: + time.sleep(30) + self.dump_always() + self.jws_results = self.vc.findViewByIdOrRaise("com.roywhet:id/displayDetails") + if re.search('Total Elapsed Time', self.jws_results.getText()): + finished = True + self.logger.info('benchmark finished') + except ViewNotFoundException: + pass + except RuntimeError: + pass + except ValueError: + pass + + def parseResult(self): + key_unit_hash = { + "N1": "MFLOPS", + "N2": "MFLOPS", + "N3": "MOPS", + "N4": "MOPS", + "N5": "MOPS", + "N6": "MFLOPS", + "N7": "MOPS", + "N8": "MOPS", + "MWIPS": "MFLOPS" + } + + for line in self.jws_results.getText().split('\n'): + line = str(line.strip()) + elements = re.split(r'\s+', line) + if line.startswith('MWIPS'): + units = key_unit_hash['MWIPS'] + key = "MWIPS" + value = elements[1] + elif line.startswith('N'): + units = key_unit_hash[elements[0]] + key = "%s-%s" % (elements[0], elements[1]) + value = elements[2] + else: + continue + self.report_result('javawhetstone-%s' % key, 'pass', value, units) diff --git a/automated/android/apk-automation/jbench.py b/automated/android/apk-automation/jbench.py new file mode 100755 index 0000000..0481081 --- /dev/null +++ b/automated/android/apk-automation/jbench.py @@ -0,0 +1,40 @@ +import re +import time +from common import ApkTestRunner +from com.dtmilano.android.viewclient import ViewNotFoundException + + +class ApkRunnerImpl(ApkTestRunner): + def __init__(self, config): + self.config = config + self.config['apk_file_name'] = 'JavaBenchmark/pure-java-benchmarks/03-JBench.apk' + self.config['apk_package'] = 'it.JBench.bench' + self.config['activity'] = 'it.JBench.bench/it.JBench.jbench.MainActivity' + super(ApkRunnerImpl, self).__init__(self.config) + + def execute(self): + self.dump_always() + btn_jbench = self.vc.findViewByIdOrRaise("it.JBench.bench:id/button1") + btn_jbench.touch() + time.sleep(2) + + finished = False + while (not finished): + try: + time.sleep(5) + self.dump_always() + results = self.vc.findViewByIdOrRaise("it.JBench.bench:id/textViewResult") + if re.search('^\d+$', results.getText()): + finished = True + print("benchmark finished") + print("%s=%s" % ("JBench", results.getText().strip())) + self.report_result("jbench", 'pass', results.getText().strip(), 'points') + except ViewNotFoundException: + pass + except RuntimeError: + pass + except ValueError: + pass + + def parseResult(self): + pass diff --git a/automated/android/apk-automation/linpack.py b/automated/android/apk-automation/linpack.py new file mode 100755 index 0000000..d71687f --- /dev/null +++ b/automated/android/apk-automation/linpack.py @@ -0,0 +1,54 @@ +import time +from common import ApkTestRunner + + +class ApkRunnerImpl(ApkTestRunner): + def __init__(self, config): + self.config = config + self.config['apk_file_name'] = "com.greenecomputing.linpack-1.apk" + self.config['apk_package'] = "com.greenecomputing.linpack" + 
self.config['activity'] = "com.greenecomputing.linpack/.Linpack" + super(ApkRunnerImpl, self).__init__(self.config) + + def execute(self): + # single core test. + time.sleep(2) + self.dump_always() + start_single_button = self.vc.findViewByIdOrRaise("com.greenecomputing.linpack:id/btnsingle") + start_single_button.touch() + + time.sleep(2) + self.dump_always() + start_single_button = self.vc.findViewById("com.greenecomputing.linpack:id/btnsingle") + + while not start_single_button: + time.sleep(2) + self.dump_always() + start_single_button = self.vc.findViewById("com.greenecomputing.linpack:id/btnsingle") + + mflops_single_score = self.vc.findViewByIdOrRaise("com.greenecomputing.linpack:id/txtmflops_result") + time_single_score = self.vc.findViewByIdOrRaise("com.greenecomputing.linpack:id/txttime_result") + self.report_result('Linpack-MFLOPSSingleScore', 'pass', mflops_single_score.getText(), 'MFLOPS') + self.report_result('Linpack-TimeSingleScore', 'pass', time_single_score.getText(), 'seconds') + + # Multi core test. + self.dump_always() + start_multi_button = self.vc.findViewByIdOrRaise("com.greenecomputing.linpack:id/btncalculate") + start_multi_button.touch() + + time.sleep(2) + self.dump_always() + start_single_button = self.vc.findViewById("com.greenecomputing.linpack:id/btnsingle") + + while not start_single_button: + time.sleep(2) + self.dump_always() + start_single_button = self.vc.findViewById("com.greenecomputing.linpack:id/btnsingle") + + mflops_multi_score = self.vc.findViewByIdOrRaise("com.greenecomputing.linpack:id/txtmflops_result") + time_multi_score = self.vc.findViewByIdOrRaise("com.greenecomputing.linpack:id/txttime_result") + self.report_result('Linpack-MFLOPSMultiScore', 'pass', mflops_multi_score.getText(), 'MFLOPS') + self.report_result('Linpack-TimeMultiScore', 'pass', time_multi_score.getText(), 'seconds') + + def parseResult(self): + pass diff --git a/automated/android/apk-automation/main.py b/automated/android/apk-automation/main.py new file mode 100755 index 0000000..1b06c80 --- /dev/null +++ b/automated/android/apk-automation/main.py @@ -0,0 +1,21 @@ +from argparse import ArgumentParser +import importlib + +parser = ArgumentParser() +parser.add_argument('-d', '--apk_dir', dest='apk_dir', default='./apks', + help="Specify APK's directory.") +parser.add_argument('-u', '--base_url', dest='base_url', default='http://testdata.validation.linaro.org/apks/', + help="Specify APK's base url.") +parser.add_argument('-n', '--name', dest='name', default='linpack', + help='Specify test name.') +parser.add_argument('-l', '--loops', type=int, dest='loops', default=1, + help='Set the number of test loops.') +parser.add_argument('-v', '--verbose', action='store_true', dest='verbose', + default=False, help='Set the number of test loops.') +args = parser.parse_args() +print('Test job arguments: %s' % args) + +config = vars(args) +mod = importlib.import_module(config['name']) +a = mod.ApkRunnerImpl(config) +a.run() diff --git a/automated/android/apk-automation/quadrantpro.py b/automated/android/apk-automation/quadrantpro.py new file mode 100755 index 0000000..6c39028 --- /dev/null +++ b/automated/android/apk-automation/quadrantpro.py @@ -0,0 +1,47 @@ +from common import ApkTestRunner +from com.dtmilano.android.viewclient import ViewNotFoundException + + +class ApkRunnerImpl(ApkTestRunner): + def __init__(self, config): + self.config = config + self.config['apk_file_name'] = 'com.aurorasoftworks.quadrant.ui.professional-1.apk' + self.config['apk_package'] = 
'com.aurorasoftworks.quadrant.ui.professional' + self.config['activity'] = 'com.aurorasoftworks.quadrant.ui.professional/.QuadrantProfessionalLauncherActivity' + super(ApkRunnerImpl, self).__init__(self.config) + + def execute(self): + self.dump_always() + view_license_btn = self.vc.findViewWithText("View license") + if view_license_btn: + ok_button = self.vc.findViewWithTextOrRaise("OK") + ok_button.touch() + + self.dump_always() + run_full_item = self.vc.findViewWithTextOrRaise(u'Run full benchmark') + run_full_item.touch() + + finished = False + while not finished: + try: + self.dump_always() + self.vc.findViewByIdOrRaise("com.aurorasoftworks.quadrant.ui.professional:id/chart") + finished = True + self.logger.info('Benchmark finished') + except ViewNotFoundException: + pass + except RuntimeError: + pass + except ValueError: + pass + + def parseResult(self): + raw_output_file = "%s/logcat-quadrandpro-itr%s.log" % (self.config['output'], self.config['itr']) + self.call_adb('logcat -d -v brief > %s' % raw_output_file) + + with open(raw_output_file) as logfile: + for line in logfile: + if 'aggregate score is' in line: + tc_id = line.split()[3].replace('_', '-') + measurement = line.split()[-1] + self.report_result('quadrandpro-%s' % tc_id, 'pass', measurement, 'points') diff --git a/automated/android/apk-automation/rl-sqlite.py b/automated/android/apk-automation/rl-sqlite.py new file mode 100755 index 0000000..48ff4a8 --- /dev/null +++ b/automated/android/apk-automation/rl-sqlite.py @@ -0,0 +1,61 @@ +import time +from common import ApkTestRunner + + +class ApkRunnerImpl(ApkTestRunner): + def __init__(self, config): + self.config = config + self.config['apk_file_name'] = 'RL_Benchmark_SQLite_v1.3.apk' + self.config['apk_package'] = 'com.redlicense.benchmark.sqlite' + self.config['activity'] = 'com.redlicense.benchmark.sqlite/.Main' + super(ApkRunnerImpl, self).__init__(self.config) + + def execute(self): + self.dump_always() + btn_start = self.vc.findViewWithTextOrRaise(u'Start') + btn_start.touch() + + finished = False + while(not finished): + self.dump_always() + overall_result = self.vc.findViewWithText(u'Overall') + if overall_result: + finished = True + self.logger.info("benchmark finished") + + def __get_score_with_text(self, text): + found_score_view = False + while not found_score_view: + linear_layout = self.vc.findViewByIdOrRaise("com.redlicense.benchmark.sqlite:id/stats") + for ch in linear_layout.children: + subitem = self.vc.findViewWithText(text, ch) + if subitem: + subitem_result = self.vc.findViewByIdOrRaise("com.redlicense.benchmark.sqlite:id/test_result", ch) + score = subitem_result.getText().replace("sec", "").strip() + score_in_ms = float(score) * 1000 + self.report_result("RL-sqlite-" + text.replace(" ", "-"), 'pass', str(score_in_ms), "ms") + found_score_view = True + break + if subitem is None: + self.logger.info("%s not found, need to pageup" % text) + self.device.press('DPAD_UP') + time.sleep(2) + self.device.press('DPAD_UP') + time.sleep(2) + self.dump_always() + + def parseResult(self): + self.__get_score_with_text("Overall") + self.__get_score_with_text("DROP TABLE") + self.__get_score_with_text("DELETE with an index") + self.__get_score_with_text("DELETE without an index") + self.__get_score_with_text("INSERTs from a SELECT") + self.__get_score_with_text("25000 UPDATEs with an index") + self.__get_score_with_text("1000 UPDATEs without an index") + self.__get_score_with_text("5000 SELECTs with an index") + self.__get_score_with_text("Creating an index") + 
self.__get_score_with_text("100 SELECTs on a string comparison") + self.__get_score_with_text("100 SELECTs without an index") + self.__get_score_with_text("25000 INSERTs into an indexed table in a transaction") + self.__get_score_with_text("25000 INSERTs in a transaction") + self.__get_score_with_text("1000 INSERTs") diff --git a/automated/android/apk-automation/scimark.py b/automated/android/apk-automation/scimark.py new file mode 100755 index 0000000..bcc30d5 --- /dev/null +++ b/automated/android/apk-automation/scimark.py @@ -0,0 +1,46 @@ +import time +from common import ApkTestRunner +from com.dtmilano.android.viewclient import ViewNotFoundException + + +class ApkRunnerImpl(ApkTestRunner): + def __init__(self, config): + self.config = config + self.config['apk_file_name'] = 'JavaBenchmark/non-pure-java-benchmarks/03-SciMark.apk' + self.config['apk_package'] = 'net.danielroggen.scimark' + self.config['activity'] = 'net.danielroggen.scimark/.ActivityMain' + super(ApkRunnerImpl, self).__init__(self.config) + + def execute(self): + time.sleep(5) + self.dump_always() + btn_java_bench = self.vc.findViewWithTextOrRaise(u'Java bench') + btn_java_bench.touch() + + finished = False + while not finished: + try: + time.sleep(60) + self.dump_always() + self.sci_results = self.vc.findViewByIdOrRaise("net.danielroggen.scimark:id/textViewResult") + if self.sci_results.getText().find("Done") > 0: + finished = True + self.logger.info("benchmark finished") + except ViewNotFoundException: + pass + except RuntimeError: + pass + except ValueError: + pass + + def parseResult(self): + keys = ["FFT (1024)", "SOR (100x100)", "Monte Carlo", + "Sparse matmult (N=1000, nz=5000)", "LU (100x100)", "Composite Score"] + + for line in self.sci_results.getText().replace(": \n", ":").split("\n"): + line = str(line.strip()) + key_val = line.split(":") + if len(key_val) == 2: + if key_val[0].strip() in keys: + key = key_val[0].strip().replace(' ', '-').replace('(', '').replace(')', '').replace(',', '') + self.report_result("scimark-" + key, 'pass', key_val[1].strip(), 'Mflops') diff --git a/automated/android/apk-automation/vellamo3.py b/automated/android/apk-automation/vellamo3.py new file mode 100755 index 0000000..d9719a7 --- /dev/null +++ b/automated/android/apk-automation/vellamo3.py @@ -0,0 +1,141 @@ +import json +import time +from common import ApkTestRunner +from com.dtmilano.android.viewclient import ViewNotFoundException + + +class ApkRunnerImpl(ApkTestRunner): + def __init__(self, config): + self.config = config + self.config['apk_file_name'] = "com.quicinc.vellamo-3.apk" + self.config['apk_package'] = "com.quicinc.vellamo" + self.config['activity'] = "com.quicinc.vellamo/.main.MainActivity" + super(ApkRunnerImpl, self).__init__(self.config) + + def choose_chapter(self, chapter_name): + # ToDo: scroll screen if chapter is not found on the first screen + self.dump_always() + scroll = self.vc.findViewWithText(u'''LET'S ROLL''') + if scroll: + print("Click LET'S ROLL") + scroll.touch() + + chapter_tab = None + self.dump_always() + while chapter_tab is None: + gotit_button = self.vc.findViewWithText(u'GOT IT') + if gotit_button: + print("Click GOT IT") + gotit_button.touch() + else: + print("press DPAD_DOWN") + self.device.press("DPAD_DOWN") + self.dump_always() + chapter_tab = self.vc.findViewWithText(chapter_name) + + enclosing_tab = chapter_tab.getParent().getParent() + for child in enclosing_tab.children: + if child.getClass() == "android.widget.FrameLayout": + for subchild in child.children: + if subchild.getId() 
== "com.quicinc.vellamo:id/card_launcher_run_button": + subchild.touch() + break + + def execute(self): + self.dump_always() + # Accept Vellamo EULA + btn_setup_1 = self.vc.findViewByIdOrRaise("android:id/button1") + btn_setup_1.touch() + + # Open settings + self.dump_always() + btn_settings = self.vc.findViewByIdOrRaise('com.quicinc.vellamo:id/main_toolbar_wheel') + btn_settings.touch() + + # Disable animations + self.dump_always() + btn_animations = self.vc.findViewWithTextOrRaise(u'Make Vellamo even more beautiful') + btn_animations.touch() + + # Back to the home screen + self.device.press("KEYCODE_BACK") + + chapters = ['Browser', 'Multicore', 'Metal'] + for chapter in chapters: + self.choose_chapter(chapter) + + # Start benchmark + self.dump_always() + try: + btn_start = self.vc.findViewById("com.quicinc.vellamo:id/main_toolbar_operation_button") + if btn_start: + btn_start.touch() + except ViewNotFoundException: + self.report_result('vellamo3-%s' % chapter, 'fail') + self.logger.error('Start button for chapter %s NOT found, moving to the next chapter...') + continue + + # Wait while Vellamo is running benchmark + finished = False + while not finished: + time.sleep(1) + try: + self.dump_always() + goback_title = self.vc.findViewById("com.quicinc.vellamo:id/main_toolbar_goback_title") + goback_btn = self.vc.findViewById("com.quicinc.vellamo:id/main_toolbar_goback_button") + if goback_btn or goback_title: + btn_no = self.vc.findViewByIdOrRaise("com.quicinc.vellamo:id/button_no") + btn_no.touch() + finished = True + except ViewNotFoundException: + pass + except RuntimeError as e: + print(e) + except ValueError as ve: + print(ve) + + self.logger.info("Benchmark finished: %s" % chapter) + self.device.press("KEYCODE_BACK") + self.device.press("KEYCODE_BACK") + + def parseResult(self): + raw_result_file = '%s/chapterscores-itr%s.json' % (self.config['output'], self.config['itr']) + self.call_adb('pull /data/data/com.quicinc.vellamo/files/chapterscores.json %s' % raw_result_file) + default_unit = 'Points' + # This is one-line file, read it in a whole + fileopen = open(raw_result_file, 'r') + jsoncontent = json.load(fileopen) + result_flag = 'benchmark_results' + chapter_flag = 'chapter_name' + + total_score = 0 + for item in jsoncontent: + if result_flag and chapter_flag in item.keys(): + chapter = item[chapter_flag] + chapter_total = 0 + self.logger.info('%s test result found in category: %s' % (str(len(item[result_flag])), chapter)) + for elem in item[result_flag]: + if 'failed' in elem.keys() and 'id' in elem.keys() and 'score' in elem.keys(): + # Pick up the result + if elem['failed'] is False: + result = 'pass' + else: + result = 'fail' + # Pick up the full test name + testcase = chapter + '-' + elem['id'] + # Pick up the test score + score = elem['score'] + # Submit the result to LAVA + self.report_result("vellamo3-" + testcase, result, str(score), default_unit) + chapter_total = chapter_total + score + else: + print('Corrupted test result found, please check it manually.') + print('A valid test result must contain id, score and pass/fail status.') + + self.report_result("vellamo3-" + chapter + "-total", "pass", str(chapter_total), default_unit) + total_score = total_score + chapter_total + else: + print('Cannot find %s or %s in test result dictionary. Please check it manually.' % (result_flag, chapter_flag)) + + fileopen.close() + self.report_result("vellamo3-total-score", "pass", str(total_score), default_unit) -- cgit v1.2.3