author    Yongqin Liu <yongqin.liu@linaro.org>    2015-06-09 23:23:06 +0800
committer Yongqin Liu <yongqin.liu@linaro.org>    2015-06-11 13:46:52 +0800
commit    ac9ab343a559ef0945cf498f9e19a41750bb8abb (patch)
tree      e7971c004ee4104cb67189f4763017f0057aa4a9
parent    aca32cd731eb7cb9f2aac7963bdb131b7a8b6646 (diff)
Add support for outputting the standard deviation
Add a prefix to all the test names, and output the standard deviation and
standard error for each test item. Except for GFXBench3, all tests have been
verified on LAVA.

Change-Id: I54dc04cdbf3ed9e36e996264d56a3634e722a0b1
Signed-off-by: Yongqin Liu <yongqin.liu@linaro.org>
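For reference: the two statistics emitted per test item are the population
standard deviation (reported with a _sigma suffix) and the standard error of
the mean (_std_err suffix). A minimal Python sketch of the arithmetic that
common/statistic_average.sh implements in awk, with illustrative values only:

    import math

    def sigma_and_std_err(values):
        # Population standard deviation (divide by n, not n-1) and the
        # standard error of the mean, rounded to two decimals like the
        # awk pipeline in common/statistic_average.sh.
        mean = sum(values) / len(values)
        sigma = math.sqrt(sum((v - mean) ** 2 for v in values) / len(values))
        std_err = sigma / math.sqrt(len(values))
        return round(sigma, 2), round(std_err, 2)

    print(sigma_and_std_err([100.0, 102.0, 104.0]))  # (1.63, 0.94)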
-rwxr-xr-x  GFXBench3/vc.py                    76
-rwxr-xr-x  andebenchpro/execute.sh             4
-rwxr-xr-x  antutu2/vc.py                      61
-rwxr-xr-x  antutu3/vc.py                      28
-rwxr-xr-x  antutu4/execute.sh                 16
-rwxr-xr-x  antutu4/vc.py                      93
-rwxr-xr-x  antutu5.7/vc.py                    39
-rw-r--r--  application-benchmark-host.yaml     2
-rwxr-xr-x  benchmarkpi/vc.py                   2
-rwxr-xr-x  cf-bench/vc.py                      4
-rw-r--r--  common/common2.sh                  51
-rw-r--r--  common/statistic_average.sh        71
-rwxr-xr-x  gearses2eclair/vc.py                5
-rwxr-xr-x  geekbench3/vc.py                    8
-rwxr-xr-x  glbenchmark-2.5.1/vc.py             4
-rw-r--r--  javawhetstone/vc.py                 2
-rw-r--r--  lava-android-benchmark-host.yaml   10
-rw-r--r--  linpack/vc.py                       8
-rw-r--r--  linpackjava/vc.py                   4
-rwxr-xr-x  quadrantpro/execute.sh              2
-rw-r--r--  scimark/vc.py                      21
-rwxr-xr-x  sqlite/vc.py                        7
22 files changed, 299 insertions, 219 deletions
diff --git a/GFXBench3/vc.py b/GFXBench3/vc.py
index fc78bd4..09a64fc 100755
--- a/GFXBench3/vc.py
+++ b/GFXBench3/vc.py
@@ -6,42 +6,84 @@ from subprocess import call
from com.dtmilano.android.viewclient import ViewClient, ViewNotFoundException
+
+parent_dir = os.path.realpath(os.path.dirname(__file__))
+f_output_result="%s/../common/output-test-result.sh" % parent_dir
+
+
default_unit = 'points'
def get_score_with_content_desc(vc, content_desc, offset=1):
score_view = vc.findViewWithText(content_desc)
score_uid = score_view.getUniqueId()
uid = int(re.search("id/no_id/(?P<uid>\d+)", score_uid).group('uid'))
score = vc.findViewByIdOrRaise("id/no_id/%s" % (uid + offset))
- call(['lava-test-case', content_desc, '--result', 'pass', '--measurement', score.getText(), '--units', default_unit])
+ call([f_output_result, "GFXBench3_" + content_desc.replace(" ", "_"), 'pass', score.getText(), default_unit])
+
+
+def dump_always():
+ success = False
+ while not success:
+ try:
+ vc.dump()
+ success = True
+ except RuntimeError:
+ print("Got RuntimeError when call vc.dump()")
+ time.sleep(5)
+ except ValueError:
+ print("Got ValueError when call vc.dump()")
+ time.sleep(5)
+
kwargs1 = {'verbose': False, 'ignoresecuredevice': False}
device, serialno = ViewClient.connectToDeviceOrExit(**kwargs1)
kwargs2 = {'startviewserver': True, 'forceviewserveruse': False, 'autodump': False, 'ignoreuiautomatorkilled': True, 'compresseddump': False}
vc = ViewClient(device, serialno, **kwargs2)
-time.sleep(2)
-vc.dump(window='-1')
# Accept License
-btn_license = vc.findViewByIdOrRaise("android:id/button1")
-btn_license.touch()
-vc.dump(window='-1')
+time.sleep(2)
+dump_always()
+btn_license = vc.findViewById("android:id/button1")
+if btn_license:
+ btn_license.touch()
# Accept Active Internet connection
-btn_accept = vc.findViewByIdOrRaise("android:id/button1")
-btn_accept.touch()
-time.sleep(15)
-vc.dump(window='-1')
+time.sleep(2)
+dump_always()
+btn_accept = vc.findViewById("android:id/button1")
+if btn_accept:
+ btn_accept.touch()
+
+server_connected = False
+while not server_connected:
+ try:
+ time.sleep(15)
+ dump_always()
+ alert_not_connected = vc.findViewWithText(u'GFXBench could not reach our servers. Please come back later.')
+ if alert_not_connected:
+ btn_retry = vc.findViewWithTextOrRaise(u'Retry')
+ btn_retry.touch()
+ continue
+ text_connecting = vc.findViewWithText(u'Connecting to server.')
+ if text_connecting:
+ continue
+ server_connected = True
+ except ViewNotFoundException:
+ pass
+
# Accept Data Sync and Download content
-btn_accept_1 = vc.findViewByIdOrRaise("android:id/button1")
-btn_accept_1.touch()
+time.sleep(15)
+dump_always()
+btn_accept_1 = vc.findViewById("android:id/button1")
+if btn_accept_1:
+ btn_accept_1.touch()
# Wait for download to finish
finished = False
while (not finished):
- time.sleep(50)
try:
- vc.dump(window='-1')
+ time.sleep(50)
+ dump_always()
vc.findViewByIdOrRaise("android:id/content")
except ViewNotFoundException:
finished = True
@@ -56,13 +98,13 @@ test.touch()
# Wait while benchmark is running
finished = False
while (not finished):
- time.sleep(50)
try:
- vc.dump(window='-1')
+ time.sleep(50)
+ dump_always()
vc.findViewByIdOrRaise("com.glbenchmark.glbenchmark27:id/cell_result_maincolumn")
except ViewNotFoundException:
finished = True
- pass
+ pass
except RuntimeError as e:
print e
print "benchmark finished"
diff --git a/andebenchpro/execute.sh b/andebenchpro/execute.sh
index 6cfb5df..47a6e03 100755
--- a/andebenchpro/execute.sh
+++ b/andebenchpro/execute.sh
@@ -45,9 +45,9 @@ function get_result(){
result="pass"
fi
if [ "X${result}" = "Xpass" ]; then
- output_test_result "${key}" "pass" "${value}" "${units}"
+ output_test_result "andebenchpro_${key}" "pass" "${value}" "${units}"
else
- output_test_result "${key}" "fail"
+ output_test_result "andebenchpro_${key}" "fail"
fi
done
}
diff --git a/antutu2/vc.py b/antutu2/vc.py
index d9e1ee6..ef3f258 100755
--- a/antutu2/vc.py
+++ b/antutu2/vc.py
@@ -6,13 +6,37 @@ from subprocess import call
from com.dtmilano.android.viewclient import ViewClient, ViewNotFoundException
+parent_dir = os.path.realpath(os.path.dirname(__file__))
+f_output_result="%s/../common/output-test-result.sh" % parent_dir
+
+default_unit = 'points'
+
kwargs1 = {'verbose': False, 'ignoresecuredevice': False}
device, serialno = ViewClient.connectToDeviceOrExit(**kwargs1)
kwargs2 = {'startviewserver': True, 'forceviewserveruse': False, 'autodump': False, 'ignoreuiautomatorkilled': True, 'compresseddump': False}
vc = ViewClient(device, serialno, **kwargs2)
-time.sleep(5)
-vc.dump(window='-1')
+
+def dump_always():
+ success = False
+ while not success:
+ try:
+ vc.dump()
+ success = True
+ except RuntimeError:
+ print("Got RuntimeError when call vc.dump()")
+ time.sleep(5)
+ except ValueError:
+ print("Got ValueError when call vc.dump()")
+ time.sleep(5)
+
+
+def output_result(test_name, measurement):
+ call([f_output_result, "antutu282_" + test_name, 'pass', measurement, default_unit])
+
+
+time.sleep(5)
+dump_always()
# release info and upgrade dialog are not presented
# if there is no connection to Internet
try:
@@ -22,7 +46,7 @@ except ViewNotFoundException:
pass
try:
- vc.dump(window='-1')
+ dump_always()
time.sleep(2)
button_ok = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/button_ok")
button_ok.touch()
@@ -30,12 +54,12 @@ except ViewNotFoundException:
pass
time.sleep(2)
-vc.dump(window='-1')
+dump_always()
button_test = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/btn_test")
button_test.touch()
time.sleep(2)
-vc.dump(window='-1')
+dump_always()
button_start_test = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/button_test")
button_start_test.touch()
@@ -43,7 +67,7 @@ finished = False
while(not finished):
time.sleep(1)
try:
- vc.dump(window='-1')
+ dump_always()
if vc.findViewById("com.antutu.ABenchMark:id/layoutScoresHeader"):
finished = True
except RuntimeError as e:
@@ -55,20 +79,20 @@ print "benchmark finished"
# close unnecessary windows if they appear
for index in range(0, 3):
time.sleep(1)
- vc.dump(window='-1')
+ dump_always()
if vc.findViewById("com.antutu.ABenchMark:id/num_1"):
break
else:
device.press('KEYCODE_BACK')
time.sleep(2)
-vc.dump(window='-1')
+dump_always()
header = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/layoutScoresHeader")
if not vc.findViewById("com.antutu.ABenchMark:id/layoutScores"):
header.touch()
time.sleep(2)
-vc.dump(window='-1')
+dump_always()
mem_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/text_mem")
cpu_int_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/text_int")
cpu_float_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/text_float")
@@ -77,18 +101,17 @@ threed_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/text_3d")
db_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/text_db")
sd_write_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/text_sdw")
sd_read_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/text_sdr")
-default_unit = 'Inapplicable'
-call(['lava-test-case', '"AnTuTu 2.8.2 CPU Integer Score"', '--result', 'pass', '--measurement', cpu_int_score.getText(), '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 2.8.2 CPU Float Score"', '--result', 'pass', '--measurement', cpu_float_score.getText(), '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 2.8.2 2D Score"', '--result', 'pass', '--measurement', twod_score.getText(), '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 2.8.2 3D Score"', '--result', 'pass', '--measurement', threed_score.getText(), '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 2.8.2 Mem Score"', '--result', 'pass', '--measurement', mem_score.getText(), '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 2.8.2 DB Score"', '--result', 'pass', '--measurement', db_score.getText(), '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 2.8.2 SD Write Score"', '--result', 'pass', '--measurement', sd_write_score.getText(), '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 2.8.2 SD Read Score"', '--result', 'pass', '--measurement', sd_read_score.getText(), '--units', default_unit])
+output_result("CPU_Integer_Score", cpu_int_score.getText())
+output_result("CPU_Float_Score", cpu_float_score.getText())
+output_result("2D_Score", twod_score.getText())
+output_result("3D_Score", threed_score.getText())
+output_result("Mem_Score", mem_score.getText())
+output_result("DB_Score", db_score.getText())
+output_result("SD_Write_Score", sd_write_score.getText().strip().split(' ').pop())
+output_result("SD_Read_Score", sd_read_score.getText().strip().split(' ').pop())
total_score = int(cpu_int_score.getText().strip()) + int(cpu_float_score.getText().strip()) + int(twod_score.getText().strip())
total_score = total_score + int(threed_score.getText().strip()) + int(mem_score.getText().strip()) + int(db_score.getText().strip())
total_score = total_score + int(sd_write_score.getText().strip().split(' ').pop()) + int(sd_read_score.getText().strip().split(' ').pop())
-call(['lava-test-case', '"AnTuTu 2.8.2 Total Score"', '--result', 'pass', '--measurement', str(total_score), '--units', default_unit])
+output_result("total_score", str(total_score))
diff --git a/antutu3/vc.py b/antutu3/vc.py
index ea8115e..156498a 100755
--- a/antutu3/vc.py
+++ b/antutu3/vc.py
@@ -6,6 +6,11 @@ from subprocess import call
from com.dtmilano.android.viewclient import ViewClient, ViewNotFoundException
+parent_dir = os.path.realpath(os.path.dirname(__file__))
+f_output_result="%s/../common/output-test-result.sh" % parent_dir
+
+default_unit = 'points'
+
kwargs1 = {'verbose': False, 'ignoresecuredevice': False}
device, serialno = ViewClient.connectToDeviceOrExit(**kwargs1)
kwargs2 = {'startviewserver': True, 'forceviewserveruse': False, 'autodump': False, 'ignoreuiautomatorkilled': True, 'compresseddump': False}
@@ -25,6 +30,10 @@ def dump_always():
time.sleep(5)
+def output_result(test_name, measurement):
+ call([f_output_result, "antutu332_" + test_name, 'pass', measurement, default_unit])
+
+
# release info and upgrade dialog are not presented
# if there is no connection to Internet
try:
@@ -92,18 +101,17 @@ threed_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/text_3d")
db_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/text_db")
sd_write_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/text_sdw")
sd_read_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/text_sdr")
-default_unit = 'points'
-call(['lava-test-case', '"AnTuTu 3.3.2 CPU Integer Score"', '--result', 'pass', '--measurement', cpu_int_score.getText(), '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 3.3.2 CPU Float Score"', '--result', 'pass', '--measurement', cpu_float_score.getText(), '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 3.3.2 2D Score"', '--result', 'pass', '--measurement', twod_score.getText().split(" ")[1], '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 3.3.2 3D Score"', '--result', 'pass', '--measurement', threed_score.getText().split(" ")[1], '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 3.3.2 Mem Score"', '--result', 'pass', '--measurement', mem_score.getText(), '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 3.3.2 DB Score"', '--result', 'pass', '--measurement', db_score.getText(), '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 3.3.2 SD Write Score"', '--result', 'pass', '--measurement', sd_write_score.getText(), '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 3.3.2 SD Read Score"', '--result', 'pass', '--measurement', sd_read_score.getText(), '--units', default_unit])
+output_result("CPU_Integer_Score", cpu_int_score.getText())
+output_result("CPU_Float_Score", cpu_float_score.getText())
+output_result("2D_Score", twod_score.getText().strip().split(" ")[1])
+output_result("3D_Score", threed_score.getText().strip().split(" ")[1])
+output_result("Mem_Score", mem_score.getText())
+output_result("DB_Score", db_score.getText())
+output_result("SD_Write_Score", sd_write_score.getText().strip().split(' ').pop())
+output_result("SD_Read_Score", sd_read_score.getText().strip().split(' ').pop())
total_score = int(cpu_int_score.getText().strip()) + int(cpu_float_score.getText().strip()) + int(twod_score.getText().strip().split(" ")[1])
total_score = total_score + int(threed_score.getText().strip().split(" ")[1]) + int(mem_score.getText().strip()) + int(db_score.getText().strip())
total_score = total_score + int(sd_write_score.getText().strip().split(' ').pop()) + int(sd_read_score.getText().strip().split(' ').pop())
-call(['lava-test-case', '"AnTuTu 3.3.2 Total Score"', '--result', 'pass', '--measurement', str(total_score), '--units', default_unit])
+output_result("total_score", str(total_score))
diff --git a/antutu4/execute.sh b/antutu4/execute.sh
deleted file mode 100755
index 9dc77ab..0000000
--- a/antutu4/execute.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/bash
-
-#need to be defined for different benchmark apks
-activity="com.antutu.ABenchMark/.ABenchMarkStart"
-apk_file_name="antutu_benchmark_4.0.3.apk"
-test_method="python vc.py"
-apk_package="com.antutu.ABenchMark"
-
-#following should no need to modify
-parent_dir=`dirname ${0}`
-source "${parent_dir}/../common/common.sh"
-main "$@"
-
-
-
-
diff --git a/antutu4/vc.py b/antutu4/vc.py
deleted file mode 100755
index 9d983ff..0000000
--- a/antutu4/vc.py
+++ /dev/null
@@ -1,93 +0,0 @@
-import re
-import sys
-import os
-import time
-from subprocess import call
-
-from com.dtmilano.android.viewclient import ViewClient, ViewNotFoundException
-
-kwargs1 = {'verbose': False, 'ignoresecuredevice': False}
-device, serialno = ViewClient.connectToDeviceOrExit(**kwargs1)
-kwargs2 = {'startviewserver': True, 'forceviewserveruse': False, 'autodump': False, 'ignoreuiautomatorkilled': True, 'compresseddump': False}
-vc = ViewClient(device, serialno, **kwargs2)
-
-#Wait while application loads
-time.sleep(2)
-vc.dump(window='-1')
-
-# Close the update dialog if it exists
-try:
- cncl_btn = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/negative_btn")
- cncl_btn.touch()
-except ViewNotFoundException:
- pass
-
-#Start test button
-vc.dump(window='-1')
-start_button = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/test_btn")
-start_button.touch()
-
-#Start all test button
-vc.dump(window='-1')
-start_test_button = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/test_all_btn")
-start_test_button.touch()
-
-#Wait while antutu4 is running benchmark
-finished = False
-while(not finished):
- try:
- time.sleep(1)
- vc.dump('-1')
- progress_button = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/detail_content")
- finished = True
- except ViewNotFoundException:
- pass
- except ValueError:
- print "Problem with UIAutomator"
-
-print("Benchmark Finished")
-
-#Change view to Test tab
-vc.dump(window='-1')
-start_test_tab_button = vc.findViewByIdOrRaise("id/no_id/16")
-start_test_tab_button.touch()
-#GEt detail scores
-vc.dump(window='-1')
-detail_detail_button = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/detail_btn")
-detail_detail_button.touch()
-
-#start_button = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/score_text")
-#text = start_button.getText()
-
-#Get the score
-vc.dump(window='-1')
-multitask_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/ue_multitask_text")
-dalvik_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/ue_dalvik_text")
-cpu_integer_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/cpu_int_text")
-cpu_float_point_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/cpu_float_text")
-ram_operation_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/mem_text")
-ram_speed_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/ram_text")
-twod_graphics_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/gpu_2d_text")
-threed_graphics_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/gpu_3d_text")
-storage_io_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/io_sdw_text")
-database_io_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/io_db_text")
-default_unit = 'Points'
-
-call(['lava-test-case', '"AnTuTu 4.0.3 UX Multitask Score"', '--result', 'pass', '--measurement', multitask_score.getText(), '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 4.0.3 UX Dalvik Score"', '--result', 'pass', '--measurement', dalvik_score.getText(), '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 4.0.3 CPU Integer Score"', '--result', 'pass', '--measurement', cpu_integer_score.getText(), '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 4.0.3 CPU Float-Point Score"', '--result', 'pass', '--measurement', cpu_float_point_score.getText(), '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 4.0.3 RAM Operation Score"', '--result', 'pass', '--measurement', ram_operation_score.getText(), '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 4.0.3 RAM Speed Score"', '--result', 'pass', '--measurement', ram_speed_score.getText(), '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 4.0.3 GPU 2D Graphics Score"', '--result', 'pass', '--measurement', twod_graphics_score.getText().split(" ")[1], '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 4.0.3 GPU 3D Graphics Score"', '--result', 'pass', '--measurement', threed_graphics_score.getText().split(" ")[1], '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 4.0.3 IO Storage I/O Score"', '--result', 'pass', '--measurement', storage_io_score.getText(), '--units', default_unit])
-call(['lava-test-case', '"AnTuTu 4.0.3 IO Database I/O Score"', '--result', 'pass', '--measurement', database_io_score.getText(), '--units', default_unit])
-
-total_score = 0
-total_score = total_score + int(multitask_score.getText().strip()) + int(dalvik_score.getText().strip())
-total_score = total_score + int(cpu_integer_score.getText().strip()) + int(cpu_float_point_score.getText().strip())
-total_score = total_score + int(ram_operation_score.getText().strip()) + int(ram_speed_score.getText().strip())
-total_score = total_score + int(twod_graphics_score.getText().strip().split(" ")[1]) + int(threed_graphics_score.getText().strip().split(" ")[1])
-total_score = total_score + int(storage_io_score.getText().strip().split(' ').pop()) + int(database_io_score.getText().strip().split(' ').pop())
-call(['lava-test-case', '"AnTuTu 4.0.3 Total Score"', '--result', 'pass', '--measurement', str(total_score), '--units', default_unit])
diff --git a/antutu5.7/vc.py b/antutu5.7/vc.py
index db2996e..0250040 100755
--- a/antutu5.7/vc.py
+++ b/antutu5.7/vc.py
@@ -73,11 +73,18 @@ def run_test(prefix=""):
start_test_button.touch()
else:
retest_btn = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/retest_text")
- retest_btn.touch()
+ if retest_btn:
+ retest_btn.touch()
+ else:
+ print("com.antutu.ABenchMark:id/retest_text not found")
+
time.sleep(2)
dump_always()
retest_btn = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/retest_btn")
- retest_btn.touch()
+ if retest_btn:
+ retest_btn.touch()
+ else:
+ print("com.antutu.ABenchMark:id/retest_btn not found")
time.sleep(5)
@@ -148,28 +155,30 @@ def run_test(prefix=""):
def get_result(prefix=""):
if prefix:
- prefix = "%s_" % prefix
+ prefix = "antutu57_%s_" % prefix
+ else:
+ prefix="antutu57_"
#Get the score
antutu_sum = 0;
dump_always()
multitask_view = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/ue_multitask_text")
multitask_score = multitask_view.getText().strip()
- call([f_output_result, "%santutu_ue_multitask" % prefix, 'pass', multitask_score, 'points'])
+ call([f_output_result, "%s_ue_multitask" % prefix, 'pass', multitask_score, 'points'])
antutu_sum = antutu_sum + int(multitask_score)
runtime_view = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/ue_dalvik_text")
runtime_score = runtime_view.getText().strip()
- call([f_output_result, "%santutu_ue_runtime" % prefix, 'pass', runtime_score, 'points'])
+ call([f_output_result, "%s_ue_runtime" % prefix, 'pass', runtime_score, 'points'])
antutu_sum = antutu_sum + int(runtime_score)
cpu_multi_integer_view = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/cpu_int_text")
cpu_multi_integer_score = cpu_multi_integer_view.getText().strip()
- call([f_output_result, "%santutu_cpu_integer" % prefix, 'pass', cpu_multi_integer_score, 'points'])
+ call([f_output_result, "%s_cpu_integer" % prefix, 'pass', cpu_multi_integer_score, 'points'])
antutu_sum = antutu_sum + int(cpu_multi_integer_score)
cpu_multi_float_point_view = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/cpu_float_text")
cpu_multi_float_point_score = cpu_multi_float_point_view.getText().strip()
- call([f_output_result, "%santutu_cpu_float_point" % prefix, 'pass', cpu_multi_float_point_score, 'points'])
+ call([f_output_result, "%s_cpu_float_point" % prefix, 'pass', cpu_multi_float_point_score, 'points'])
antutu_sum = antutu_sum + int(cpu_multi_float_point_score)
device.press('DPAD_DOWN')
@@ -179,28 +188,28 @@ def get_result(prefix=""):
dump_always()
cpu_single_integer_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/cpu_int_text2")
- call([f_output_result, "%santutu_single_thread_integer" % prefix, 'pass', cpu_single_integer_score.getText().strip(), 'points'])
+ call([f_output_result, "%s_single_thread_integer" % prefix, 'pass', cpu_single_integer_score.getText().strip(), 'points'])
antutu_sum = antutu_sum + int(cpu_single_integer_score.getText().strip())
cpu_single_float_point_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/cpu_float_text2")
- call([f_output_result, "%santutu_single_float_point" % prefix, 'pass', cpu_single_float_point_score.getText().strip(), 'points'])
+ call([f_output_result, "%s_single_float_point" % prefix, 'pass', cpu_single_float_point_score.getText().strip(), 'points'])
antutu_sum = antutu_sum + int(cpu_single_float_point_score.getText().strip())
ram_operation_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/mem_text")
- call([f_output_result, "%santutu_single_ram_operation" % prefix, 'pass', ram_operation_score.getText().strip(), 'points'])
+ call([f_output_result, "%s_single_ram_operation" % prefix, 'pass', ram_operation_score.getText().strip(), 'points'])
antutu_sum = antutu_sum + int(ram_operation_score.getText().strip())
ram_speed_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/ram_text")
- call([f_output_result, "%santutu_single_ram_speed" % prefix, 'pass', ram_speed_score.getText().strip(), 'points'])
+ call([f_output_result, "%s_single_ram_speed" % prefix, 'pass', ram_speed_score.getText().strip(), 'points'])
antutu_sum = antutu_sum + int(ram_speed_score.getText().strip())
twod_graphics_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/gpu_2d_text")
- call([f_output_result, "%santutu_2D_graphics" % prefix, 'pass', twod_graphics_score.getText().strip(), 'points'])
+ call([f_output_result, "%s_2D_graphics" % prefix, 'pass', twod_graphics_score.getText().strip(), 'points'])
antutu_sum = antutu_sum + int(twod_graphics_score.getText().strip())
threed_graphics_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/gpu_3d_text")
score_3d = threed_graphics_score.getText().strip()
- call([f_output_result, "%santutu_3D_graphics" % prefix, 'pass', score_3d.split(" ").pop(), 'points'])
+ call([f_output_result, "%s_3D_graphics" % prefix, 'pass', score_3d.split(" ").pop(), 'points'])
antutu_sum = antutu_sum + int(score_3d.split(" ").pop())
storage_io_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/io_sdw_text")
@@ -208,10 +217,10 @@ def get_result(prefix=""):
antutu_sum = antutu_sum + int(storage_io_score.getText().strip())
database_io_score = vc.findViewByIdOrRaise("com.antutu.ABenchMark:id/io_db_text")
- call([f_output_result, "%santutu_database_io" % prefix, 'pass', database_io_score.getText().strip(), 'points'])
+ call([f_output_result, "%s_database_io" % prefix, 'pass', database_io_score.getText().strip(), 'points'])
antutu_sum = antutu_sum + int(database_io_score.getText().strip())
- call([f_output_result, "%santutu_total" % prefix, 'pass', str(antutu_sum), 'points'])
+ call([f_output_result, "%s_total" % prefix, 'pass', str(antutu_sum), 'points'])
def main():
diff --git a/application-benchmark-host.yaml b/application-benchmark-host.yaml
index 00152d3..2dfcab8 100644
--- a/application-benchmark-host.yaml
+++ b/application-benchmark-host.yaml
@@ -24,7 +24,7 @@ metadata:
params:
APP_CONFIG_LIST: ""
- LOOP_COUNT: 12
+ LOOP_COUNT: 13
COLLECT_STREAMLINE: "false"
run:
diff --git a/benchmarkpi/vc.py b/benchmarkpi/vc.py
index 0902061..9cb9648 100755
--- a/benchmarkpi/vc.py
+++ b/benchmarkpi/vc.py
@@ -57,7 +57,7 @@ else:
sys.exit(1)
# Submit the test result to LAVA
-collect_score(benchmark_name, run_result, score_number, score_unit)
+collect_score("benchmarkpi_" + benchmark_name, run_result, score_number, score_unit)
# Exit the app
vc.dump(window='-1')
diff --git a/cf-bench/vc.py b/cf-bench/vc.py
index 03b9841..cd4f8f3 100755
--- a/cf-bench/vc.py
+++ b/cf-bench/vc.py
@@ -34,10 +34,10 @@ def get_score_with_content_desc(vc, content_desc, offset=1):
score_text = score.getText()
if score_text.find("%") > 0:
score_value, units = score_text.split(" ")
- call([f_output_result, content_desc, 'pass', score_value, units])
+ call([f_output_result, "cfbench_" + content_desc.replace(" ", "_"), 'pass', score_value, units])
else:
- call([f_output_result, content_desc, 'pass', score_text, default_unit])
+ call([f_output_result, "cfbench_" + content_desc.replace(" ", "_"), 'pass', score_text, default_unit])
except ViewNotFoundException:
print "%s not found" % (content_desc)
pass
diff --git a/common/common2.sh b/common/common2.sh
index 1dcda37..67c1c4d 100644
--- a/common/common2.sh
+++ b/common/common2.sh
@@ -20,9 +20,10 @@ D_SCREENSHOT="${D_RAWDATA}/screenshots"
COLLECT_STREAMLINE=false
SERIAL=""
G_APPS=""
-G_LOOP_COUNT=1
+G_LOOP_COUNT=13
[ -z "${G_RECORD_LOCAL_CSV}" ] && G_RECORD_LOCAL_CSV=TRUE
[ -z "${G_VERBOSE_OUTPUT}" ] && G_VERBOSE_OUTPUT=FALSE
+[ -z "${G_RECORD_STATISTICS}" ] && G_RECORD_STATISTICS=TRUE
BASE_URL=""
## Description:
@@ -407,8 +408,9 @@ func_print_usage_common(){
echo " --base-url: specify the based url where the apks will be gotten from"
echo " --loop-count: specify the number that how many times should be run for each application to get the average result, default is 12"
echo " --record-csv: specify if record the result in csv format file."
- echo " Only record the file when TRUE is specified."
- echo " --verbose-output: output the result and lava-test-case command for each test case each time it is run"
+ echo " Only record the file when TRUE is specified. Default is TRUE"
+ echo " --verbose-output: output the result and lava-test-case command for each test case each time it is run. Default is FALSE."
+ echo " --record-statistics: output the statistics data as the test result. default is TRUE"
echo " --streamline: specify if we need to collect the streamline data, true amd false can be specified, default is fasle"
echo " APP_CONFIG_LIST: specify the configurations for each application as following format:"
echo " APK_NAME,PACKAGE_NAME/APP_ACTIVITY,APP_NICKNAME"
@@ -424,6 +426,7 @@ func_parse_parameters_common(){
local para_apps=""
local para_record_csv=""
local para_verbose_output=""
+ local para_record_statistics=""
while [ -n "$1" ]; do
case "X$1" in
X--base-url)
@@ -434,6 +437,14 @@ func_parse_parameters_common(){
fi
shift 2
;;
+ X--record-statistics)
+ para_record_statistics=$2
+ if [ -z "${para_record_statistics}" ]; then
+ echo "Please specify the value for --record-statistics option"
+ exit 1
+ fi
+ shift 2
+ ;;
X--record-csv)
para_record_csv=$2
if [ -z "${para_record_csv}" ]; then
@@ -517,6 +528,12 @@ func_parse_parameters_common(){
elif [ -n "${para_record_csv}" ];then
G_VERBOSE_OUTPUT=FALSE
fi
+
+ if [ -n "${para_record_statistics}" ] && [ "X${para_record_statistics}" = "XTRUE" ];then
+ G_RECORD_STATISTICS=TRUE
+ elif [ -n "${para_record_statistics}" ]; then
+ G_RECORD_STATISTICS=FALSE
+ fi
}
## Description:
@@ -569,9 +586,26 @@ common_main(){
if [ -f "${F_RAW_DATA_CSV}" ]; then
sort ${F_RAW_DATA_CSV}|tr ' ' '_'|tr -d '=' >${F_RAW_DATA_CSV}.sort
- statistic ${F_RAW_DATA_CSV}.sort 2 |tee ${F_STATISTIC_DATA_CSV}
+ statistic ${F_RAW_DATA_CSV}.sort 2 3|tee ${F_STATISTIC_DATA_CSV}
sed -i 's/=/,/' "${F_STATISTIC_DATA_CSV}"
rm -f ${F_RAW_DATA_CSV}.sort
+
+ if [ "X${G_RECORD_STATISTICS}" = "XTRUE" ] ;then
+ G_RECORD_STATISTICS="FALSE"
+ G_RECORD_LOCAL_CSV="FALSE"
+ local old_record_local_csv="${G_RECORD_LOCAL_CSV}"
+ for line in $(cat "${F_STATISTIC_DATA_CSV}"); do
+ if ! echo "$line"|grep -q ,; then
+ continue
+ fi
+ local key=$(echo $line|cut -d, -f1)
+ local measurement=$(echo $line|cut -d, -f2)
+ local units=$(echo $line|cut -d, -f3)
+ output_test_result "${key}" "pass" "${measurement}" "${units}"
+ done
+ G_RECORD_STATISTICS=TRUE
+ G_RECORD_LOCAL_CSV="${old_record_local_csv}"
+ fi
fi
rm -fr "${F_RAWDAT_ZIP}"
@@ -597,6 +631,8 @@ common_main(){
## rawdata/final_result.csv
## G_VERBOSE_OUTPUT: when this environment variable is set to "TRUE", and only when it is TRUE,
## the verbose information about the result will be output
+## G_RECORD_STATISTICS: only when this environment variable is set to "FALSE"
+## will lava-test-case be executed to report each raw data point to LAVA.
output_test_result(){
local test_name=$1
local result=$2
@@ -609,11 +645,12 @@ output_test_result(){
local output=""
local lava_paras=""
local output_csv=""
- test_name=$(echo ${test_name}|tr ' ' '_')
+ test_name=$(echo ${test_name}|tr ' ,' '_')
if [ -z "$units" ]; then
units="points"
fi
+ units=$(echo ${units}|tr ' ,' '_')
if [ -z "${measurement}" ]; then
output="${test_name}=${result}"
@@ -622,7 +659,7 @@ output_test_result(){
output="${test_name}=${measurement} ${units}"
lava_paras="${test_name} --result ${result} --measurement ${measurement} --units ${units}"
## should support units after measurement format
- output_csv="${test_name},${measurement}"
+ output_csv="${test_name},${measurement},${units}"
fi
if [ "X${G_VERBOSE_OUTPUT}" = "XTRUE" ];then
@@ -630,7 +667,7 @@ output_test_result(){
fi
local cmd="lava-test-case"
- if [ -n "$(which $cmd)" ];then
+ if [ "X${G_RECORD_STATISTICS}" = "XFALSE" ] && [ -n "$(which $cmd)" ];then
$cmd ${lava_paras}
elif [ "X${G_VERBOSE_OUTPUT}" = "XTRUE" ];then
echo "$cmd ${lava_paras}"
diff --git a/common/statistic_average.sh b/common/statistic_average.sh
index c8ac753..f5bdbfb 100644
--- a/common/statistic_average.sh
+++ b/common/statistic_average.sh
@@ -40,6 +40,29 @@ f_min(){
fi
}
+standard_deviation_error(){
+ local average=$1
+ if [ -z "${average}" ]; then
+ return
+ fi
+ shift
+
+ local values=$1
+ if [ -z "${values}" ]; then
+ return
+ fi
+ shift
+ local deviations_sum=0
+ local count=0
+ for s_value in $values ; do
+ s_deviation=$(echo "${average},${s_value}"|awk -F, '{printf "%.2f",($2-$1)^2;}')
+ deviations_sum=$(echo "${deviations_sum},${s_deviation}"|awk -F, '{printf "%.2f",$1+$2;}')
+ count=$(echo "${count},1"|awk -F, '{printf $1+$2;}')
+ done
+ local deviation=$(echo "${deviations_sum},${count}"|awk -F, '{printf "%.2f",sqrt($1/$2);}')
+ local std_err=$(echo "${deviation},${count}"|awk -F, '{printf "%.2f",$1/sqrt($2);}')
+ echo "${deviation},${std_err}"
+}
## Description:
## calculate the average value for specified csv file.
## The first field of that csv file should be the key/name of that line,
@@ -60,12 +83,15 @@ statistic(){
if [ -z "$field_no" ]; then
field_no=2
fi
+
+ local units_field_no=$3
+ local units=""
+
local total=0
- local max=0
- local min=0
local old_key=""
local new_key=""
local count=0
+ local values=""
for line in $(cat "${f_data}"); do
if ! echo "$line"|grep -q ,; then
continue
@@ -73,32 +99,47 @@ statistic(){
new_key=$(echo $line|cut -d, -f1)
value=$(echo $line|cut -d, -f${field_no})
if [ "X${new_key}" = "X${old_key}" ]; then
- total=$(echo "scale=2; ${total}+${value}"|bc -s)
+ total=$(echo ${total},${value}|awk -F, '{printf "%.2f",$1+$2;}')
+ values="${values} ${value}"
count=$(echo "$count + 1"|bc)
- max=$(f_max "$max" "$value")
- min=$(f_min "$min" "$value")
else
if [ "X${old_key}" != "X" ]; then
- if [ $count -ge 4 ]; then
- average=$(echo "scale=2; ($total-$max-$min)/($count-2)"|bc)
+ local average=$(echo ${total},${count}|awk -F, '{printf "%.2f",$1/$2;}')
+ local sigma_stderr=$(standard_deviation_error "${average}" "${values}")
+ local sigma=$(echo ${sigma_stderr}|cut -d, -f1)
+ local std_err=$(echo ${sigma_stderr}|cut -d, -f2)
+ if [ -z "${units}" ]; then
+ echo "${old_key}=${average}"
+ echo "${old_key}_sigma=${sigma}"
+ echo "${old_key}_std_err=${std_err}"
else
- average=$(echo "scale=2; $total/$count"|bc)
+ echo "${old_key}=${average},${units}"
+ echo "${old_key}_sigma=${sigma},${units}"
+ echo "${old_key}_std_err=${std_err},${units}"
fi
- echo "$old_key=$average"
fi
total="${value}"
- max="${value}"
- min="${value}"
+ values="${value}"
old_key="${new_key}"
count=1
+ if [ -n "${units_field_no}" ]; then
+ units=$(echo $line|cut -d, -f${units_field_no})
+ fi
fi
done
if [ "X${new_key}" != "X" ]; then
- if [ $count -ge 4 ]; then
- average=$(echo "scale=2; ($total-$max-$min)/($count-2)"|bc)
+ local average=$(echo ${total},${count}|awk -F, '{printf "%.2f",$1/$2;}')
+ local sigma_stderr=$(standard_deviation_error "${average}" "${values}")
+ local sigma=$(echo ${sigma_stderr}|cut -d, -f1)
+ local std_err=$(echo ${sigma_stderr}|cut -d, -f2)
+ if [ -z "${units}" ]; then
+ echo "${old_key}=${average}"
+ echo "${old_key}_sigma=${sigma}"
+ echo "${old_key}_std_err=${std_err}"
else
- average=$(echo "scale=2; $total/$count"|bc)
+ echo "${old_key}=${average},${units}"
+ echo "${old_key}_sigma=${sigma},${units}"
+ echo "${old_key}_std_err=${std_err},${units}"
fi
- echo "$new_key=$average"
fi
}
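A worked check of the new function: for values 100, 102 and 104 with average
102.00, the squared deviations are 4.00, 0.00 and 4.00, so deviations_sum is
8.00 with count 3, giving sigma = sqrt(8.00/3) ≈ 1.63 and std_err =
1.63/sqrt(3) ≈ 0.94; standard_deviation_error echoes "1.63,0.94". Note this is
the population standard deviation (dividing by n rather than n-1), and the new
statistics replace the old behaviour of discarding the max and min before
averaging whenever at least four samples were collected.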
diff --git a/gearses2eclair/vc.py b/gearses2eclair/vc.py
index 6447dd3..8b1181c 100755
--- a/gearses2eclair/vc.py
+++ b/gearses2eclair/vc.py
@@ -6,10 +6,11 @@ import time
from subprocess import call
parent_dir = os.path.realpath(os.path.dirname(__file__))
+f_output_result="%s/../common/output-test-result.sh" % parent_dir
def collect_score(benchmark_name, run_result, score_number, score_unit):
- call(['lava-test-case', benchmark_name, '--result', run_result, '--measurement', str(score_number), '--units', score_unit])
+ call([f_output_result, benchmark_name, run_result, str(score_number), score_unit])
benchmark_name = "GearsES2eclair"
time.sleep(60)
@@ -51,4 +52,4 @@ else:
sys.exit(1)
# Submit the test result to LAVA
-collect_score(benchmark_name, run_result, score_number, score_unit)
+collect_score("gearses2eclair_" + benchmark_name, run_result, score_number, score_unit)
diff --git a/geekbench3/vc.py b/geekbench3/vc.py
index 7c77006..1d35188 100755
--- a/geekbench3/vc.py
+++ b/geekbench3/vc.py
@@ -6,12 +6,16 @@ from subprocess import call
from com.dtmilano.android.viewclient import ViewClient, ViewNotFoundException
curdir = os.path.realpath(os.path.dirname(__file__))
+f_output_result="%s/../common/output-test-result.sh" % curdir
+
def collect_score(testcase, run_result):
- call(['lava-test-case', testcase, '--result', run_result])
+ call([f_output_result, "geekbench3_" + testcase, run_result])
+
def collect_score_with_measurement(testcase, run_result, score_number, score_unit):
- call(['lava-test-case', testcase, '--result', run_result, '--measurement', str(score_number), '--units', score_unit])
+ call([f_output_result, "geekbench3_" + testcase, run_result, str(score_number), score_unit])
+
def all_fail():
print testcase_run + " FAILED!"
diff --git a/glbenchmark-2.5.1/vc.py b/glbenchmark-2.5.1/vc.py
index 384a6f4..e810b19 100755
--- a/glbenchmark-2.5.1/vc.py
+++ b/glbenchmark-2.5.1/vc.py
@@ -9,9 +9,11 @@ from subprocess import call
from com.dtmilano.android.viewclient import ViewClient
curdir = os.path.realpath(os.path.dirname(__file__))
+f_output_result="%s/../common/output-test-result.sh" % curdir
+
def collect_score(benchmark_name, run_result, score_number, score_unit):
- call(['lava-test-case', benchmark_name, '--result', run_result, '--measurement', str(score_number), '--units', score_unit])
+ call([f_output_result, "glbenchmark251_" + benchmark_name, run_result, str(score_number), score_unit])
def getText(node):
children = node.childNodes
diff --git a/javawhetstone/vc.py b/javawhetstone/vc.py
index f5e5003..728d680 100644
--- a/javawhetstone/vc.py
+++ b/javawhetstone/vc.py
@@ -52,7 +52,7 @@ while(not finished):
value = elements[2]
else:
continue
- call([f_output_result, key, 'pass', value, units])
+ call([f_output_result, "javawhetstone_" + key, 'pass', value, units])
except ViewNotFoundException:
pass
except RuntimeError:
diff --git a/lava-android-benchmark-host.yaml b/lava-android-benchmark-host.yaml
index c540446..3bf5cc1 100644
--- a/lava-android-benchmark-host.yaml
+++ b/lava-android-benchmark-host.yaml
@@ -25,7 +25,12 @@ install:
params:
TEST_NAME: "geekbench"
- ITERATIONS: 1
+ APP_CONFIG_LIST: ""
+ LOOP_COUNT: 13
+ COLLECT_STREAMLINE: "false"
+ RECORD_CSV: "TRUE"
+ RECORD_STATISTICS: "TRUE"
+ VERBOSE_OUTPUT: "FALSE"
run:
steps:
@@ -37,6 +42,5 @@ run:
- adb connect $IPADDR
- adb wait-for-device
- adb shell id
- - cd $TEST_NAME
- - for (( LOOP=1; LOOP<=$ITERATIONS; LOOP++ )); do lava-test-case $TEST_NAME-execution-$LOOP --shell ./execute.sh --serial $IPADDR:5555; done;
+ - ./$TEST_NAME/execute.sh --serial $IPADDR:5555 --loop-count "$LOOP_COUNT" --streamline "$COLLECT_STREAMLINE" --verbose-output $VERBOSE_OUTPUT --record-statistics $RECORD_STATISTICS --record-csv $RECORD_CSV
- lava-sync $TEST_NAME-finished
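With the loop now handled inside execute.sh itself, the same entry point can
be run by hand outside LAVA. A hypothetical invocation mirroring the YAML step
above (benchmark directory and serial number made up for illustration):

    ./geekbench3/execute.sh --serial 192.168.1.10:5555 --loop-count 13 \
        --streamline false --record-csv TRUE --record-statistics TRUE --verbose-output FALSE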
diff --git a/linpack/vc.py b/linpack/vc.py
index 92f72b2..3e85456 100644
--- a/linpack/vc.py
+++ b/linpack/vc.py
@@ -29,8 +29,8 @@ while not start_single_button:
mflops_single_score = vc.findViewByIdOrRaise("com.greenecomputing.linpack:id/txtmflops_result")
time_single_score = vc.findViewByIdOrRaise("com.greenecomputing.linpack:id/txttime_result")
-call([f_output_result, 'LinpackMFLOPSSingleScore', 'pass', mflops_single_score.getText(), 'MFLOPS'])
-call([f_output_result, 'LinpackTimeSingleScore', 'pass', time_single_score.getText(), 'seconds'])
+call([f_output_result, 'Linpack_MFLOPSSingleScore', 'pass', mflops_single_score.getText(), 'MFLOPS'])
+call([f_output_result, 'Linpack_TimeSingleScore', 'pass', time_single_score.getText(), 'seconds'])
start_multi_button = vc.findViewByIdOrRaise("com.greenecomputing.linpack:id/btncalculate")
start_multi_button.touch()
@@ -45,5 +45,5 @@ while not start_single_button:
mflops_multi_score = vc.findViewByIdOrRaise("com.greenecomputing.linpack:id/txtmflops_result")
time_multi_score = vc.findViewByIdOrRaise("com.greenecomputing.linpack:id/txttime_result")
-call([f_output_result, 'LinpackMFLOPSMultiScore', 'pass', mflops_multi_score.getText(), 'MFLOPS'])
-call([f_output_result, 'LinpackTimeMultiScore', 'pass', time_multi_score.getText(), 'seconds'])
+call([f_output_result, 'Linpack_MFLOPSMultiScore', 'pass', mflops_multi_score.getText(), 'MFLOPS'])
+call([f_output_result, 'Linpack_TimeMultiScore', 'pass', time_multi_score.getText(), 'seconds'])
diff --git a/linpackjava/vc.py b/linpackjava/vc.py
index 234f5fd..17cc0fe 100644
--- a/linpackjava/vc.py
+++ b/linpackjava/vc.py
@@ -30,8 +30,8 @@ while(not finished):
finished = True
print "benchmark finished"
speed = res_match.group('measurement').strip()
- print "%s=%s MFLOPS" % ("LinpackJavaSpeed", speed)
- call([f_output_result, "LinpackJavaSpeed", 'pass', speed, 'MFLOPS'])
+ print "%s=%s MFLOPS" % ("LinpackJava_Speed", speed)
+ call([f_output_result, "LinpackJava_Speed", 'pass', speed, 'MFLOPS'])
except ViewNotFoundException:
pass
except RuntimeError:
diff --git a/quadrantpro/execute.sh b/quadrantpro/execute.sh
index d9025a4..dd59dc3 100755
--- a/quadrantpro/execute.sh
+++ b/quadrantpro/execute.sh
@@ -17,7 +17,7 @@ get_result(){
line=$(echo $line|sed 's/\r//g')
key=$(echo $line|cut -d, -f1)
value=$(echo $line|cut -d\, -f2)
- output_test_result "${key}" "pass" "${value}" "points"
+ output_test_result "quadrandpro_${key}" "pass" "${value}" "points"
done
}
diff --git a/scimark/vc.py b/scimark/vc.py
index d059c43..b89cb44 100644
--- a/scimark/vc.py
+++ b/scimark/vc.py
@@ -14,8 +14,21 @@ device, serialno = ViewClient.connectToDeviceOrExit(**kwargs1)
kwargs2 = {'startviewserver': True, 'forceviewserveruse': False, 'autodump': False, 'ignoreuiautomatorkilled': True, 'compresseddump': False}
vc = ViewClient(device, serialno, **kwargs2)
+def dump_always():
+ success = False
+ while not success:
+ try:
+ vc.dump()
+ success = True
+ except RuntimeError:
+ print("Got RuntimeError when call vc.dump()")
+ time.sleep(5)
+ except ValueError:
+ print("Got ValueError when call vc.dump()")
+ time.sleep(5)
+
time.sleep(5)
-vc.dump()
+dump_always()
btn_java_bench = vc.findViewWithTextOrRaise(u'Java bench')
btn_java_bench.touch()
@@ -25,18 +38,18 @@ finished = False
while(not finished):
try:
time.sleep(60)
- vc.dump()
+ dump_always()
results = vc.findViewByIdOrRaise("net.danielroggen.scimark:id/textViewResult")
if results.getText().find("Done") > 0:
finished = True
print "benchmark finished"
- for line in results.getText().split("\n"):
+ for line in results.getText().replace(": \n", ":").split("\n"):
line = str(line.strip())
key_val = line.split(":")
if len(key_val) == 2:
if key_val[0].strip() in keys:
key = key_val[0].strip().replace(' ', '_').replace('(', '').replace(')', '').replace(',', '')
- call([f_output_result, key, 'pass', key_val[1].strip(), 'Mflops'])
+ call([f_output_result, "scimark_" + key, 'pass', key_val[1].strip(), 'Mflops'])
except ViewNotFoundException:
pass
except RuntimeError:
diff --git a/sqlite/vc.py b/sqlite/vc.py
index 313118a..f23700c 100755
--- a/sqlite/vc.py
+++ b/sqlite/vc.py
@@ -6,13 +6,18 @@ from subprocess import call
from com.dtmilano.android.viewclient import ViewClient, ViewNotFoundException
+parent_dir = os.path.realpath(os.path.dirname(__file__))
+f_output_result="%s/../common/output-test-result.sh" % parent_dir
default_unit = 'points'
+
+
def get_score_with_text(vc, text, offset=1):
score_view = vc.findViewWithTextOrRaise(text)
score_uid = score_view.getUniqueId()
uid = int(re.search("id/no_id/(?P<uid>\d+)", score_uid).group('uid'))
score = vc.findViewByIdOrRaise("id/no_id/%s" % (uid + offset))
- call(['lava-test-case', text.strip(), '--result', 'pass', '--measurement', score.getText().strip(), '--units', default_unit])
+ call([f_output_result, "sqlite_" + text.strip().replace(" ", "_"), 'pass', score.getText().strip(), default_unit])
+
kwargs1 = {'verbose': False, 'ignoresecuredevice': False}
device, serialno = ViewClient.connectToDeviceOrExit(**kwargs1)