summaryrefslogtreecommitdiff
path: root/automated
diff options
context:
space:
mode:
authorNaresh Kamboju <naresh.kamboju@linaro.org>2017-01-20 16:05:14 +0530
committerMilosz Wasilewski <milosz.wasilewski@linaro.org>2017-01-24 09:16:48 +0000
commit76179f4e8efddd36e2910bf1edf166486c03447e (patch)
tree89af3db215c7a12eb914adfd31ce36359e97dbf8 /automated
parent594cd924125ae7b02a4cbb2fa0a77936fbda00c6 (diff)
downloadtest-definitions-pipe-76179f4e8efddd36e2910bf1edf166486c03447e.tar.gz
automated: Adding ltp-realtime
LTP realtime tests form an open-source testsuite for testing real-time Linux. The testsuite contains some functional tests and a few performance and latency measurement tests. Change-Id: I0488cdb639b5bc349b7b25f85d43339417eafeb8 Signed-off-by: Naresh Kamboju <naresh.kamboju@linaro.org>
Diffstat (limited to 'automated')
-rwxr-xr-xautomated/linux/ltp-realtime/ltp-realtime.py229
-rwxr-xr-xautomated/linux/ltp-realtime/ltp-realtime.sh91
-rw-r--r--automated/linux/ltp-realtime/ltp-realtime.yaml38
3 files changed, 358 insertions, 0 deletions
diff --git a/automated/linux/ltp-realtime/ltp-realtime.py b/automated/linux/ltp-realtime/ltp-realtime.py
new file mode 100755
index 0000000..8a5cda7
--- /dev/null
+++ b/automated/linux/ltp-realtime/ltp-realtime.py
@@ -0,0 +1,229 @@
+#!/usr/bin/python
+import re
+import sys
+import fileinput
+
+
+# extract a standard results block from the stream
+# Reads stdin until one Min/Max/Avg/StdDev block has been consumed and
+# prints a "<test_name><test_args>_<metric> pass <value> <units>" line
+# for each of the four metrics.  Relies on the module-level test_name
+# and test_args set by the main parse loop; exits non-zero if EOF is
+# reached before all four metrics have been seen.
+def standard_results():
+ minimum = re.compile("^Min:\s+(?P<min>[\d\.]+)\s+(?P<units>\w+)")
+ maximum = re.compile("^Max:\s+(?P<max>[\d\.]+)\s+(?P<units>\w+)")
+ average = re.compile("^Avg:\s+(?P<average>[\d\.]+)\s+(?P<units>\w+)")
+ standarddev = re.compile("^StdDev:\s+(?P<stddev>[\d\.]+)\s+(?P<units>\w+)")
+ # finished counts metrics matched so far; the block is complete at four.
+ finished = 0
+ for line in sys.stdin:
+ for parser in [maximum, minimum, average, standarddev]:
+ result = parser.search(line)
+ if result is not None:
+ if parser is minimum:
+ test_min = result.group('min')
+ units = result.group('units')
+ print "%s%s_min pass %s %s " % (test_name, test_args, test_min, units)
+ finished += 1
+ break
+ if parser is maximum:
+ test_max = result.group('max')
+ units = result.group('units')
+ finished += 1
+ print "%s%s_max pass %s %s " % (test_name, test_args, test_max, units)
+ break
+ if parser is average:
+ test_avg = result.group('average')
+ units = result.group('units')
+ print "%s%s_avg pass %s %s " % (test_name, test_args, test_avg, units)
+ finished += 1
+ break
+ if parser is standarddev:
+ test_stddev = result.group('stddev')
+ units = result.group('units')
+ print "%s%s_stddev pass %s %s " % (test_name, test_args, test_stddev, units)
+ finished += 1
+ break
+ else:
+ continue
+ if finished == 4:
+ return
+
+ # Reaching here means stdin was exhausted before a full block was parsed.
+ print "ERROR: Parser failed and ran to EOF"
+ sys.exit(-1)
+
+
+def result_results():
+ # Reads stdin until a single "Result: <word>" line is found and prints
+ # "<test_name>-<test_args> <result>" as the testcase's overall verdict.
+ # Exits non-zero if EOF is reached without a match.
+ results = re.compile("Result:\s+(?P<result>\w+)")
+ finished = 0
+ for line in sys.stdin:
+ for parser in [results]:
+ result = parser.search(line)
+ if result is not None:
+ if parser is results:
+ test_result = result.group('result')
+ print "%s-%s %s" % (test_name, test_args, test_result)
+ finished += 1
+ break
+ else:
+ continue
+ if finished == 1:
+ return
+
+ print "ERROR: Parser failed and ran to EOF"
+ sys.exit(-1)
+
+
+def sched_jitter_results():
+ # Reads stdin for sched_jitter's single "max jitter: <value> <units>"
+ # line and prints it as a "pass" measurement line.  Exits non-zero if
+ # EOF is reached without a match.
+ maximum = re.compile("^max jitter:\s+(?P<max>[\d\.]+)\s+(?P<units>\w+)")
+ finished = 0
+ for line in sys.stdin:
+ for parser in [maximum]:
+ result = parser.search(line)
+ if result is not None:
+ if parser is maximum:
+ test_max = result.group('max')
+ units = result.group('units')
+ print "%s%s_max_jitter pass %s %s" % (test_name, test_args, test_max, units)
+ finished += 1
+ break
+ else:
+ continue
+ if finished == 1:
+ # print "min:%s max:%s avg:%s stddev:%s" % (test_min, test_max, test_avg, test_stddev)
+ return
+
+ print "ERROR: Parser failed and ran to EOF"
+ sys.exit(-1)
+
+
+def pi_perf_results():
+ # Variant of standard_results() for pi_perf's output format, which
+ # labels its statistics "Min delay ="/"Max delay ="/"Average delay ="/
+ # "Standard Deviation =" instead of Min:/Max:/Avg:/StdDev:.  Prints one
+ # "pass" measurement line per metric and exits non-zero if EOF is
+ # reached before all four are seen.
+ minimum = re.compile("^Min delay =\s+(?P<min>[\d\.]+)\s+(?P<units>\w+)")
+ maximum = re.compile("^Max delay =\s+(?P<max>[\d\.]+)\s+(?P<units>\w+)")
+ average = re.compile("^Average delay =\s+(?P<average>[\d\.]+)\s+(?P<units>\w+)")
+ standarddev = re.compile("^Standard Deviation =\s+(?P<stddev>[\d\.]+)\s+(?P<units>\w+)")
+ finished = 0
+ for line in sys.stdin:
+ for parser in [maximum, minimum, average, standarddev]:
+ result = parser.search(line)
+ if result is not None:
+ if parser is minimum:
+ test_min = result.group('min')
+ units = result.group('units')
+ print "%s%s_min pass %s %s" % (test_name, test_args, test_min, units)
+ finished += 1
+ break
+ if parser is maximum:
+ test_max = result.group('max')
+ units = result.group('units')
+ print "%s%s_max pass %s %s" % (test_name, test_args, test_max, units)
+ finished += 1
+ break
+ if parser is average:
+ test_avg = result.group('average')
+ units = result.group('units')
+ print "%s%s_avg pass %s %s" % (test_name, test_args, test_avg, units)
+ finished += 1
+ break
+ if parser is standarddev:
+ test_stddev = result.group('stddev')
+ units = result.group('units')
+ print "%s%s_stddev pass %s %s" % (test_name, test_args, test_stddev, units)
+ finished += 1
+ break
+ else:
+ continue
+ if finished == 4:
+ return
+
+ print "ERROR: Parser failed and ran to EOF"
+ sys.exit(-1)
+
+
+# Handler that intentionally consumes nothing from the stream.
+def do_nothing():
+ return
+
+
+# names of the tests parsed out of the input stream, converted to function calls
+# Each function below is named after an LTP realtime testcase (with "-"
+# mapped to "_") and is invoked via globals() by the main parse loop.
+# Each one simply delegates to the parser(s) matching that testcase's
+# output format: standard_results for Min/Max/Avg/StdDev blocks,
+# result_results for a "Result:" verdict line, or a dedicated parser.
+def async_handler():
+ standard_results()
+ result_results()
+
+
+def tc_2():
+ result_results()
+
+
+def gtod_latency():
+ standard_results()
+
+
+def periodic_cpu_load_single():
+ standard_results()
+
+
+def sched_latency():
+ standard_results()
+
+
+def sched_jitter():
+ sched_jitter_results()
+
+
+def sched_football():
+ result_results()
+
+
+def rt_migrate():
+ result_results()
+
+
+# pthread_kill_latency emits both a statistics block and a verdict line.
+def pthread_kill_latency():
+ standard_results()
+ result_results()
+
+
+def prio_wake():
+ result_results()
+
+
+def pi_perf():
+ pi_perf_results()
+
+
+def prio_preempt():
+ result_results()
+
+
+def matrix_mult():
+ result_results()
+
+
+def periodic_cpu_load():
+ result_results()
+
+
+def async_handler_jk():
+ result_results()
+
+# Parse the input stream and turn test names into function calls to parse their
+# details
+
+test_start = re.compile("--- Running testcase (?P<name>[a-zA-Z0-9_-]+)\s+(?P<args>[a-zA-Z0-9_.\- ]*?)\s*---")
+test_finish = re.compile("The .* test appears to have completed.")
+
+for line in sys.stdin:
+ for parser in [test_start, test_finish]:
+ result = parser.search(line)
+ if result is not None:
+ if parser is test_start:
+ test_name = result.group('name')
+ func_name = result.group('name')
+ # Testcase names may contain "-", which is invalid in Python
+ # identifiers, so map it to "_" to form the handler name.
+ func_name = func_name.replace("-", "_")
+ test_args = result.group('args')
+ test_args = test_args.replace(" ", "-")
+ print
+ print "test_start = " + test_name + test_args
+ # Dispatch to the module-level handler named after the testcase;
+ # the handler reads the test's output from the same stdin stream.
+ globals()[func_name]()
+ break
+
+ if parser is test_finish:
+ print "test_finished = " + test_name + test_args
+ break
+ else:
+ continue
diff --git a/automated/linux/ltp-realtime/ltp-realtime.sh b/automated/linux/ltp-realtime/ltp-realtime.sh
new file mode 100755
index 0000000..0fcd40c
--- /dev/null
+++ b/automated/linux/ltp-realtime/ltp-realtime.sh
@@ -0,0 +1,91 @@
+#!/bin/bash
+
+# Pull in shared helpers (check_root, info_msg, dist_name, install_deps,
+# pipe0_status, ...) from the test-definitions shell library.
+. ../../lib/sh-test-lib
+OUTPUT="$(pwd)/output"
+RESULT_FILE="${OUTPUT}/result.txt"
+TMP_FILE="${OUTPUT}/tmp.txt"
+
+# Absolute path to this script. /home/user/bin/foo.sh
+SCRIPT="$(readlink -f "${0}")"
+# Absolute path this script is in. /home/user/bin
+SCRIPTPATH="$(dirname "${SCRIPT}")"
+echo "Script path is: ${SCRIPTPATH}"
+
+# List of test cases
+LTP_REALTIME_TESTS="async_handler gtod_latency hrtimer-prio matrix_mult measurement periodic_cpu_load pi_perf prio-preempt prio-wake pthread_kill_latency rt-migrate sched_football sched_jitter sched_latency thread_clock"
+
+# LTP version
+LTP_VERSION="20170116"
+SKIP_INSTALL="false"
+
+# Directory where install_ltp builds and run_ltp_realtime executes the suite.
+LTP_PATH=/opt/ltp
+
+# NOTE(review): usage() is defined but no option in the getopts string
+# below invokes it — confirm whether a -h handler was intended.
+usage() {
+ echo "Usage: ${0} [-T async_handler gtod_latency hrtimer-prio matrix_mult measurement periodic_cpu_load pi_perf prio-preempt prio-wake pthread_kill_latency rt-migrate sched_football sched_jitter sched_latency thread_clock] [-s <true|false>] [-v LTP_VERSION]" 1>&2
+ exit 0
+}
+
+# Command-line overrides for the defaults above.
+while getopts "T:s:v:" arg; do
+ case "$arg" in
+ T) LTP_REALTIME_TESTS="${OPTARG}";;
+ # SKIP_INSTALL is true in case of Open Embedded builds
+ # SKIP_INSTALL is false in case of Debian builds
+ s) SKIP_INSTALL="${OPTARG}";;
+ v) LTP_VERSION="${OPTARG}";;
+ esac
+done
+
+# Install LTP test suite
+# Wipes any existing /opt/ltp, downloads the ltp-full release tarball for
+# ${LTP_VERSION} from GitHub, and builds only the realtime testsuite.
+install_ltp() {
+ rm -rf /opt/ltp
+ mkdir -p /opt/ltp
+ # shellcheck disable=SC2164
+ cd /opt/ltp
+ # shellcheck disable=SC2140
+ wget https://github.com/linux-test-project/ltp/releases/download/"${LTP_VERSION}"/ltp-full-"${LTP_VERSION}".tar.xz
+ # --strip-components=1 unpacks the tarball's contents directly into /opt/ltp.
+ tar --strip-components=1 -Jxf ltp-full-"${LTP_VERSION}".tar.xz
+ ./configure --with-realtime-testsuite
+ make -C testcases/realtime/
+}
+
+# Run LTP realtime test suite
+# Runs each selected functional test via test_realtime.sh, capturing the
+# combined output in ${TMP_FILE}, then pipes it through ltp-realtime.py to
+# produce parsed result lines in ${RESULT_FILE}.
+run_ltp_realtime() {
+ # shellcheck disable=SC2164
+ cd "${LTP_PATH}"
+ for TEST in ${LTP_REALTIME_TESTS}; do
+ pipe0_status "./testscripts/test_realtime.sh -t func/${TEST}" "tee -a ${TMP_FILE}"
+ done
+ # shellcheck disable=SC2002
+ cat "${TMP_FILE}" | "${SCRIPTPATH}"/ltp-realtime.py 2>&1 | tee -a "${RESULT_FILE}"
+}
+
+# Test run.
+! check_root && error_msg "This script must be run as root"
+# Keep results from earlier runs by renaming the old output dir with a timestamp.
+[ -d "${OUTPUT}" ] && mv "${OUTPUT}" "${OUTPUT}_$(date +%Y%m%d%H%M%S)"
+mkdir -p "${OUTPUT}"
+
+info_msg "About to run ltp realtime test..."
+info_msg "Output directory: ${OUTPUT}"
+
+# Skip the download/build on prebuilt images (e.g. Open Embedded);
+# otherwise install distro build dependencies first, then build LTP.
+if [ "${SKIP_INSTALL}" = "True" ] || [ "${SKIP_INSTALL}" = "true" ]; then
+ info_msg "install_ltp skipped"
+else
+ dist_name
+ # shellcheck disable=SC2154
+ case "${dist}" in
+ Debian|Ubuntu)
+ pkgs="xz-utils flex bison build-essential wget curl net-tools"
+ install_deps "${pkgs}" "${SKIP_INSTALL}"
+ ;;
+ CentOS|Fedora)
+ pkgs="xz flex bison make automake gcc gcc-c++ kernel-devel wget curl net-tools"
+ install_deps "${pkgs}" "${SKIP_INSTALL}"
+ ;;
+ *)
+ warn_msg "Unsupported distribution: package install skipped"
+ esac
+ info_msg "Run install_ltp"
+ install_ltp
+fi
+info_msg "Running run_ltp_realtime"
+run_ltp_realtime
diff --git a/automated/linux/ltp-realtime/ltp-realtime.yaml b/automated/linux/ltp-realtime/ltp-realtime.yaml
new file mode 100644
index 0000000..b09a383
--- /dev/null
+++ b/automated/linux/ltp-realtime/ltp-realtime.yaml
@@ -0,0 +1,38 @@
+metadata:
+ name: ltp-realtime
+ format: "Lava-Test-Shell Test Definition 1.0"
+ description: "Run LTP realtime test suite on Ubuntu/CentOS/Openembedded.
+ This testsuite contains some functional tests and a few
+ performance and latency measurement tests"
+ maintainer:
+ - anders.roxell@linaro.org
+ - mike.holmes@linaro.org
+ - naresh.kamboju@linaro.org
+ os:
+ - debian
+ - ubuntu
+ - fedora
+ - centos
+ - openembedded
+ scope:
+ - preempt-rt
+ devices:
+ - arndale
+# Defaults forwarded to ltp-realtime.sh; each maps onto one of its
+# -v / -T / -s command-line options.
+params:
+ # LTP version
+ LTP_VERSION: 20170116
+ LTP_REALTIME_TESTS: "async_handler gtod_latency hrtimer-prio matrix_mult measurement periodic_cpu_load pi_perf prio-preempt prio-wake pthread_kill_latency rt-migrate sched_football sched_jitter sched_latency thread_clock"
+ SKIP_INSTALL: false
+
+run:
+ steps:
+ - cd ./automated/linux/ltp-realtime/
+ - ./ltp-realtime.sh -T "${LTP_REALTIME_TESTS}" -s "${SKIP_INSTALL}" -v "${LTP_VERSION}"
+ - ../../utils/send-to-lava.sh ./output/result.txt
+
+parse:
+ # Map LTP result tokens onto LAVA's pass/fail/skip vocabulary.
+ pattern: "^(?!.+ED)(?P<test_case_id>\\w+)\\s+(?P<result>PASS|FAIL|CONF)\\s+\\d+"
+ fixupdict:
+ FAIL: fail
+ PASS: pass
+ CONF: skip