[Fuego] [PATCH 2/2] pmqtest: Add a new test for the rt-tests

Hoang Van Tuyen tuyen.hoangvan at toshiba-tsdv.com
Tue Jan 16 04:47:52 UTC 2018


The pmqtest starts pairs of threads and measures the latency of
interprocess communication with POSIX message queues.
Like ptsematest, pmqtest does not support an option for printing
a summary on exit, so we take the last few lines of the command's
output and use them as a summary.

Signed-off-by: Hoang Van Tuyen <tuyen.hoangvan at toshiba-tsdv.com>
---
  engine/tests/Benchmark.pmqtest/chart_config.json |  5 +++++
  engine/tests/Benchmark.pmqtest/criteria.json     | 26 
++++++++++++++++++++++++
  engine/tests/Benchmark.pmqtest/fuego_test.sh     | 25 
+++++++++++++++++++++++
  engine/tests/Benchmark.pmqtest/parser.py         | 23 
+++++++++++++++++++++
  engine/tests/Benchmark.pmqtest/reference.json    | 26 
++++++++++++++++++++++++
  engine/tests/Benchmark.pmqtest/spec.json         | 14 +++++++++++++
  6 files changed, 119 insertions(+)
  create mode 100644 engine/tests/Benchmark.pmqtest/chart_config.json
  create mode 100644 engine/tests/Benchmark.pmqtest/criteria.json
  create mode 100755 engine/tests/Benchmark.pmqtest/fuego_test.sh
  create mode 100755 engine/tests/Benchmark.pmqtest/parser.py
  create mode 100644 engine/tests/Benchmark.pmqtest/reference.json
  create mode 100644 engine/tests/Benchmark.pmqtest/spec.json

diff --git a/engine/tests/Benchmark.pmqtest/chart_config.json 
b/engine/tests/Benchmark.pmqtest/chart_config.json
new file mode 100644
index 0000000..cdaf6a2
--- /dev/null
+++ b/engine/tests/Benchmark.pmqtest/chart_config.json
@@ -0,0 +1,5 @@
+{
+    "chart_type": "measure_plot",
+    "measures": ["default.latencies.max_latency",
+        "default.latencies.avg_latency"]
+}
diff --git a/engine/tests/Benchmark.pmqtest/criteria.json 
b/engine/tests/Benchmark.pmqtest/criteria.json
new file mode 100644
index 0000000..a023558
--- /dev/null
+++ b/engine/tests/Benchmark.pmqtest/criteria.json
@@ -0,0 +1,26 @@
+{
+    "schema_version":"1.0",
+    "criteria":[
+        {
+            "tguid":"default.latencies.max_latency",
+            "reference":{
+                "value":100,
+                "operator":"le"
+            }
+        },
+        {
+            "tguid":"default.latencies.min_latency",
+            "reference":{
+                "value":100,
+                "operator":"le"
+            }
+        },
+        {
+            "tguid":"default.latencies.avg_latency",
+            "reference":{
+                "value":100,
+                "operator":"le"
+            }
+        }
+    ]
+}
diff --git a/engine/tests/Benchmark.pmqtest/fuego_test.sh 
b/engine/tests/Benchmark.pmqtest/fuego_test.sh
new file mode 100755
index 0000000..dc82460
--- /dev/null
+++ b/engine/tests/Benchmark.pmqtest/fuego_test.sh
@@ -0,0 +1,25 @@
+tarball=../rt-tests/rt-tests-v1.1.1.tar.gz
+
+NEED_ROOT=1
+
+function test_pre_check {
+    assert_define BENCHMARK_PMQTEST_PARAMS
+}
+
+function test_build {
+    patch -p1 -N -s < 
$TEST_HOME/../rt-tests/0001-Add-scheduling-policies-for-old-kernels.patch
+    make NUMA=0 pmqtest
+}
+
+function test_deploy {
+    put pmqtest  $BOARD_TESTDIR/fuego.$TESTDIR/
+}
+
+function test_run {
+    # pmqtest does not support a option for printing a summary only on 
exit.
+    # So, We get some lines at the end of the command's output.
+    # The number for getting the lines depends on the cpu number of 
target machine.
+    target_cpu_number=`cmd "nproc"`
+    getting_line_number=`expr $target_cpu_number + $target_cpu_number`
+    report "cd $BOARD_TESTDIR/fuego.$TESTDIR; ./pmqtest 
$BENCHMARK_PMQTEST_PARAMS | tail -$getting_line_number"
+}
diff --git a/engine/tests/Benchmark.pmqtest/parser.py 
b/engine/tests/Benchmark.pmqtest/parser.py
new file mode 100755
index 0000000..edc77ff
--- /dev/null
+++ b/engine/tests/Benchmark.pmqtest/parser.py
@@ -0,0 +1,23 @@
+#!/usr/bin/python
+import os, re, sys
+sys.path.insert(0, os.environ['FUEGO_CORE'] + '/engine/scripts/parser')
+import common as plib
+
+regex_string = ".*, Min\s+(\d+).*, Avg\s+(\d+), Max\s+(\d+)"
+measurements = {}
+matches = plib.parse_log(regex_string)
+
+if matches:
+    min_latencies = []
+    avg_latencies = []
+    max_latencies = []
+    for thread in matches:
+        min_latencies.append(float(thread[0]))
+        avg_latencies.append(float(thread[1]))
+        max_latencies.append(float(thread[2]))
+    measurements['default.latencies'] = [
+        {"name": "max_latency", "measure" : max(max_latencies)},
+        {"name": "min_latency", "measure" : min(min_latencies)},
+        {"name": "avg_latency", "measure" : 
sum(avg_latencies)/len(avg_latencies)}]
+
+sys.exit(plib.process(measurements))
diff --git a/engine/tests/Benchmark.pmqtest/reference.json 
b/engine/tests/Benchmark.pmqtest/reference.json
new file mode 100644
index 0000000..415a8dd
--- /dev/null
+++ b/engine/tests/Benchmark.pmqtest/reference.json
@@ -0,0 +1,26 @@
+{
+    "test_sets":[
+        {
+            "name":"default",
+            "test_cases":[
+                {
+                    "name":"latencies",
+                    "measurements":[
+                        {
+                            "name":"max_latency",
+                            "unit":"us"
+                        },
+                        {
+                            "name":"min_latency",
+                            "unit":"us"
+                        },
+                        {
+                            "name":"avg_latency",
+                            "unit":"us"
+                        }
+                    ]
+                }
+            ]
+        }
+    ]
+}
diff --git a/engine/tests/Benchmark.pmqtest/spec.json 
b/engine/tests/Benchmark.pmqtest/spec.json
new file mode 100644
index 0000000..d39996d
--- /dev/null
+++ b/engine/tests/Benchmark.pmqtest/spec.json
@@ -0,0 +1,14 @@
+{
+    "testName": "Benchmark.pmqtest",
+    "specs": {
+        "default": {
+            "PARAMS": "-Sp99 -i100 -d0 -l10000"
+        },
+        "latest": {
+            "PER_JOB_BUILD": "true",
+            "gitrepo": 
"https://git.kernel.org/pub/scm/utils/rt-tests/rt-tests.git",
+            "gitref": "unstable/devel/v1.1.1",
+            "PARAMS": "-Sp99 -i100 -d0 -l10000"
+        }
+    }
+}
-- 
2.1.4


-- 
================================================================
Hoang Van Tuyen (Mr.)
TOSHIBA SOFTWARE DEVELOPMENT (VIETNAM) CO., LTD.
16th Floor, VIT Building, 519 Kim Ma Str., Ba Dinh Dist., Hanoi, Vietnam
Tel: 84-4-22208801 (Company) - Ext.251
Fax: 84-4-22208802 (Company)
Email: tuyen.hoangvan at toshiba-tsdv.com
================================================================

-------------- next part --------------
From 42c29eee992aa870ad03683b5ad5741cffb9ee25 Mon Sep 17 00:00:00 2001
From: Hoang Van Tuyen <tuyen.hoangvan at toshiba-tsdv.com>
Date: Mon, 15 Jan 2018 17:09:20 +0700
Subject: [PATCH 2/2] pmqtest: Add a new test for the rt-tests

The pmqtest starts pairs of threads and measures the latency of
interprocess communication with POSIX message queues.
Like ptsematest, pmqtest does not support an option for printing
a summary on exit, so we take the last few lines of the command's
output and use them as a summary.

Signed-off-by: Hoang Van Tuyen <tuyen.hoangvan at toshiba-tsdv.com>
---
 engine/tests/Benchmark.pmqtest/chart_config.json |  5 +++++
 engine/tests/Benchmark.pmqtest/criteria.json     | 26 ++++++++++++++++++++++++
 engine/tests/Benchmark.pmqtest/fuego_test.sh     | 25 +++++++++++++++++++++++
 engine/tests/Benchmark.pmqtest/parser.py         | 23 +++++++++++++++++++++
 engine/tests/Benchmark.pmqtest/reference.json    | 26 ++++++++++++++++++++++++
 engine/tests/Benchmark.pmqtest/spec.json         | 14 +++++++++++++
 6 files changed, 119 insertions(+)
 create mode 100644 engine/tests/Benchmark.pmqtest/chart_config.json
 create mode 100644 engine/tests/Benchmark.pmqtest/criteria.json
 create mode 100755 engine/tests/Benchmark.pmqtest/fuego_test.sh
 create mode 100755 engine/tests/Benchmark.pmqtest/parser.py
 create mode 100644 engine/tests/Benchmark.pmqtest/reference.json
 create mode 100644 engine/tests/Benchmark.pmqtest/spec.json

diff --git a/engine/tests/Benchmark.pmqtest/chart_config.json b/engine/tests/Benchmark.pmqtest/chart_config.json
new file mode 100644
index 0000000..cdaf6a2
--- /dev/null
+++ b/engine/tests/Benchmark.pmqtest/chart_config.json
@@ -0,0 +1,5 @@
+{
+	"chart_type": "measure_plot",
+	"measures": ["default.latencies.max_latency",
+        "default.latencies.avg_latency"]
+}
diff --git a/engine/tests/Benchmark.pmqtest/criteria.json b/engine/tests/Benchmark.pmqtest/criteria.json
new file mode 100644
index 0000000..a023558
--- /dev/null
+++ b/engine/tests/Benchmark.pmqtest/criteria.json
@@ -0,0 +1,26 @@
+{
+    "schema_version":"1.0",
+    "criteria":[
+        {
+            "tguid":"default.latencies.max_latency",
+            "reference":{
+                "value":100,
+                "operator":"le"
+            }
+        },
+        {
+            "tguid":"default.latencies.min_latency",
+            "reference":{
+                "value":100,
+                "operator":"le"
+            }
+        },
+        {
+            "tguid":"default.latencies.avg_latency",
+            "reference":{
+                "value":100,
+                "operator":"le"
+            }
+        }
+    ]
+}
diff --git a/engine/tests/Benchmark.pmqtest/fuego_test.sh b/engine/tests/Benchmark.pmqtest/fuego_test.sh
new file mode 100755
index 0000000..dc82460
--- /dev/null
+++ b/engine/tests/Benchmark.pmqtest/fuego_test.sh
@@ -0,0 +1,25 @@
+tarball=../rt-tests/rt-tests-v1.1.1.tar.gz
+
+NEED_ROOT=1
+
+function test_pre_check {
+    assert_define BENCHMARK_PMQTEST_PARAMS
+}
+
+function test_build {
+    patch -p1 -N -s < $TEST_HOME/../rt-tests/0001-Add-scheduling-policies-for-old-kernels.patch
+    make NUMA=0 pmqtest
+}
+
+function test_deploy {
+    put pmqtest  $BOARD_TESTDIR/fuego.$TESTDIR/
+}
+
+function test_run {
+    # pmqtest does not support a option for printing a summary only on exit.
+    # So, We get some lines at the end of the command's output.
+    # The number for getting the lines depends on the cpu number of target machine.
+    target_cpu_number=`cmd "nproc"`
+    getting_line_number=`expr $target_cpu_number + $target_cpu_number`
+    report "cd $BOARD_TESTDIR/fuego.$TESTDIR; ./pmqtest $BENCHMARK_PMQTEST_PARAMS | tail -$getting_line_number"
+}
diff --git a/engine/tests/Benchmark.pmqtest/parser.py b/engine/tests/Benchmark.pmqtest/parser.py
new file mode 100755
index 0000000..edc77ff
--- /dev/null
+++ b/engine/tests/Benchmark.pmqtest/parser.py
@@ -0,0 +1,23 @@
+#!/usr/bin/python
+import os, re, sys
+sys.path.insert(0, os.environ['FUEGO_CORE'] + '/engine/scripts/parser')
+import common as plib
+
+regex_string = ".*, Min\s+(\d+).*, Avg\s+(\d+), Max\s+(\d+)"
+measurements = {}
+matches = plib.parse_log(regex_string)
+
+if matches:
+	min_latencies = []
+	avg_latencies = []
+	max_latencies = []
+	for thread in matches:
+		min_latencies.append(float(thread[0]))
+		avg_latencies.append(float(thread[1]))
+		max_latencies.append(float(thread[2]))
+	measurements['default.latencies'] = [
+		{"name": "max_latency", "measure" : max(max_latencies)},
+		{"name": "min_latency", "measure" : min(min_latencies)},
+		{"name": "avg_latency", "measure" : sum(avg_latencies)/len(avg_latencies)}]
+
+sys.exit(plib.process(measurements))
diff --git a/engine/tests/Benchmark.pmqtest/reference.json b/engine/tests/Benchmark.pmqtest/reference.json
new file mode 100644
index 0000000..415a8dd
--- /dev/null
+++ b/engine/tests/Benchmark.pmqtest/reference.json
@@ -0,0 +1,26 @@
+{
+    "test_sets":[
+        {
+            "name":"default",
+            "test_cases":[
+                {
+                    "name":"latencies",
+                    "measurements":[
+                        {
+                            "name":"max_latency",
+                            "unit":"us"
+                        },
+                        {
+                            "name":"min_latency",
+                            "unit":"us"
+                        },
+                        {
+                            "name":"avg_latency",
+                            "unit":"us"
+                        }
+                    ]
+                }
+            ]
+        }
+    ]
+}
diff --git a/engine/tests/Benchmark.pmqtest/spec.json b/engine/tests/Benchmark.pmqtest/spec.json
new file mode 100644
index 0000000..d39996d
--- /dev/null
+++ b/engine/tests/Benchmark.pmqtest/spec.json
@@ -0,0 +1,14 @@
+{
+    "testName": "Benchmark.pmqtest",
+    "specs": {
+        "default": {
+            "PARAMS": "-Sp99 -i100 -d0 -l10000"
+        },
+        "latest": {
+            "PER_JOB_BUILD": "true",
+            "gitrepo": "https://git.kernel.org/pub/scm/utils/rt-tests/rt-tests.git",
+            "gitref": "unstable/devel/v1.1.1",
+            "PARAMS": "-Sp99 -i100 -d0 -l10000"
+        }
+    }
+}
-- 
2.1.4
-------------- next part --------------
-- 
This mail was scanned by BitDefender
For more information please visit http://www.bitdefender.com


More information about the Fuego mailing list