[Fuego] [PATCH 1/3] svsematest: Add a new test from the rt-tests suite

Hoang Van Tuyen tuyen.hoangvan at toshiba-tsdv.com
Thu Jan 25 01:39:49 UTC 2018


svsematest starts two threads (or forks two processes) and
measures the latency of SYSV semaphores.
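
For reference, each sender/receiver pair periodically prints a summary
line like the one below (values are illustrative; this is the format
the parser added by this patch relies on):

    #1 -> #0, Min 2, Cur 4, Avg 3, Max 30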

Signed-off-by: Hoang Van Tuyen <tuyen.hoangvan at toshiba-tsdv.com>
---
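Note: as a quick standalone check, this is what parser.py's regular
expression extracts from such a summary line (sample values made up):

    import re

    line = "#1 -> #0, Min 2, Cur 4, Avg 3, Max 30"
    regex = r".*, Min\s+(\d+).*, Avg\s+(\d+), Max\s+(\d+)"
    # The second ".*" skips the "Cur" field; groups are (min, avg, max).
    print(re.match(regex, line).groups())   # ('2', '3', '30')
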
 .../tests/Benchmark.svsematest/chart_config.json   |  5 +++++
 engine/tests/Benchmark.svsematest/criteria.json    | 26 ++++++++++++++++++++++
 engine/tests/Benchmark.svsematest/fuego_test.sh    | 25 +++++++++++++++++++++
 engine/tests/Benchmark.svsematest/parser.py        | 23 +++++++++++++++++++
 engine/tests/Benchmark.svsematest/reference.json   | 26 ++++++++++++++++++++++
 engine/tests/Benchmark.svsematest/spec.json        | 14 ++++++++++++
 6 files changed, 119 insertions(+)
 create mode 100644 engine/tests/Benchmark.svsematest/chart_config.json
 create mode 100644 engine/tests/Benchmark.svsematest/criteria.json
 create mode 100755 engine/tests/Benchmark.svsematest/fuego_test.sh
 create mode 100755 engine/tests/Benchmark.svsematest/parser.py
 create mode 100644 engine/tests/Benchmark.svsematest/reference.json
 create mode 100644 engine/tests/Benchmark.svsematest/spec.json

diff --git a/engine/tests/Benchmark.svsematest/chart_config.json b/engine/tests/Benchmark.svsematest/chart_config.json
new file mode 100644
index 0000000..cdaf6a2
--- /dev/null
+++ b/engine/tests/Benchmark.svsematest/chart_config.json
@@ -0,0 +1,5 @@
+{
+    "chart_type": "measure_plot",
+    "measures": ["default.latencies.max_latency",
+        "default.latencies.avg_latency"]
+}
diff --git a/engine/tests/Benchmark.svsematest/criteria.json b/engine/tests/Benchmark.svsematest/criteria.json
new file mode 100644
index 0000000..a023558
--- /dev/null
+++ b/engine/tests/Benchmark.svsematest/criteria.json
@@ -0,0 +1,26 @@
+{
+    "schema_version":"1.0",
+    "criteria":[
+        {
+            "tguid":"default.latencies.max_latency",
+            "reference":{
+                "value":100,
+                "operator":"le"
+            }
+        },
+        {
+            "tguid":"default.latencies.min_latency",
+            "reference":{
+                "value":100,
+                "operator":"le"
+            }
+        },
+        {
+            "tguid":"default.latencies.avg_latency",
+            "reference":{
+                "value":100,
+                "operator":"le"
+            }
+        }
+    ]
+}
diff --git a/engine/tests/Benchmark.svsematest/fuego_test.sh b/engine/tests/Benchmark.svsematest/fuego_test.sh
new file mode 100755
index 0000000..9e90535
--- /dev/null
+++ b/engine/tests/Benchmark.svsematest/fuego_test.sh
@@ -0,0 +1,25 @@
+tarball=../rt-tests/rt-tests-v1.1.1.tar.gz
+
+NEED_ROOT=1
+
+function test_pre_check {
+    assert_define BENCHMARK_SVSEMATEST_PARAMS
+}
+
+function test_build {
+    patch -p1 -N -s < $TEST_HOME/../rt-tests/0001-Add-scheduling-policies-for-old-kernels.patch
+    make NUMA=0 svsematest
+}
+
+function test_deploy {
+    put svsematest  $BOARD_TESTDIR/fuego.$TESTDIR/
+}
+
+function test_run {
+    # svsematest has no option to print only a summary on exit,
+    # so we keep just the last lines of the command's output:
+    # two lines per CPU on the target machine.
+    target_cpu_number=$(cmd "nproc")
+    getting_line_number=$(( target_cpu_number * 2 ))
+    report "cd $BOARD_TESTDIR/fuego.$TESTDIR; ./svsematest $BENCHMARK_SVSEMATEST_PARAMS | tail -$getting_line_number"
+}
diff --git a/engine/tests/Benchmark.svsematest/parser.py b/engine/tests/Benchmark.svsematest/parser.py
new file mode 100755
index 0000000..edc77ff
--- /dev/null
+++ b/engine/tests/Benchmark.svsematest/parser.py
@@ -0,0 +1,23 @@
+#!/usr/bin/python
+import os, re, sys
+sys.path.insert(0, os.environ['FUEGO_CORE'] + '/engine/scripts/parser')
+import common as plib
+
+regex_string = r".*, Min\s+(\d+).*, Avg\s+(\d+), Max\s+(\d+)"
+measurements = {}
+matches = plib.parse_log(regex_string)
+
+if matches:
+    min_latencies = []
+    avg_latencies = []
+    max_latencies = []
+    for thread in matches:
+        min_latencies.append(float(thread[0]))
+        avg_latencies.append(float(thread[1]))
+        max_latencies.append(float(thread[2]))
+    measurements['default.latencies'] = [
+        {"name": "max_latency", "measure" : max(max_latencies)},
+        {"name": "min_latency", "measure" : min(min_latencies)},
+        {"name": "avg_latency", "measure" : 
sum(avg_latencies)/len(avg_latencies)}]
+
+sys.exit(plib.process(measurements))
diff --git a/engine/tests/Benchmark.svsematest/reference.json b/engine/tests/Benchmark.svsematest/reference.json
new file mode 100644
index 0000000..415a8dd
--- /dev/null
+++ b/engine/tests/Benchmark.svsematest/reference.json
@@ -0,0 +1,26 @@
+{
+    "test_sets":[
+        {
+            "name":"default",
+            "test_cases":[
+                {
+                    "name":"latencies",
+                    "measurements":[
+                        {
+                            "name":"max_latency",
+                            "unit":"us"
+                        },
+                        {
+                            "name":"min_latency",
+                            "unit":"us"
+                        },
+                        {
+                            "name":"avg_latency",
+                            "unit":"us"
+                        }
+                    ]
+                }
+            ]
+        }
+    ]
+}
diff --git a/engine/tests/Benchmark.svsematest/spec.json b/engine/tests/Benchmark.svsematest/spec.json
new file mode 100644
index 0000000..1a9a767
--- /dev/null
+++ b/engine/tests/Benchmark.svsematest/spec.json
@@ -0,0 +1,14 @@
+{
+    "testName": "Benchmark.svsematest",
+    "specs": {
+        "default": {
+            "PARAMS": "-a -t -p99 -i100 -d25 -l10000"
+        },
+        "latest": {
+            "PER_JOB_BUILD": "true",
+            "gitrepo": 
"https://git.kernel.org/pub/scm/utils/rt-tests/rt-tests.git",
+            "gitref": "unstable/devel/v1.1.1",
+            "PARAMS": "-a -t -p99 -i100 -d25 -l10000"
+        }
+    }
+}
-- 
2.1.4



-- 
================================================================
Hoang Van Tuyen (Mr.)
TOSHIBA SOFTWARE DEVELOPMENT (VIETNAM) CO., LTD.
16th Floor, VIT Building, 519 Kim Ma Str., Ba Dinh Dist., Hanoi, Vietnam
Tel: 84-4-22208801 (Company) - Ext.251
Fax: 84-4-22208802 (Company)
Email: tuyen.hoangvan at toshiba-tsdv.com
================================================================
