From 5ce6f29f8a8369a30077b5be3c8788b016767ca7 Mon Sep 17 00:00:00 2001
From: Dariusz Kedzierski <dkedzierski@syncad.com>
Date: Tue, 27 Oct 2020 09:40:48 +0100
Subject: [PATCH] Add info about failing benchmarks to the console

---
 scripts/ci_start_api_benchmark.sh |  1 +
 scripts/json_report_parser.py     | 14 ++++++++++----
 2 files changed, 11 insertions(+), 4 deletions(-)

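Note (patch commentary, not part of the commit message): a minimal standalone
sketch of the new console summary for reference. The benchmark name, mean time
and call parameters below are hypothetical; real entries come from the
benchmark JSON report parsed by json_report_parser.py:

    from prettytable import PrettyTable

    # Hypothetical entry; the script fills this list from the JSON report.
    above_treshold = [("test_get_block", 1.2345, '{"block_num": 1}')]

    summary = PrettyTable()
    summary.field_names = ['Test name', 'Mean time [s]', 'Call parameters']
    for entry in above_treshold:
        summary.add_row(entry)
    print(summary)
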
diff --git a/scripts/ci_start_api_benchmark.sh b/scripts/ci_start_api_benchmark.sh
index 4be461787..ae542eea3 100755
--- a/scripts/ci_start_api_benchmark.sh
+++ b/scripts/ci_start_api_benchmark.sh
@@ -17,6 +17,7 @@ BASE_DIR=$(pwd)
 echo "Script base dir is: $BASE_DIR"
 
 pip3 install tox --user
+pip3 install prettytable --user
 
 echo "Creating benchmark test file as: $4.py"
 $BASE_DIR/tests/tests_api/hivemind/benchmarks/benchmark_generator.py $3 "$4.py" "http://$1:$2"
diff --git a/scripts/json_report_parser.py b/scripts/json_report_parser.py
index 4a553dee6..bde8916cd 100755
--- a/scripts/json_report_parser.py
+++ b/scripts/json_report_parser.py
@@ -1,6 +1,5 @@
 #!/usr/bin/python3
 
-import xml.dom.minidom
 import os
 from sys import exit
 from json import dumps, load
@@ -40,7 +39,7 @@ def class_to_path(class_name, class_to_path_dic):
     return None
 
 if __name__ == '__main__':
-    above_treshold = False
+    above_treshold = []
     import argparse
     parser = argparse.ArgumentParser()
     parser.add_argument("path_to_test_dir", type = str, help = "Path to test directory for given json benchmark file")
@@ -65,19 +64,26 @@ if __name__ == '__main__':
         ofile.write("  </head>\n")
         ofile.write("  <body>\n")
         ofile.write("    <table>\n")
-        ofile.write("      <tr><th>Test name</th><th>Time [s]</th></tr>\n")
+        ofile.write("      <tr><th>Test name</th><th>Mean time [s]</th></tr>\n")
         json_data = None
         with open(args.json_file, "r") as json_file:
             json_data = load(json_file)
         for benchmark in json_data['benchmarks']:
             if float(benchmark['stats']['mean']) > args.time_threshold:
                 ofile.write("      <tr><td>{}<br/>Parameters: {}</td><td bgcolor=\"red\">{:.4f}</td></tr>\n".format(benchmark['name'], get_request_from_yaml(class_to_path(benchmark['name'][5:], class_to_path_dic)), benchmark['stats']['mean']))
-                above_treshold = True
+                above_treshold.append((benchmark['name'], benchmark['stats']['mean'], get_request_from_yaml(class_to_path(benchmark['name'][5:], class_to_path_dic))))
             else:
                 ofile.write("      <tr><td>{}</td><td>{:.4f}</td></tr>\n".format(benchmark['name'], benchmark['stats']['mean']))
         ofile.write("    </table>\n")
         ofile.write("  </body>\n")
         ofile.write("</html>\n")
     if above_treshold:
+        from prettytable import PrettyTable
+        summary = PrettyTable()
+        print("########## Test failed: the following tests exceeded the {}s mean-time threshold ##########".format(args.time_threshold))
+        summary.field_names = ['Test name', 'Mean time [s]', 'Call parameters']
+        for entry in above_treshold:
+            summary.add_row(entry)
+        print(summary)
         exit(1)
     exit(0)
-- 
GitLab