Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • hive/hivemind
1 result
Show changes
Commits on Source (64)
......@@ -7,6 +7,7 @@ stages:
- data-supply
- deploy
- e2e-test
- benchmark-tests
- post-deploy
variables:
......@@ -242,7 +243,7 @@ follow_api_smoketest:
artifacts:
reports:
junit: api_smoketest_follow_api.xml
junit: api_smoketest.xml
follow_api_smoketest_negative:
<<: *common_api_smoketest_job
......@@ -270,6 +271,38 @@ tags_api_smoketest_negative:
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" tags_api_negative/ api_smoketest_tags_api_negative.xml
# Runs API benchmarks (tox -e benchmark) against a hivemind instance started
# earlier in the pipeline by hivemind_start_server.
api_smoketest_benchmark:
  stage: benchmark-tests
  environment: hive-4.pl.syncad.com
  needs:
    # Requires the running server job and its artifacts (e.g. connection info).
    - job: hivemind_start_server
      artifacts: true
  # Benchmarks are informational; a slow run must not fail the pipeline.
  allow_failure: true
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
      when: always
    - if: '$CI_PIPELINE_SOURCE == "push"'
      when: manual
    - when: on_success
  tags:
    - hivemind
  script:
    - tox -e benchmark -- localhost $HIVEMIND_HTTP_PORT tests/tests_api/hivemind/tavern
  artifacts:
    reports:
      # NOTE(review): this junit path matches the tags_api NEGATIVE smoketest
      # output, not anything the benchmark tox env is shown to produce —
      # looks copy-pasted; confirm the intended report file name.
      junit: api_smoketest_tags_api_negative.xml
    when: always
    paths:
      - tavern_report_benchmark_bridge_api_patterns.html
      - tavern_report_benchmark_bridge_api_negative.html
      - tavern_report_benchmark_condenser_api_patterns.html
      - tavern_report_benchmark_condenser_api_negative.html
      - tavern_report_benchmark_database_api_patterns.html
      - tavern_report_benchmark_database_api_negative.html
      - tavern_report_benchmark_follow_api_patterns.html
      - tavern_report_benchmark_follow_api_negative.html
      - tavern_report_benchmark_tags_api_patterns.html
      - tavern_report_benchmark_tags_api_negative.html
......@@ -92,6 +92,8 @@ class DbState:
@classmethod
def _disableable_indexes(cls):
to_locate = [
'hive_blocks_created_at_idx',
'hive_follows_ix5a', # (following, state, created_at, follower)
'hive_follows_ix5b', # (follower, state, created_at, following)
'hive_follows_block_num_idx',
......
......@@ -31,6 +31,7 @@ def build_metadata():
sa.UniqueConstraint('hash', name='hive_blocks_ux1'),
sa.ForeignKeyConstraint(['prev'], ['hive_blocks.hash'], name='hive_blocks_fk1'),
sa.Index('hive_blocks_created_at_idx', 'created_at')
)
sa.Table(
......
......@@ -5,11 +5,10 @@ AS
$function$
DECLARE
__post_id INT;
__enable_sort BOOLEAN;
__account_id INT;
BEGIN
SHOW enable_sort INTO __enable_sort;
__post_id = find_comment_id( _author, _permlink, True );
SET enable_sort=false;
__account_id = find_account_id( _observer, True );
RETURN QUERY SELECT
hp.id,
hp.author,
......@@ -50,16 +49,10 @@ BEGIN
FROM
hive_posts_view hp
JOIN hive_subscriptions hs ON hp.community_id = hs.community_id
JOIN hive_accounts ha_o ON ha_o.id = hs.account_id
JOIN hive_accounts_view ha ON ha.id = hp.author_id
WHERE ha_o.name = _observer AND hp.depth = 0 AND NOT ha.is_grayed AND ( __post_id = 0 OR hp.id < __post_id )
WHERE hs.account_id = __account_id AND hp.depth = 0 AND NOT ha.is_grayed AND ( __post_id = 0 OR hp.id < __post_id )
ORDER BY hp.id DESC
LIMIT _limit;
IF __enable_sort THEN
SET enable_sort=true;
ELSE
SET enable_sort=false;
END IF;
END
$function$
language plpgsql VOLATILE;
......@@ -72,14 +65,13 @@ $function$
DECLARE
__post_id INT;
__hot_limit FLOAT;
__enable_sort BOOLEAN;
__account_id INT;
BEGIN
SHOW enable_sort INTO __enable_sort;
__post_id = find_comment_id( _author, _permlink, True );
IF __post_id <> 0 THEN
SELECT hp.sc_hot INTO __hot_limit FROM hive_posts hp WHERE hp.id = __post_id;
END IF;
SET enable_sort=false;
__account_id = find_account_id( _observer, True );
RETURN QUERY SELECT
hp.id,
hp.author,
......@@ -120,16 +112,10 @@ BEGIN
FROM
hive_posts_view hp
JOIN hive_subscriptions hs ON hp.community_id = hs.community_id
JOIN hive_accounts ha ON ha.id = hs.account_id
WHERE ha.name = _observer AND NOT hp.is_paidout AND hp.depth = 0
WHERE hs.account_id = __account_id AND NOT hp.is_paidout AND hp.depth = 0
AND ( __post_id = 0 OR hp.sc_hot < __hot_limit OR ( hp.sc_hot = __hot_limit AND hp.id < __post_id ) )
ORDER BY hp.sc_hot DESC, hp.id DESC
LIMIT _limit;
IF __enable_sort THEN
SET enable_sort=true;
ELSE
SET enable_sort=false;
END IF;
END
$function$
language plpgsql VOLATILE;
......@@ -142,14 +128,13 @@ $function$
DECLARE
__post_id INT;
__payout_limit hive_posts.payout%TYPE;
__enable_sort BOOLEAN;
__account_id INT;
BEGIN
SHOW enable_sort INTO __enable_sort;
__post_id = find_comment_id( _author, _permlink, True );
IF __post_id <> 0 THEN
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hive_posts hp WHERE hp.id = __post_id;
END IF;
SET enable_sort=false;
__account_id = find_account_id( _observer, True );
RETURN QUERY SELECT
hp.id,
hp.author,
......@@ -195,8 +180,7 @@ BEGIN
FROM
hive_posts hp1
JOIN hive_subscriptions hs ON hp1.community_id = hs.community_id
JOIN hive_accounts ha ON ha.id = hs.account_id
WHERE ha.name = _observer AND hp1.counter_deleted = 0 AND NOT hp1.is_paidout AND hp1.depth > 0
WHERE hs.account_id = __account_id AND hp1.counter_deleted = 0 AND NOT hp1.is_paidout AND hp1.depth > 0
AND ( __post_id = 0 OR ( hp1.payout + hp1.pending_payout ) < __payout_limit OR ( ( hp1.payout + hp1.pending_payout ) = __payout_limit AND hp1.id < __post_id ) )
ORDER BY ( hp1.payout + hp1.pending_payout ) DESC, hp1.id DESC
LIMIT _limit
......@@ -204,11 +188,6 @@ BEGIN
JOIN hive_posts_view hp ON hp.id = payout.id
ORDER BY payout.all_payout DESC, payout.id DESC
LIMIT _limit;
IF __enable_sort THEN
SET enable_sort=true;
ELSE
SET enable_sort=false;
END IF;
END
$function$
language plpgsql VOLATILE;
......@@ -222,15 +201,14 @@ DECLARE
__post_id INT;
__payout_limit hive_posts.payout%TYPE;
__head_block_time TIMESTAMP;
__enable_sort BOOLEAN;
__account_id INT;
BEGIN
SHOW enable_sort INTO __enable_sort;
__post_id = find_comment_id( _author, _permlink, True );
IF __post_id <> 0 THEN
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hive_posts hp WHERE hp.id = __post_id;
END IF;
__account_id = find_account_id( _observer, True );
__head_block_time = head_block_time();
SET enable_sort=false;
RETURN QUERY SELECT
hp.id,
hp.author,
......@@ -271,16 +249,10 @@ BEGIN
FROM
hive_posts_view hp
JOIN hive_subscriptions hs ON hp.community_id = hs.community_id
JOIN hive_accounts ha ON ha.id = hs.account_id
WHERE ha.name = _observer AND NOT hp.is_paidout AND hp.payout_at BETWEEN __head_block_time + interval '12 hours' AND __head_block_time + interval '36 hours'
WHERE hs.account_id = __account_id AND NOT hp.is_paidout AND hp.payout_at BETWEEN __head_block_time + interval '12 hours' AND __head_block_time + interval '36 hours'
AND ( __post_id = 0 OR ( hp.payout + hp.pending_payout ) < __payout_limit OR ( ( hp.payout + hp.pending_payout ) = __payout_limit AND hp.id < __post_id ) )
ORDER BY ( hp.payout + hp.pending_payout ) DESC, hp.id DESC
LIMIT _limit;
IF __enable_sort THEN
SET enable_sort=true;
ELSE
SET enable_sort=false;
END IF;
END
$function$
language plpgsql VOLATILE;
......@@ -293,14 +265,13 @@ $function$
DECLARE
__post_id INT;
__promoted_limit hive_posts.promoted%TYPE;
__enable_sort BOOLEAN;
__account_id INT;
BEGIN
SHOW enable_sort INTO __enable_sort;
__post_id = find_comment_id( _author, _permlink, True );
IF __post_id <> 0 THEN
SELECT hp.promoted INTO __promoted_limit FROM hive_posts hp WHERE hp.id = __post_id;
END IF;
SET enable_sort=false;
__account_id = find_account_id( _observer, True );
RETURN QUERY SELECT
hp.id,
hp.author,
......@@ -341,16 +312,10 @@ BEGIN
FROM
hive_posts_view hp
JOIN hive_subscriptions hs ON hp.community_id = hs.community_id
JOIN hive_accounts ha ON ha.id = hs.account_id
WHERE ha.name = _observer AND NOT hp.is_paidout AND hp.promoted > 0
WHERE hs.account_id = __account_id AND NOT hp.is_paidout AND hp.promoted > 0
AND ( __post_id = 0 OR hp.promoted < __promoted_limit OR ( hp.promoted = __promoted_limit AND hp.id < __post_id ) )
ORDER BY hp.promoted DESC, hp.id DESC
LIMIT _limit;
IF __enable_sort THEN
SET enable_sort=true;
ELSE
SET enable_sort=false;
END IF;
END
$function$
language plpgsql VOLATILE;
......@@ -435,14 +400,13 @@ $function$
DECLARE
__post_id INT;
__payout_limit hive_posts.payout%TYPE;
__enable_sort BOOLEAN;
__account_id INT;
BEGIN
SHOW enable_sort INTO __enable_sort;
__post_id = find_comment_id( _author, _permlink, True );
IF __post_id <> 0 THEN
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hive_posts hp WHERE hp.id = __post_id;
END IF;
SET enable_sort=false;
__account_id = find_account_id( _observer, True );
RETURN QUERY SELECT
hp.id,
hp.author,
......@@ -483,17 +447,11 @@ BEGIN
FROM
hive_posts_view hp
JOIN hive_subscriptions hs ON hp.community_id = hs.community_id
JOIN hive_accounts ha_o ON ha_o.id = hs.account_id
JOIN hive_accounts_view ha ON ha.id = hp.author_id
WHERE ha_o.name = _observer AND NOT hp.is_paidout AND ha.is_grayed AND ( hp.payout + hp.pending_payout ) > 0
WHERE hs.account_id = __account_id AND NOT hp.is_paidout AND ha.is_grayed AND ( hp.payout + hp.pending_payout ) > 0
AND ( __post_id = 0 OR ( hp.payout + hp.pending_payout ) < __payout_limit OR ( ( hp.payout + hp.pending_payout ) = __payout_limit AND hp.id < __post_id ) )
ORDER BY ( hp.payout + hp.pending_payout ) DESC, hp.id DESC
LIMIT _limit;
IF __enable_sort THEN
SET enable_sort=true;
ELSE
SET enable_sort=false;
END IF;
END
$function$
language plpgsql VOLATILE;
......@@ -24,6 +24,7 @@ $BODY$
DECLARE
__account_id INT := 0;
__last_read_at TIMESTAMP;
__last_read_at_block hive_blocks.num%TYPE;
__limit_block hive_blocks.num%TYPE = block_before_head( '90 days' );
BEGIN
__account_id = find_account_id( _account, True );
......@@ -32,11 +33,19 @@ BEGIN
FROM hive_accounts ha
WHERE ha.id = __account_id;
--- Warning given account can have no last_read_at set, so lets fallback to the block limit to avoid comparison to NULL.
SELECT COALESCE((SELECT hb.num
FROM hive_blocks hb
WHERE hb.created_at <= __last_read_at
ORDER by hb.created_at desc
LIMIT 1), __limit_block)
INTO __last_read_at_block;
RETURN QUERY SELECT
__last_read_at as lastread_at,
count(1) as unread
FROM hive_raw_notifications_view hnv
WHERE hnv.dst = __account_id AND hnv.block_num > __limit_block AND hnv.created_at > __last_read_at AND hnv.score >= _minimum_score
WHERE hnv.dst = __account_id AND hnv.block_num > __limit_block AND hnv.block_num > __last_read_at_block AND hnv.score >= _minimum_score
;
END
$BODY$
......
......@@ -147,6 +147,7 @@ values
,(now(), '033619277eccea70118a5b8dc0c73b913da0025f') -- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/326 https://gitlab.syncad.com/hive/hivemind/-/merge_requests/322 posts rshares recalc
,(now(), '1847c75702384c7e34c624fc91f24d2ef20df91d') -- latest version of develop containing included changes.
,(now(), '1f23e1326f3010bc84353aba82d4aa7ff2f999e4') -- hive_posts_author_id_created_at_idx index def. to speedup hive_accounts_info_view.
,(now(), '2a274e586454968a4f298a855a7e60394ed90bde') -- get_number_of_unread_notifications speedup https://gitlab.syncad.com/hive/hivemind/-/merge_requests/348/diffs
) ds (patch_date, patch_revision)
where not exists (select null from hive_db_patch_level hpl where hpl.patched_to_revision = ds.patch_revision);
......
......@@ -293,3 +293,5 @@ DROP INDEX IF EXISTS public.hive_posts_created_at_author_id_idx;
CREATE INDEX IF NOT EXISTS hive_posts_author_id_created_at_idx ON public.hive_posts ( author_id DESC, created_at DESC);
CREATE INDEX IF NOT EXISTS hive_blocks_created_at_idx ON hive_blocks (created_at);
#!/usr/bin/python3
from json import dumps
def make_benchmark_header():
    """Return the shared header for a generated benchmark module.

    The header defines ``send_rpc_query``, which POSTs the given payload to
    *address* and returns the decoded JSON response.
    """
    # This text is written verbatim into the generated test file, so it must
    # be valid, properly indented Python source (the function body below is
    # indented inside the emitted string).
    return """from requests import post
from json import dumps


def send_rpc_query(address, data):
    response = post(address, data=data)
    response_json = response.json()
    return response_json
"""
def make_benchmark(test_name, address, test_payload):
return """
def test_{}(benchmark):
response_json = benchmark(send_rpc_query, "{}", dumps({}))
error = response_json.get("error", None)
result = response_json.get("result", None)
assert error is not None or result is not None, "No error or result in response"
""".format(test_name, address, test_payload)
def get_request_from_yaml(path_to_yaml):
    """Extract the JSON request body from the first stage of a tavern test.

    Returns the request parameters serialized with ``dumps``, or ``None``
    when the document has no stages/request section.
    """
    import yaml
    with open(path_to_yaml, "r") as src:
        document = yaml.load(src, Loader=yaml.BaseLoader)
    if "stages" in document and "request" in document["stages"][0]:
        json_parameters = document["stages"][0]["request"].get("json", None)
        assert json_parameters is not None, "Unable to find json parameters in request"
        return dumps(json_parameters)
    return None
def make_test_name_from_path(test_path):
    """Build a python identifier from the last three path components,
    replacing '.' and '-' with '_'."""
    tail = test_path.split("/")[-3:]
    return "_".join(tail).replace(".", "_").replace("-", "_")
def make_benchmark_test_file(file_name, address, tests_root_dir):
    """Generate *file_name* with one benchmark test per tavern yaml file
    found (recursively) under *tests_root_dir*."""
    import os
    from fnmatch import fnmatch
    # Collect every *.tavern.yaml file below the root directory.
    matched = []
    for directory, _, names in os.walk(tests_root_dir):
        matched.extend(
            os.path.join(directory, name)
            for name in names
            if fnmatch(name, "*.tavern.yaml")
        )
    with open(file_name, "w") as out:
        out.write(make_benchmark_header())
        for test_file in matched:
            out.write(make_benchmark(make_test_name_from_path(test_file),
                                     address,
                                     get_request_from_yaml(test_file)))
            out.write("\n")
if __name__ == "__main__":
    import argparse
    # CLI: <path_to_test_dir> <benchmark_test_file_name> <target_ip_address>
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("path_to_test_dir", type=str, help="Path to test directory for given xml file")
    arg_parser.add_argument("benchmark_test_file_name", type=str, help="Name of the generated test file")
    arg_parser.add_argument("target_ip_address", type=str, help="Address of the hivemind")
    options = arg_parser.parse_args()
    make_benchmark_test_file(options.benchmark_test_file_name,
                             options.target_ip_address,
                             options.path_to_test_dir)
#!/usr/bin/python3
""" Parse json file generated by pytest benchmarks and create htm report file
for files exceeding expected threshold print information to the console
"""
import os
from sys import exit
from json import dumps, load
def get_request_from_yaml(path_to_yaml):
    """Extract request parameters from given yaml file

    Parameters:
    - path_to_yaml - path to yaml file
    Returns:
    - string with request parameters (empty string when the document has
      no stages/request section)
    """
    import yaml
    with open(path_to_yaml, "r") as src:
        document = yaml.load(src, Loader=yaml.BaseLoader)
    if "stages" in document and "request" in document["stages"][0]:
        json_parameters = document["stages"][0]["request"].get("json", None)
        assert json_parameters is not None, "Unable to find json parameters in request"
        return dumps(json_parameters)
    # NOTE(review): the benchmark_generator copy of this helper returns None
    # here instead of "" — confirm which is intended.
    return ""
def make_class_path_dict(root_dir):
    """Scan *root_dir* for tavern yaml files and build a lookup table.

    Keys are the file paths with '.', '-' and '/' replaced by '_' (matching
    the class names pytest derives); values are the original file paths.
    """
    from fnmatch import fnmatch
    mapping = {}
    for directory, _, file_names in os.walk(root_dir):
        for file_name in file_names:
            if not fnmatch(file_name, "*.tavern.yaml"):
                continue
            full_path = os.path.join(directory, file_name)
            key = full_path.replace(".", "_").replace("-", "_").replace("/", "_")
            mapping[key] = full_path
    return mapping
def class_to_path(class_name, class_to_path_dic):
    """Return the test-file path whose mangled key ends with *class_name*.

    Parameters:
    - class_name - test to find,
    - class_to_path_dic - dict with class -> path key/values
    Return:
    - path to the test file, or None when no key matches
    """
    from fnmatch import fnmatch
    return next(
        (path for key, path in class_to_path_dic.items()
         if fnmatch(key, "*" + class_name)),
        None,
    )
def json_report_parser(path_to_test_dir, json_file, time_threshold=1.0):
    """Render a pytest-benchmark json file as an html report table.

    Parameters:
    - path_to_test_dir - directory holding the tavern yaml tests the
      benchmarks were generated from
    - json_file - path to the benchmark json file
    - time_threshold - mean-time limit in seconds; slower tests are
      highlighted in red

    Returns a list of (test name, mean time in ms, request parameters)
    tuples for every benchmark whose mean exceeded *time_threshold*.
    """
    above_threshold = []
    html_file, _ = os.path.splitext(json_file)
    html_file = "tavern_report_" + html_file + ".html"
    class_to_path_dic = make_class_path_dict(path_to_test_dir)
    # Load the data before creating the report file; the original code
    # shadowed the `json_file` parameter with the open file handle.
    with open(json_file, "r") as data_file:
        json_data = load(data_file)
    with open(html_file, "w") as ofile:
        ofile.write("<html>\n")
        ofile.write(" <head>\n")
        ofile.write(" <style>\n")
        ofile.write(" table, th, td {\n")
        ofile.write(" border: 1px solid black;\n")
        ofile.write(" border-collapse: collapse;\n")
        ofile.write(" }\n")
        ofile.write(" th, td {\n")
        ofile.write(" padding: 15px;\n")
        ofile.write(" }\n")
        ofile.write(" </style>\n")
        ofile.write(" </head>\n")
        ofile.write(" <body>\n")
        ofile.write(" <table>\n")
        ofile.write(" <tr><th>Test name</th><th>Min time [ms]</th><th>Max time [ms]</th><th>Mean time [ms]</th></tr>\n")
        for benchmark in json_data['benchmarks']:
            stats = benchmark['stats']
            if float(stats['mean']) > time_threshold:
                # benchmark['name'] starts with "test_"; strip the prefix
                # before mapping the name back to its yaml file.  Resolve the
                # parameters once instead of twice as the original did.
                params = get_request_from_yaml(class_to_path(benchmark['name'][5:], class_to_path_dic))
                ofile.write(" <tr><td>{}<br/>Parameters: {}</td><td>{:.4f}</td><td>{:.4f}</td><td bgcolor=\"red\">{:.4f}</td></tr>\n".format(benchmark['name'], params, stats['min'] * 1000, stats['max'] * 1000, stats['mean'] * 1000))
                above_threshold.append((benchmark['name'], "{:.4f}".format(stats['mean'] * 1000), params))
            else:
                ofile.write(" <tr><td>{}</td><td>{:.4f}</td><td>{:.4f}</td><td>{:.4f}</td></tr>\n".format(benchmark['name'], stats['min'] * 1000, stats['max'] * 1000, stats['mean'] * 1000))
        ofile.write(" </table>\n")
        ofile.write(" </body>\n")
        ofile.write("</html>\n")
    return above_threshold
if __name__ == '__main__':
    import argparse
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("path_to_test_dir", type=str, help="Path to test directory for given json benchmark file")
    arg_parser.add_argument("json_file", type=str, help="Path to benchmark json file")
    arg_parser.add_argument("--time-threshold", dest="time_threshold", type=float, default=1.0, help="Time threshold for test execution time, tests with execution time greater than threshold will be marked on red.")
    options = arg_parser.parse_args()
    # NOTE(review): this exits 1 when NO benchmark exceeded the threshold,
    # which looks inverted for a pass/fail gate — confirm intent.
    if not json_report_parser(options.path_to_test_dir, options.json_file, options.time_threshold):
        exit(1)
    exit(0)
......@@ -9,5 +9,4 @@ echo "Starting tests on hivemind server running on ${HIVEMIND_ADDRESS}:${HIVEMIN
echo "Selected test group (if empty all will be executed): $3"
tox -- -W ignore::pytest.PytestDeprecationWarning -n auto --durations=0 \
--junitxml=../../../../$4 $3
tox -e tavern -- -W ignore::pytest.PytestDeprecationWarning -n auto --junitxml=../../../../$4 $3
#!/usr/bin/python3
import os
import subprocess
from json import load, dump
from benchmark_generator import make_benchmark_test_file
from json_report_parser import json_report_parser
def get_test_directories(tests_root_dir):
ret = []
for name in os.listdir(tests_root_dir):
dir_path = os.path.join(tests_root_dir, name)
if os.path.isdir(dir_path):
ret.append(dir_path)
return ret
def find_data_in_benchmarks(name, json_data):
    """Return the (min, max, mean) stats of benchmark *name* from a
    pytest-benchmark document, or (None, None, None) when absent."""
    for entry in json_data['benchmarks']:
        if entry['name'] == name:
            stats = entry['stats']
            return (stats['min'], stats['max'], stats['mean'])
    return (None, None, None)
def join_benchmark_data(file_name, json_files):
    """Merge several benchmark runs into one json document.

    For every test present in the first run the merged stats take the
    smallest min, the largest max and the average mean across all runs.
    The merged document is written to "<file_name>.json".
    """
    from statistics import mean
    runs = []
    for path in json_files:
        with open(path, "r") as src:
            runs.append(load(src))
    # The first run serves as the template; its stats are updated in place.
    merged = runs[0]
    for benchmark in merged['benchmarks']:
        collected = [find_data_in_benchmarks(benchmark['name'], run) for run in runs]
        mins = [c[0] for c in collected if c[0] is not None]
        maxes = [c[1] for c in collected if c[1] is not None]
        means = [c[2] for c in collected if c[2] is not None]
        benchmark['stats']['min'] = min(mins)
        benchmark['stats']['max'] = max(maxes)
        benchmark['stats']['mean'] = mean(means)
    with open("{}.json".format(file_name), "w") as out:
        dump(merged, out)
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("hivemind_address", type=str, help="Address of hivemind instance")
    parser.add_argument("hivemind_port", type=int, help="Port of hivemind instance")
    parser.add_argument("tests_root_dir", type=str, help="Path to tests root dir")
    parser.add_argument("--benchmark-runs", type=int, default=3, help="How many benchmark runs")
    parser.add_argument("--time-threshold", dest="time_threshold", type=float, default=1.0, help="Time threshold for test execution time, tests with execution time greater than threshold will be marked on red.")
    args = parser.parse_args()

    assert os.path.exists(args.tests_root_dir), "Directory does not exist"
    assert args.benchmark_runs > 0, "Benchmarks runs option has to be positive number"

    hivemind_url = "http://{}:{}".format(args.hivemind_address, args.hivemind_port)
    test_directories = get_test_directories(args.tests_root_dir)

    # Generate one benchmark_<dir>.py module per test directory.
    benchmarks_files = []
    for test_directory in test_directories:
        benchmark_file_name = "benchmark_" + test_directory.split("/")[-1] + ".py"
        make_benchmark_test_file(benchmark_file_name, hivemind_url, test_directory)
        benchmarks_files.append(benchmark_file_name)

    # Run each generated module --benchmark-runs times under pytest-benchmark,
    # collecting the per-run json output files keyed by module name.
    benchmark_json_files = {}
    for run in range(args.benchmark_runs):
        for benchmark_file in benchmarks_files:
            name, ext = os.path.splitext(benchmark_file)
            json_file_name = "{}-{:03d}.json".format(name, run)
            cmd = [
                "pytest",
                "--benchmark-max-time=0.000001",
                "--benchmark-min-rounds=10",
                "--benchmark-json={}".format(json_file_name),
                benchmark_file
            ]
            if name in benchmark_json_files:
                benchmark_json_files[name].append(json_file_name)
            else:
                benchmark_json_files[name] = [json_file_name]
            ret = subprocess.run(cmd)
            # Abort the whole benchmark session on the first failing pytest run.
            if ret.returncode != 0:
                print("Error while running `{}`".format(' '.join(cmd)))
                exit(1)

    # Fold the repeated runs into one <name>.json document per module.
    for name, json_files in benchmark_json_files.items():
        join_benchmark_data(name, json_files)

    # Build html reports and gather every benchmark above the time threshold.
    failed = []
    for test_directory in test_directories:
        json_file_name = "benchmark_" + test_directory.split("/")[-1] + ".json"
        ret = json_report_parser(test_directory, json_file_name, args.time_threshold)
        if ret:
            failed.extend(ret)

    if failed:
        # prettytable is only needed on the failure path; imported lazily.
        from prettytable import PrettyTable
        summary = PrettyTable()
        print("########## Test failed with following tests above {}ms threshold ##########".format(args.time_threshold * 1000))
        summary.field_names = ['Test name', 'Mean time [ms]', 'Call parameters']
        for entry in failed:
            summary.add_row(entry)
        print(summary)
        exit(2)
    exit(0)
......@@ -9,4 +9,4 @@ echo Attempting to start tests on hivemind instance listeing on: $HIVEMIND_ADDRE
echo "Selected test group (if empty all will be executed): $3"
tox -- -W ignore::pytest.PytestDeprecationWarning -n auto --durations=0 --junitxml=../../../../$4 $3
tox -e tavern -- -W ignore::pytest.PytestDeprecationWarning -n auto --junitxml=../../../../$4 $3
Subproject commit c3c830a24079e47e59ebdd91a9670c905d6edfff
Subproject commit 819563bf5c43f0d7620b4be6e2a33df86dd168e4
[tox]
envlist = py36
envlist = py36, tavern, benchmark
skipsdist = true
[testenv]
deps =
pytest
[testenv:benchmark]
deps =
{[testenv]deps}
pytest-benchmark
requests
pyyaml
prettytable
commands =
python {toxinidir}/scripts/ci/start_api_benchmark.py {posargs}
[testenv:tavern]
setenv =
PYTHONPATH = {toxinidir}/tests/tests_api/hivemind/tavern:{env:PYTHONPATH:}
......@@ -12,15 +27,8 @@ passenv =
changedir = tests/tests_api/hivemind/tavern
deps =
pytest
pytest-cov
pytest-pylint
pytest-asyncio
pytest-console-scripts
{[testenv]deps}
pytest-xdist
git-pylint-commit-hook
pep8
yapf
tavern
deepdiff[murmur]
jsondiff
......