Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • hive/hivemind
1 result
Show changes
Commits on Source (13)
Showing
with 353 additions and 313 deletions
......@@ -291,18 +291,9 @@ api_smoketest_benchmark:
- hivemind
script:
- tox -e benchmark -- localhost $HIVEMIND_HTTP_PORT tests/tests_api/hivemind/tavern
- ./scripts/ci_start_api_benchmarks.sh localhost $HIVEMIND_HTTP_PORT 5
artifacts:
when: always
paths:
- tavern_report_benchmark_bridge_api_patterns.html
- tavern_report_benchmark_bridge_api_negative.html
- tavern_report_benchmark_condenser_api_patterns.html
- tavern_report_benchmark_condenser_api_negative.html
- tavern_report_benchmark_database_api_patterns.html
- tavern_report_benchmark_database_api_negative.html
- tavern_report_benchmark_follow_api_patterns.html
- tavern_report_benchmark_follow_api_negative.html
- tavern_report_benchmark_tags_api_patterns.html
- tavern_report_benchmark_tags_api_negative.html
- tavern_benchmarks_report.html
......@@ -132,7 +132,10 @@ class DbState:
'hive_votes_voter_id_post_id_idx',
'hive_votes_post_id_voter_id_idx',
'hive_reputation_data_block_num_idx'
'hive_reputation_data_block_num_idx',
'hive_notification_cache_block_num_idx',
'hive_notification_cache_dst_score_idx'
]
to_return = []
......@@ -344,6 +347,14 @@ class DbState:
time_end = perf_counter()
log.info("[INIT] update_posts_rshares executed in %.4fs", time_end - time_start)
time_start = perf_counter()
sql = """
SELECT update_notification_cache(NULL, NULL, False);
"""
DbState.db().query_no_return(sql)
time_end = perf_counter()
log.info("[INIT] update_notification_cache executed in %.4fs", time_end - time_start)
# Update a block num immediately
DbState.db().query_no_return("UPDATE hive_state SET block_num = :block_num", block_num = current_imported_block)
......
......@@ -395,7 +395,7 @@ def build_metadata_community(metadata=None):
sa.Column('dst_id', sa.Integer, nullable=True),
sa.Column('post_id', sa.Integer, nullable=True),
sa.Column('community_id', sa.Integer, nullable=True),
sa.Column('block_num', sa.Integer, nullable=True),
sa.Column('block_num', sa.Integer, nullable=False),
sa.Column('payload', sa.Text, nullable=True),
sa.Index('hive_notifs_ix1', 'dst_id', 'id', postgresql_where=sql_text("dst_id IS NOT NULL")),
......@@ -406,6 +406,25 @@ def build_metadata_community(metadata=None):
sa.Index('hive_notifs_ix6', 'dst_id', 'created_at', 'score', 'id', postgresql_where=sql_text("dst_id IS NOT NULL")), # unread
)
sa.Table('hive_notification_cache', metadata,
sa.Column('id', sa.BigInteger, primary_key=True),
sa.Column('block_num', sa.Integer, nullable = False),
sa.Column('type_id', sa.Integer, nullable = False),
sa.Column('dst', sa.Integer, nullable=True), # dst account id except persistent notifs from hive_notifs
sa.Column('src', sa.Integer, nullable=True), # src account id
sa.Column('dst_post_id', sa.Integer, nullable=True), # destination post id
sa.Column('post_id', sa.Integer, nullable=True),
sa.Column('created_at', sa.DateTime, nullable=False), # notification creation time
sa.Column('score', sa.Integer, nullable=False),
sa.Column('community_title', sa.String(32), nullable=True),
sa.Column('community', sa.String(16), nullable=True),
sa.Column('payload', sa.String, nullable=True),
sa.Index('hive_notification_cache_block_num_idx', 'block_num'),
sa.Index('hive_notification_cache_dst_score_idx', 'dst', 'score', postgresql_where=sql_text("dst IS NOT NULL"))
)
return metadata
......@@ -589,7 +608,9 @@ def setup(db):
"condenser_get_follow_counts.sql",
"condenser_get_names_by_followers.sql",
"condenser_get_names_by_following.sql",
"condenser_get_names_by_reblogged.sql"
"condenser_get_names_by_reblogged.sql",
"condenser_get_discussions_by_comments.sql",
"condenser_get_account_reputations.sql"
]
from os.path import dirname, realpath
......
DROP FUNCTION IF EXISTS condenser_get_account_reputations;

-- Enumerate account reputations ordered by account name.
-- When _without_lower_bound is true the whole table is scanned from the top;
-- otherwise results start at _account_lower_bound (inclusive).
CREATE OR REPLACE FUNCTION condenser_get_account_reputations(
  in _account_lower_bound VARCHAR,
  in _without_lower_bound BOOLEAN,
  in _limit INTEGER
)
RETURNS TABLE
(
  name hive_accounts.name%TYPE,
  reputation hive_accounts.reputation%TYPE
)
AS
$function$
BEGIN
  RETURN QUERY SELECT
    ha.name, ha.reputation
  FROM hive_accounts ha
  WHERE _without_lower_bound OR ( ha.name >= _account_lower_bound )
  -- Qualified as ha.name: a bare `name` is ambiguous in plpgsql between the
  -- OUT column `name` and the table column, and raises at runtime.
  ORDER BY ha.name
  LIMIT _limit;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS condenser_get_discussions_by_comments;

-- Lists comments (depth > 0) written by _author, newest id first.
-- When _permlink is non-empty, (_author, _permlink) is resolved to a post id
-- and acts as an inclusive pagination cursor (hp.id <= __post_id).
CREATE OR REPLACE FUNCTION condenser_get_discussions_by_comments(
  in _author VARCHAR,
  in _permlink VARCHAR,
  in _limit INTEGER
)
RETURNS SETOF bridge_api_post
AS
$function$
DECLARE
  __post_id INTEGER := 0;  -- 0 means "no cursor": start from newest comment
BEGIN
  IF _permlink <> '' THEN
    -- third argument True: raise if the author/permlink pair does not exist
    __post_id = find_comment_id( _author, _permlink, True );
  END IF;
  RETURN QUERY SELECT
    hp.id,
    hp.author,
    hp.parent_author,
    hp.author_rep,
    hp.root_title,
    hp.beneficiaries,
    hp.max_accepted_payout,
    hp.percent_hbd,
    hp.url,
    hp.permlink,
    hp.parent_permlink_or_category,
    hp.title,
    hp.body,
    hp.category,
    hp.depth,
    hp.promoted,
    hp.payout,
    hp.pending_payout,
    hp.payout_at,
    hp.is_paidout,
    hp.children,
    hp.votes,
    hp.created_at,
    hp.updated_at,
    hp.rshares,
    hp.abs_rshares,
    hp.json,
    hp.is_hidden,
    hp.is_grayed,
    hp.total_votes,
    hp.sc_trend,
    hp.role_title,
    hp.community_title,
    hp.role_id,
    hp.is_pinned,
    hp.curator_payout_value
  FROM hive_posts_view hp
  WHERE
    hp.author = _author AND hp.depth > 0 AND ( ( __post_id = 0 ) OR ( hp.id <= __post_id ) )
  ORDER BY hp.id DESC, hp.depth
  LIMIT _limit;
END
$function$
language plpgsql STABLE;
......@@ -54,7 +54,17 @@ for sql in postgres_handle_view_changes.sql \
database_api_list_comments.sql \
database_api_list_votes.sql \
update_posts_rshares.sql \
update_hive_post_root_id.sql
update_hive_post_root_id.sql \
condenser_get_by_replies_to_account.sql \
condenser_get_by_account_comments.sql \
condenser_get_by_blog_without_reblog.sql \
condenser_get_by_feed_with_reblog.sql \
condenser_get_by_blog.sql \
bridge_get_account_posts_by_blog.sql \
condenser_get_follow_counts.sql \
condenser_get_names_by_followers.sql \
condenser_get_names_by_following.sql \
condenser_get_names_by_reblogged.sql
do
echo Executing psql -U $1 -d $2 -f $sql
......
......@@ -44,7 +44,7 @@ BEGIN
RETURN QUERY SELECT
__last_read_at as lastread_at,
count(1) as unread
FROM hive_raw_notifications_view hnv
FROM hive_notification_cache hnv
WHERE hnv.dst = __account_id AND hnv.block_num > __limit_block AND hnv.block_num > __last_read_at_block AND hnv.score >= _minimum_score
;
END
......@@ -82,7 +82,7 @@ BEGIN
FROM
(
select nv.id, nv.type_id, nv.created_at, nv.src, nv.dst, nv.dst_post_id, nv.score, nv.community, nv.community_title, nv.payload
from hive_raw_notifications_view nv
from hive_notification_cache nv
WHERE nv.dst = __account_id AND nv.block_num > __limit_block AND nv.score >= _min_score AND ( _last_id = 0 OR nv.id < _last_id )
ORDER BY nv.id DESC
LIMIT _limit
......@@ -123,7 +123,7 @@ BEGIN
FROM
(
SELECT nv.id, nv.type_id, nv.created_at, nv.src, nv.dst, nv.dst_post_id, nv.score, nv.community, nv.community_title, nv.payload
FROM hive_raw_notifications_view nv
FROM hive_notification_cache nv
WHERE nv.post_id = __post_id AND nv.block_num > __limit_block AND nv.score >= _min_score AND ( _last_id = 0 OR nv.id < _last_id )
ORDER BY nv.id DESC
LIMIT _limit
......@@ -139,3 +139,29 @@ END
$function$
LANGUAGE plpgsql STABLE
;
DROP FUNCTION IF EXISTS update_notification_cache;
;
-- Refreshes hive_notification_cache from hive_raw_notifications_view.
-- _first_block_num NULL  => full rebuild: the cache table is truncated and
--                           repopulated for the whole 90-day window.
-- otherwise              => only notifications with block_num in
--                           [_first_block_num, _last_block_num] are inserted,
--                           and, when _prune_old is true, rows older than the
--                           90-day window are deleted first.
CREATE OR REPLACE FUNCTION update_notification_cache(in _first_block_num INT, in _last_block_num INT, in _prune_old BOOLEAN)
RETURNS VOID
AS
$function$
DECLARE
  -- notifications older than ~90 days are out of API scope and never cached
  __limit_block hive_blocks.num%TYPE = block_before_head( '90 days' );
BEGIN
  IF _first_block_num IS NULL THEN
    TRUNCATE TABLE hive_notification_cache;
  ELSE
    DELETE FROM hive_notification_cache nc WHERE _prune_old AND nc.block_num <= __limit_block;
  END IF;

  -- NOTE(review): there is no ON CONFLICT clause — presumably callers never
  -- pass an already-cached block range twice; confirm, as a repeat would
  -- violate the primary key on id.
  INSERT INTO hive_notification_cache
  (id, block_num, type_id, created_at, src, dst, dst_post_id, post_id, score, payload, community, community_title)
  SELECT nv.id, nv.block_num, nv.type_id, nv.created_at, nv.src, nv.dst, nv.dst_post_id, nv.post_id, nv.score, nv.payload, nv.community, nv.community_title
  FROM hive_raw_notifications_view nv
  WHERE nv.block_num > __limit_block AND (_first_block_num IS NULL OR nv.block_num BETWEEN _first_block_num AND _last_block_num)
  ;
END
$function$
LANGUAGE plpgsql VOLATILE
;
......@@ -70,7 +70,11 @@ RETURNS FLOAT
LANGUAGE 'sql'
IMMUTABLE
AS $BODY$
SELECT CAST( ( _post_payout/_post_rshares ) * _vote_rshares as FLOAT);
SELECT CASE _post_rshares != 0
WHEN TRUE THEN CAST( ( _post_payout/_post_rshares ) * _vote_rshares as FLOAT)
ELSE
CAST(0 AS FLOAT)
END
$BODY$;
......
......@@ -108,6 +108,23 @@ END
$BODY$;
COMMIT;
START TRANSACTION;
DO
$BODY$
BEGIN
IF EXISTS (SELECT * FROM hive_db_data_migration WHERE migration = 'Notification cache initial fill') THEN
RAISE NOTICE 'Performing notification cache initial fill...';
SET work_mem='2GB';
PERFORM update_notification_cache(NULL, NULL, False);
DELETE FROM hive_db_data_migration WHERE migration = 'Notification cache initial fill';
ELSE
RAISE NOTICE 'Skipping notification cache initial fill...';
END IF;
END
$BODY$;
COMMIT;
START TRANSACTION;
TRUNCATE TABLE hive_db_data_migration;
......
......@@ -295,3 +295,30 @@ CREATE INDEX IF NOT EXISTS hive_posts_author_id_created_at_idx ON public.hive_po
CREATE INDEX IF NOT EXISTS hive_blocks_created_at_idx ON hive_blocks (created_at);
INSERT INTO hive_db_data_migration
SELECT 'Notification cache initial fill'
WHERE NOT EXISTS (SELECT data_type
FROM information_schema.columns
WHERE table_name = 'hive_notification_cache');
--- Notification cache to significantly speedup notification APIs.
CREATE TABLE IF NOT EXISTS hive_notification_cache
(
id BIGINT NOT NULL,
block_num INT NOT NULL,
type_id INT NOT NULL,
dst INT NULL,
src INT NULL,
dst_post_id INT NULL,
post_id INT NULL,
score INT NOT NULL,
created_at TIMESTAMP WITHOUT TIME ZONE NOT NULL,
community_title VARCHAR(32) NULL,
community VARCHAR(16) NULL,
payload VARCHAR NULL,
CONSTRAINT hive_notification_cache_pk PRIMARY KEY (id)
);
CREATE INDEX IF NOT EXISTS hive_notification_cache_block_num_idx ON hive_notification_cache (block_num);
CREATE INDEX IF NOT EXISTS hive_notification_cache_dst_score_idx ON hive_notification_cache (dst, score) WHERE dst IS NOT NULL;
......@@ -432,13 +432,16 @@ class Blocks:
"""
update_active_starting_from_posts_on_block( first_block, last_block )
is_hour_action = last_block % 1200 == 0
queries = [
"SELECT update_posts_rshares({}, {})".format(first_block, last_block),
"SELECT update_hive_posts_children_count({}, {})".format(first_block, last_block),
"SELECT update_hive_posts_root_id({},{})".format(first_block, last_block),
"SELECT update_hive_posts_api_helper({},{})".format(first_block, last_block),
"SELECT update_feed_cache({}, {})".format(first_block, last_block),
"SELECT update_hive_posts_mentions({}, {})".format(first_block, last_block)
"SELECT update_hive_posts_mentions({}, {})".format(first_block, last_block),
"SELECT update_notification_cache({}, {}, {})".format(first_block, last_block, is_hour_action)
#,"SELECT update_account_reputations({}, {})".format(first_block, last_block)
]
......
......@@ -284,7 +284,7 @@ async def get_account_posts(context, sort:str, account:str, start_author:str='',
sql = None
account_posts = True # set when only posts (or reblogs) of given account are supposed to be in results
if sort == 'blog':
sql = "SELECT * FROM bridge_get_account_posts_by_blog( (:account)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
sql = "SELECT * FROM bridge_get_account_posts_by_blog( (:account)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::INTEGER )"
elif sort == 'feed':
return await _get_account_posts_by_feed(db, account, start_author, start_permlink, limit)
elif sort == 'posts':
......
......@@ -7,18 +7,6 @@ from hive.server.database_api.methods import find_votes_impl, VotesPresentation
# pylint: disable=too-many-lines
async def get_post_id(db, author, permlink):
"""Given an author/permlink, retrieve the id from db."""
sql = """
SELECT
hp.id
FROM hive_posts hp
INNER JOIN hive_accounts ha_a ON ha_a.id = hp.author_id
INNER JOIN hive_permlink_data hpd_p ON hpd_p.id = hp.permlink_id
WHERE ha_a.name = :author AND hpd_p.permlink = :permlink
AND counter_deleted = 0 LIMIT 1""" # ABW: replace with find_comment_id(:author,:permlink,True)?
return await db.query_one(sql, author=author, permlink=permlink)
async def get_followers(db, account: str, start: str, follow_type: str, limit: int):
"""Get a list of accounts following by a given account."""
state = 2 if follow_type == 'ignore' else 1
......
......@@ -20,51 +20,6 @@ from hive.server.hive_api.public import get_by_feed_with_reblog_impl
# pylint: disable=too-many-arguments,line-too-long,too-many-lines
SQL_TEMPLATE = """
SELECT
hp.id,
hp.author,
hp.permlink,
hp.author_rep,
hp.title,
hp.body,
hp.category,
hp.depth,
hp.promoted,
hp.payout,
hp.pending_payout,
hp.payout_at,
hp.is_paidout,
hp.children,
hp.votes,
hp.created_at,
hp.updated_at,
hp.rshares,
hp.abs_rshares,
hp.json,
hp.is_hidden,
hp.is_grayed,
hp.total_votes,
hp.net_votes,
hp.total_vote_weight,
hp.parent_author,
hp.parent_permlink_or_category,
hp.curator_payout_value,
hp.root_author,
hp.root_permlink,
hp.max_accepted_payout,
hp.percent_hbd,
hp.allow_replies,
hp.allow_votes,
hp.allow_curation_rewards,
hp.beneficiaries,
hp.url,
hp.root_title,
hp.active,
hp.author_rewards
FROM hive_posts_view hp
"""
@return_error_info
async def get_account_votes(context, account):
"""Return an info message about get_acccount_votes being unsupported."""
......@@ -137,15 +92,8 @@ async def get_account_reputations(context, account_lower_bound: str = None, limi
async def _get_account_reputations_impl(db, fat_node_style, account_lower_bound, limit):
"""Enumerate account reputations."""
limit = valid_limit(limit, 1000, None)
seek = ''
if account_lower_bound:
seek = "WHERE name >= :start"
sql = """SELECT name, reputation
FROM hive_accounts %s
ORDER BY name
LIMIT :limit""" % seek
sql = "SELECT * FROM condenser_get_account_reputations( '{}', {}, {} )".format( account_lower_bound, account_lower_bound is None, limit )
rows = await db.query_all(sql, start=account_lower_bound, limit=limit)
if fat_node_style:
return [dict(account=r[0], reputation=r[1]) for r in rows]
......@@ -370,7 +318,6 @@ async def get_discussions_by_feed(context, tag: str = None, start_author: str =
valid_limit(limit, 100, 20),
truncate_body)
@return_error_info
@nested_query_compat
async def get_discussions_by_comments(context, start_author: str = None, start_permlink: str = '',
......@@ -383,25 +330,11 @@ async def get_discussions_by_comments(context, start_author: str = None, start_p
valid_permlink(start_permlink, allow_empty=True)
valid_limit(limit, 100, 20)
#force copy
sql = str(SQL_TEMPLATE)
sql += """
WHERE
hp.author = :start_author AND hp.depth > 0
"""
if start_permlink:
sql += """
AND hp.id <= (SELECT hive_posts.id FROM hive_posts WHERE author_id = (SELECT id FROM hive_accounts WHERE name = :start_author) AND permlink_id = (SELECT id FROM hive_permlink_data WHERE permlink = :start_permlink))
"""
sql += """
ORDER BY hp.id DESC, hp.depth LIMIT :limit
"""
posts = []
db = context['db']
result = await db.query_all(sql, start_author=start_author, start_permlink=start_permlink, limit=limit)
sql = " SELECT * FROM condenser_get_discussions_by_comments( '{}', '{}', {} ) ".format( start_author, start_permlink, limit )
result = await db.query_all(sql)
for row in result:
row = dict(row)
......
......@@ -17,24 +17,6 @@ async def get_community_id(db, name):
return await db.query_one("SELECT id FROM hive_communities WHERE name = :name",
name=name)
async def url_to_id(db, url):
"""Get post_id based on post url."""
return await get_post_id(db, *split_url(url))
async def get_post_id(db, author, permlink):
"""Get post_id based on author/permlink."""
sql = """
SELECT
hp.id, ha_a.name as author, hpd_p.permlink as permlink
FROM
hive_posts hp
INNER JOIN hive_accounts ha_a ON ha_a.id = hp.author_id
INNER JOIN hive_permlink_data hpd_p ON hpd_p.id = hp.permlink_id
WHERE ha_a.name = :a AND hpd_p.permlink = :p"""
_id = await db.query_one(sql, a=author, p=permlink)
assert _id, 'post id not found'
return _id
async def get_account_id(db, name):
"""Get account id from account name."""
assert name, 'no account name specified'
......
......@@ -83,7 +83,7 @@ async def get_info(context):
async def get_by_feed_with_reblog_impl(db, account: str, start_author: str = '',
start_permlink: str = '', limit: int = 20, truncate_body: int = 0):
"""Get a list of [post_id, reblogged_by_str] for an account's feed."""
"""Get a list of posts for an account's feed."""
sql = " SELECT * FROM condenser_get_by_feed_with_reblog( '{}', '{}', '{}', {} ) ".format( account, start_author, start_permlink, limit )
result = await db.query_all(sql)
......
"""Hive API: Threaded discussion handling"""
import logging
from hive.server.hive_api.common import url_to_id, valid_comment_sort, valid_limit
from hive.server.hive_api.objects import comments_by_id
log = logging.getLogger(__name__)
# pylint: disable=too-many-arguments
async def fetch_tree(context, root, sort='top', limit=20, observer=None):
    """Load a comment tree (comments plus lite author data) for a post url.

    Community posts follow/apply moderation rules; blog posts hide comments
    by any account muted by the author.
    Supported sorts: new, old, hot, payout.
    """
    db = context['db']
    root_id = await url_to_id(db, root)
    sort_field = valid_comment_sort(sort)
    page_size = valid_limit(limit, 50, 20)
    return await _fetch_children(db, root_id, None, sort_field, page_size, observer)
async def fetch_more_children(context, root_id, last_sibling_id, sort='top',
                              limit=20, observer=None):
    """Load the siblings that were truncated out of an earlier tree fetch."""
    db = context['db']
    sort_field = valid_comment_sort(sort)
    page_size = valid_limit(limit, 50, 20)
    return await _fetch_children(db, root_id, last_sibling_id,
                                 sort_field, page_size, observer)
# maps public sort names to the hive_posts column used for ordering
_SORTS = dict(hot='sc_hot', top='payout', new='id')
async def _fetch_children(db, root_id, start_id, sort, limit, observer=None):
    """Fetch truncated children from tree.

    `start_id`, when given, acts as a pagination cursor: only posts ranking
    strictly below it (per the chosen sort field) are considered.
    Returns a dict with 'accounts' (lite author data) and 'posts'
    (the nested response tree).
    """
    # NOTE(review): mutes is always empty here — muting appears to be applied
    # elsewhere; confirm before relying on this parameter of _load_tree.
    mutes = set()
    field = _SORTS[sort]  # column name comes from a fixed whitelist, safe to interpolate
    # load id skeleton
    tree, parent = await _load_tree(db, root_id, mutes, max_depth=3)
    # find most relevant ids in subset
    seek = ''
    if start_id:
        seek = """AND %s < (SELECT %s FROM hive_posts
        WHERE id = :start_id)""" % (field, field)
    sql = """SELECT id FROM hive_posts
    WHERE id IN :ids %s ORDER BY %s DESC
    LIMIT :limit""" % (seek, field)
    relevant_ids = await db.query_col(sql, ids=tuple(parent.keys()),
                                      start_id=start_id, limit=limit)
    # fill in missing parents
    # NOTE: appending while iterating is deliberate — ids appended here are
    # themselves visited later in the same loop, so ancestors are pulled in
    # transitively up to (but excluding) the root.
    for _id in relevant_ids:
        if _id != root_id:
            if parent[_id] not in relevant_ids:
                relevant_ids.append(parent[_id])
    # load objects and assemble response tree
    comments = await comments_by_id(db, relevant_ids, observer)
    return {'accounts': comments['accounts'],
            'posts': _build_tree(tree[root_id], tree, comments['posts'], sort_ids=relevant_ids)}
def _build_tree(root_ids, tree, comments, sort_ids):
    """Assemble the nested response list for *root_ids*.

    `tree` maps post id -> child ids; `comments` maps post id -> loaded
    comment dict (mutated in place: 'type'/'children' keys are added);
    `sort_ids` fixes the output ordering. Children that were not loaded are
    summarized in a trailing 'more-children' marker.
    """
    result = []
    for node_id in sorted(root_ids, key=sort_ids.index):
        assert node_id in comments, 'root not loaded'
        node = comments[node_id]
        node['type'] = 'comment'
        if node_id in tree:
            present = []
            absent = 0
            for child_id in tree[node_id]:
                if child_id not in comments:
                    absent += 1
                    continue
                # loaded children must precede missing ones in sibling order
                assert not absent, 'missing mode: not expected to find'
                present.append(child_id)
            node['children'] = _build_tree(present, tree, comments, sort_ids) if present else []
            if absent:
                node['children'].append({'type': 'more-children',
                                         'root_id': node_id,
                                         'last_id': present[-1] if present else None,
                                         'count': absent})
        result.append(node)
    return result
async def _load_tree(db, root_id, muted, max_depth):
    """Build the child-id `tree` map and the `parent` lookup for a thread.

    `parent` covers nodes up to `max_depth`; `tree` is loaded one level
    deeper (max_depth + 1) so truncated subtrees can be detected.
    Returns (tree, parent).
    """
    parent = {}   # child id -> parent id, only to max_depth
    tree = {}     # parent id -> list of child ids, to max_depth + 1
    frontier = [root_id]
    level = 0
    while frontier:
        level += 1
        rows = await _child_ids(db, frontier, muted)
        next_frontier = []
        for pid, child_ids in rows:
            tree[pid] = child_ids
            next_frontier.extend(child_ids)
            if level <= max_depth:
                for child in child_ids:
                    parent[child] = pid
        if level > max_depth:
            break
        frontier = next_frontier
    return (tree, parent)
async def _child_ids(db, parent_ids, muted):
    """Load child ids for multiple parent ids.

    Returns [[parent_id, [child_id, ...]], ...] for visible (non-deleted,
    non-muted, valid) children; optionally excludes muted authors.
    """
    mute_clause = 'AND author NOT IN :muted' if muted else ''
    sql = """
    SELECT parent_id, array_agg(id)
    FROM hive_posts
    WHERE parent_id IN :ids
    AND counter_deleted = 0
    AND is_muted = '0'
    AND is_valid = '1' %s
    GROUP BY parent_id
    """ % mute_clause
    rows = await db.query_all(sql, ids=tuple(parent_ids), muted=tuple(muted))
    pairs = []
    for row in rows:
        pairs.append([row[0], row[1]])
    return pairs
#!/bin/bash

# Run the tavern benchmark suite several times against a running hivemind
# instance and produce a combined report.
# Usage: ci_start_api_benchmarks.sh <address> <port> <iterations>

set -e

pip3 install tox --user

export HIVEMIND_ADDRESS=$1
export HIVEMIND_PORT=$2
# disable response/pattern comparison — we only measure timings here
export TAVERN_DISABLE_COMPARATOR=true

# fix: message previously read "listeing"
echo Attempting to start benchmarks on hivemind instance listening on: $HIVEMIND_ADDRESS port: $HIVEMIND_PORT

ITERATIONS=$3
for (( i=0; i<$ITERATIONS; i++ ))
do
  echo About to run iteration $i
  # each iteration writes its own junit xml, aggregated by the parser below
  tox -e tavern-benchmark -- -W ignore::pytest.PytestDeprecationWarning -n auto --junitxml=../../../../benchmarks-$i.xml
  echo Done!
done
./scripts/xml_report_parser.py . ./tests/tests_api/hivemind/tavern
#!/usr/bin/python3
import os
from xml.dom import minidom
def process_file_name(file_name, tavern_root_dir):
    """Convert a test file path into a dotted name relative to the tavern root.

    Both the path and the root are dotted first, then the root portion is
    stripped, e.g. ("root/api/t.yaml", "root") -> "api.t.yaml".
    """
    root_as_dots = tavern_root_dir.replace("/", ".")
    name_as_dots = file_name.replace("/", ".")
    return name_as_dots.replace(root_as_dots, "").lstrip(".")
def get_requests_from_yaml(tavern_root_dir):
    """Map each tavern test file (dotted name) to the JSON body of its first request stage.

    Walks *tavern_root_dir* for `*.tavern.yaml` files; every matching file is
    expected to declare a json payload on its first stage's request.
    """
    from fnmatch import fnmatch
    import yaml
    from json import dumps
    requests = {}
    pattern = "*.tavern.yaml"
    for path, subdirs, files in os.walk(tavern_root_dir):
        for name in files:
            if not fnmatch(name, pattern):
                continue
            test_file = os.path.join(path, name)
            # BaseLoader keeps every scalar as a plain string — no tag coercion
            with open(test_file, "r") as yaml_file:
                yaml_document = yaml.load(yaml_file, Loader=yaml.BaseLoader)
            if "stages" not in yaml_document:
                continue
            first_stage = yaml_document["stages"][0]
            if "request" not in first_stage:
                continue
            json_parameters = first_stage["request"].get("json", None)
            assert json_parameters is not None, "Unable to find json parameters in request"
            requests[process_file_name(test_file, tavern_root_dir)] = dumps(json_parameters)
    return requests
def parse_xml_files(root_dir):
    """Collect per-testcase execution times from benchmarks*.xml junit reports.

    Scans *root_dir* (non-recursively) and returns
    {testcase classname: [time, time, ...]} across all matching files.
    """
    times_by_test = {}
    print("Scanning path: {}".format(root_dir))
    for entry in os.listdir(root_dir):
        full_path = os.path.join(root_dir, entry)
        is_report = (os.path.isfile(full_path)
                     and entry.startswith("benchmarks")
                     and full_path.endswith(".xml"))
        if not is_report:
            continue
        print("Processing file: {}".format(full_path))
        document = minidom.parse(full_path)
        for case in document.getElementsByTagName('testcase'):
            key = case.attributes['classname'].value
            elapsed = float(case.attributes['time'].value)
            times_by_test.setdefault(key, []).append(elapsed)
    return times_by_test
if __name__ == "__main__":
    import argparse
    from statistics import mean

    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("xml_report_dir", type=str, help="Path to benchmark xml reports")
    arg_parser.add_argument("tavern_root_dir", type=str, help="Path to tavern tests root dir")
    arg_parser.add_argument("--time-threshold", dest="time_threshold", type=float, default=1.0, help="Time threshold for test execution time, tests with execution time greater than threshold will be marked on red.")
    args = arg_parser.parse_args()

    assert os.path.exists(args.xml_report_dir), "Please provide valid xml report path"
    assert os.path.exists(args.tavern_root_dir), "Please provide valid tavern path"

    # measured times per test, and the request payload that produced them
    report_data = parse_xml_files(args.xml_report_dir)
    request_data = get_requests_from_yaml(args.tavern_root_dir)

    html_file = "tavern_benchmarks_report.html"
    above_threshold = []  # (name, mean time, parameters) for tests over the limit
    with open(html_file, "w") as ofile:
        ofile.write("<html>\n")
        ofile.write(" <head>\n")
        ofile.write(" <style>\n")
        ofile.write(" table, th, td {\n")
        ofile.write(" border: 1px solid black;\n")
        ofile.write(" border-collapse: collapse;\n")
        ofile.write(" }\n")
        ofile.write(" th, td {\n")
        ofile.write(" padding: 15px;\n")
        ofile.write(" }\n")
        ofile.write(" </style>\n")
        ofile.write(" </head>\n")
        ofile.write(" <body>\n")
        ofile.write(" <table>\n")
        ofile.write(" <tr><th>Test name</th><th>Min time [s]</th><th>Max time [s]</th><th>Mean time [s]</th></tr>\n")
        for name, data in report_data.items():
            dmin, dmax, dmean = min(data), max(data), mean(data)
            if dmean > args.time_threshold:
                # red cell + request parameters for slow tests
                ofile.write(" <tr><td>{}<br/>Parameters: {}</td><td>{:.4f}</td><td>{:.4f}</td><td bgcolor=\"red\">{:.4f}</td></tr>\n".format(name, request_data[name], dmin, dmax, dmean))
                above_threshold.append((name, "{:.4f}".format(dmean), request_data[name]))
            else:
                ofile.write(" <tr><td>{}</td><td>{:.4f}</td><td>{:.4f}</td><td>{:.4f}</td></tr>\n".format(name, dmin, dmax, dmean))
        ofile.write(" </table>\n")
        ofile.write(" </body>\n")
        ofile.write("</html>\n")
    if above_threshold:
        # print an easily grep-able console summary and fail the CI job
        from prettytable import PrettyTable
        summary = PrettyTable()
        print("########## Test failed with following tests above {}s threshold ##########".format(args.time_threshold))
        summary.field_names = ['Test name', 'Mean time [s]', 'Call parameters']
        for entry in above_threshold:
            summary.add_row(entry)
        print(summary)
        exit(2)
    exit(0)
<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite errors="0" failures="1" hostname="dev7" name="pytest" skipped="0" tests="1" time="0.620" timestamp="2020-11-02T14:41:30.207316"><testcase classname="condenser_api_patterns.get_state.gtg_permlink.tavern.yaml" name="Hivemind condenser_api.get_state patterns test" time="0.524"><failure message="Format variables:&#10; service.proto:s = 'http'&#10; service.server:s = '127.0.0.1'&#10; service.port = '6666'&#10;&#10;Source test stage (line 10):&#10; - name: get_state&#10; request:&#10; url: &quot;{service.proto:s}://{service.server:s}:{service.port}/&quot;&#10; method: POST&#10; headers:&#10; content-type: application/json&#10; json:&#10; jsonrpc: &quot;2.0&quot;&#10; id: 1&#10; method: &quot;condenser_api.get_state&quot;&#10; params: [&quot;/category/@gtg/witness-gtg&quot;]&#10; response:&#10; status_code: 200&#10; verify_response_with:&#10; function: validate_response:compare_response_with_pattern&#10; extra_kwargs:&#10; method: &quot;gtg_permlink&quot;&#10;&#10;Formatted stage:&#10; name: get_state&#10; request:&#10; headers:&#10; content-type: application/json&#10; json:&#10; id: 1&#10; jsonrpc: '2.0'&#10; method: condenser_api.get_state&#10; params:&#10; - /category/@gtg/witness-gtg&#10; method: POST&#10; url: 'http://127.0.0.1:6666/'&#10; response:&#10; status_code: 200&#10; verify_response_with:&#10; extra_kwargs:&#10; directory: condenser_api_patterns/get_state&#10; method: gtg_permlink&#10; function: validate_response:compare_response_with_pattern&#10;&#10;Errors:&#10;E tavern.util.exceptions.TestFailError: Test 'get_state' failed:&#10; - Error calling validate function '&lt;function compare_response_with_pattern at 0x7fb40ec70598&gt;':&#10; Traceback (most recent call last):&#10; File &quot;/home/dev/.local/lib/python3.6/site-packages/tavern/response/base.py&quot;, line 141, in _maybe_run_validate_functions&#10; vf(response)&#10; File 
&quot;/home/dev/.local/lib/python3.6/site-packages/tavern/schemas/extensions.py&quot;, line 123, in inner&#10; return func(response, *args, **kwargs)&#10; File &quot;/home/dev/src/09.HIVE-HIVEMIND/hivemind/tests/tests_api/hivemind/tavern/validate_response.py&quot;, line 74, in compare_response_with_pattern&#10; raise PatternDiffException(msg)&#10; validate_response.PatternDiffException: Differences detected between response and pattern.">Format variables:
service.proto:s = 'http'
service.server:s = '127.0.0.1'
service.port = '6666'
Source test stage (line 10):
- name: get_state
request:
url: "{service.proto:s}://{service.server:s}:{service.port}/"
method: POST
headers:
content-type: application/json
json:
jsonrpc: "2.0"
id: 1
method: "condenser_api.get_state"
params: ["/category/@gtg/witness-gtg"]
response:
status_code: 200
verify_response_with:
function: validate_response:compare_response_with_pattern
extra_kwargs:
method: "gtg_permlink"
Formatted stage:
name: get_state
request:
headers:
content-type: application/json
json:
id: 1
jsonrpc: '2.0'
method: condenser_api.get_state
params:
- /category/@gtg/witness-gtg
method: POST
url: 'http://127.0.0.1:6666/'
response:
status_code: 200
verify_response_with:
extra_kwargs:
directory: condenser_api_patterns/get_state
method: gtg_permlink
function: validate_response:compare_response_with_pattern
Errors:
E tavern.util.exceptions.TestFailError: Test 'get_state' failed:
- Error calling validate function '&lt;function compare_response_with_pattern at 0x7fb40ec70598&gt;':
Traceback (most recent call last):
File "/home/dev/.local/lib/python3.6/site-packages/tavern/response/base.py", line 141, in _maybe_run_validate_functions
vf(response)
File "/home/dev/.local/lib/python3.6/site-packages/tavern/schemas/extensions.py", line 123, in inner
return func(response, *args, **kwargs)
File "/home/dev/src/09.HIVE-HIVEMIND/hivemind/tests/tests_api/hivemind/tavern/validate_response.py", line 74, in compare_response_with_pattern
raise PatternDiffException(msg)
validate_response.PatternDiffException: Differences detected between response and pattern.</failure></testcase></testsuite></testsuites>
\ No newline at end of file