Compare revisions: hive/hivemind
Commits on Source (17)
Showing 65 additions and 204 deletions
......@@ -367,7 +367,7 @@ stages:
localhost $RUNNER_HIVEMIND_SERVER_HTTP_PORT \
$RUNNER_BENCHMARK_ITERATIONS \
$RUNNER_BENCHMARK_JOBS
- ./scripts/xml_report_parser.py . ./tests/tests_api/hivemind/tavern
- ./scripts/xml_report_parser.py --time-threshold=1.5 . ./tests/tests_api/hivemind/tavern
.api-smoketest-benchmark:
stage: benchmark-tests
......@@ -409,6 +409,7 @@ sync-e2e-benchmark:
when: always
paths:
- hivemind-sync.log
- hivemind-server.log
- pg-stats
- hive-sync-runner-id.txt
- tavern_benchmarks_report.html
......
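The CI change above passes a new --time-threshold=1.5 option to scripts/xml_report_parser.py and adds hivemind-server.log to the archived benchmark artifacts. The parser itself is not part of this diff; as a rough illustration of the kind of check such a threshold implies, a minimal stand-alone sketch follows (file layout and names are assumptions, not the repo script):

# Illustrative only: a stand-in for the kind of check a --time-threshold
# option implies; scripts/xml_report_parser.py itself is not shown in this diff.
import sys
import xml.etree.ElementTree as ET

def slow_cases(junit_xml_path, time_threshold=1.5):
    # Yield (test name, seconds) for JUnit <testcase> entries over the threshold.
    tree = ET.parse(junit_xml_path)
    for case in tree.iter("testcase"):
        elapsed = float(case.get("time", "0") or "0")
        if elapsed > time_threshold:
            yield case.get("name"), elapsed

if __name__ == "__main__":
    for name, elapsed in slow_cases(sys.argv[1], float(sys.argv[2])):
        print("SLOW: %s took %.3fs" % (name, elapsed))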
......@@ -250,6 +250,13 @@ class DbState:
force_index_rebuild = True
massive_sync_preconditions = True
def vacuum_hive_posts():
if massive_sync_preconditions:
time_start = perf_counter()
DbState.db().query_no_return( "VACUUM ANALYZE hive_posts" )
time_end = perf_counter()
log.info("[INIT] VACUUM ANALYZE hive_posts executed in %.4fs", time_end - time_start)
#is_pre_process, drop, create
cls.processing_indexes( False, force_index_rebuild, True )
......@@ -276,10 +283,7 @@ class DbState:
time_end = perf_counter()
log.info("[INIT] update_hive_posts_children_count executed in %.4fs", time_end - time_start)
time_start = perf_counter()
DbState.db().query_no_return( "VACUUM ANALYZE hive_posts" )
time_end = perf_counter()
log.info("[INIT] VACUUM ANALYZE hive_posts executed in %.4fs", time_end - time_start)
vacuum_hive_posts()
time_start = perf_counter()
# Update root_id all root posts
......@@ -290,10 +294,7 @@ class DbState:
time_end = perf_counter()
log.info("[INIT] update_hive_posts_root_id executed in %.4fs", time_end - time_start)
time_start = perf_counter()
DbState.db().query_no_return( "VACUUM ANALYZE hive_posts" )
time_end = perf_counter()
log.info("[INIT] VACUUM ANALYZE hive_posts executed in %.4fs", time_end - time_start)
vacuum_hive_posts()
time_start = perf_counter()
......@@ -313,14 +314,10 @@ class DbState:
time_end = perf_counter()
log.info("[INIT] update_all_posts_active executed in %.4fs", time_end - time_start)
time_start = perf_counter()
DbState.db().query_no_return( "VACUUM ANALYZE hive_posts" )
time_end = perf_counter()
log.info("[INIT] VACUUM ANALYZE hive_posts executed in %.4fs", time_end - time_start)
vacuum_hive_posts()
time_start = perf_counter()
sql = """
SELECT update_feed_cache({}, {});
""".format(last_imported_block, current_imported_block)
......@@ -362,7 +359,8 @@ class DbState:
DbState.db().query_no_return(sql)
time_end = perf_counter()
log.info("[INIT] update_posts_rshares executed in %.4fs", time_end - time_start)
# add here 'vacuum analyze hive_posts' when You want to add below more actions which update hive_posts table
vacuum_hive_posts()
time_start = perf_counter()
sql = """
......@@ -393,10 +391,11 @@ class DbState:
log.info("Recreating FKs")
create_fk(cls.db())
time_start = perf_counter()
DbState.db().query_no_return( "VACUUM ANALYZE" )
time_end = perf_counter()
log.info("[INIT] VACUUM ANALYZE executed in %.4fs", time_end - time_start)
time_start = perf_counter()
DbState.db().query_no_return( "VACUUM ANALYZE" )
time_end = perf_counter()
log.info("[INIT] VACUUM ANALYZE executed in %.4fs", time_end - time_start)
@staticmethod
......
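The db_state.py hunks above fold the repeated timed "VACUUM ANALYZE hive_posts" blocks into a single nested vacuum_hive_posts() helper that is skipped unless massive_sync_preconditions is set. A minimal stand-alone sketch of the pattern being consolidated (illustrative; the actual change uses the closure shown in the hunk and calls DbState.db().query_no_return):

import logging
from time import perf_counter

log = logging.getLogger(__name__)

def timed_vacuum(db, table=None):
    # Run VACUUM ANALYZE (optionally limited to one table) and log the elapsed time.
    statement = "VACUUM ANALYZE" + ((" " + table) if table else "")
    time_start = perf_counter()
    db.query_no_return(statement)
    time_end = perf_counter()
    log.info("[INIT] %s executed in %.4fs", statement, time_end - time_start)

After each bulk hive_posts update this would be invoked as timed_vacuum(DbState.db(), "hive_posts"), which mirrors where vacuum_hive_posts() is now called in the hunks above.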
......@@ -586,7 +586,6 @@ def setup(db):
"condenser_api_post_ex_type.sql",
"condenser_get_blog.sql",
"condenser_get_content.sql",
"condenser_get_discussions_by_blog.sql",
"condenser_tags.sql",
"condenser_follows.sql",
"hot_and_trends.sql",
......
......@@ -4,19 +4,20 @@ CREATE OR REPLACE FUNCTION bridge_get_account_posts_by_blog(
in _account VARCHAR,
in _author VARCHAR,
in _permlink VARCHAR,
in _limit INTEGER
in _limit INTEGER,
in _bridge_api BOOLEAN
)
RETURNS SETOF bridge_api_post
AS
$function$
DECLARE
__post_id INTEGER := 0;
__account_id INTEGER := find_account_id( _account, True );
__post_id INTEGER;
__account_id INTEGER;
__created_at TIMESTAMP;
BEGIN
IF _permlink <> '' THEN
__post_id = find_comment_id( _author, _permlink, True );
__account_id = find_account_id( _account, True );
__post_id = find_comment_id( _author, _permlink, True );
IF __post_id <> 0 THEN
SELECT hfc.created_at INTO __created_at
FROM hive_feed_cache hfc
WHERE hfc.account_id = __account_id AND hfc.post_id = __post_id;
......@@ -65,16 +66,18 @@ BEGIN
(
SELECT hfc.post_id, hfc.created_at
FROM hive_feed_cache hfc
WHERE hfc.account_id = __account_id AND (__post_id = 0 OR hfc.created_at <= __created_at)
AND NOT EXISTS (SELECT NULL FROM hive_posts hp
WHERE hp.id = hfc.post_id AND hp.counter_deleted = 0 AND hp.depth = 0 AND hp.community_id IS NOT NULL
AND NOT EXISTS (SELECT NULL FROM hive_reblogs hr WHERE hr.blogger_id = __account_id AND hr.post_id = hp.id)
WHERE hfc.account_id = __account_id AND ( __post_id = 0 OR hfc.created_at < __created_at OR ( hfc.created_at = __created_at AND hfc.post_id < __post_id ) )
AND ( NOT _bridge_api OR
NOT EXISTS (SELECT NULL FROM hive_posts hp1
WHERE hp1.id = hfc.post_id AND hp1.counter_deleted = 0 AND hp1.depth = 0 AND hp1.community_id IS NOT NULL
AND NOT EXISTS (SELECT NULL FROM hive_reblogs hr WHERE hr.blogger_id = __account_id AND hr.post_id = hp1.id)
)
)
ORDER BY created_at DESC, post_id DESC
ORDER BY hfc.created_at DESC, hfc.post_id DESC
LIMIT _limit
)T ON hp.id = T.post_id
ORDER BY T.created_at DESC, T.post_id DESC
;
) blog ON hp.id = blog.post_id
ORDER BY blog.created_at DESC, blog.post_id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
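bridge_get_account_posts_by_blog gains a _bridge_api flag so one function can back both bridge.get_account_posts(sort='blog'), which filters out top-level community posts the blogger did not reblog, and the condenser_api.get_discussions_by_blog emulation, which keeps them. Paging also changes from created_at <= __created_at to a (created_at, post_id) keyset condition, so feed-cache entries sharing the start post's timestamp are paged past deterministically instead of being returned again. A sketch of the two call sites as they appear in the Python hunks further down (db.query_all is the handle used throughout hive/server; only the wrapper function is illustrative):

# Both SQL strings appear later in this diff; the wrapper is illustrative.
BRIDGE_SQL = ("SELECT * FROM bridge_get_account_posts_by_blog( (:account)::VARCHAR,"
              " (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::INTEGER, True )")
CONDENSER_SQL = ("SELECT * FROM bridge_get_account_posts_by_blog( (:account)::VARCHAR,"
                 " (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::INTEGER, False )")

async def account_blog(db, account, start_author='', start_permlink='', limit=20, bridge_api=True):
    # The bridge API passes True, the condenser emulation passes False.
    sql = BRIDGE_SQL if bridge_api else CONDENSER_SQL
    return await db.query_all(sql, account=account, author=start_author,
                              permlink=start_permlink, limit=limit)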
DROP FUNCTION IF EXISTS get_discussion
DROP FUNCTION IF EXISTS bridge_get_discussion
;
CREATE OR REPLACE FUNCTION get_discussion(
CREATE OR REPLACE FUNCTION bridge_get_discussion(
in _author hive_accounts.name%TYPE,
in _permlink hive_permlink_data.permlink%TYPE,
in _observer VARCHAR
......@@ -14,9 +14,9 @@ DECLARE
__observer_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
__observer_id = find_account_id(_observer, False);
__observer_id = find_account_id( _observer, True );
RETURN QUERY
SELECT
SELECT -- bridge_get_discussion
hpv.id,
hpv.author,
hpv.parent_author,
......
......@@ -514,7 +514,6 @@ BEGIN
LIMIT _limit
) as trends
JOIN hive_posts_view hp ON hp.id = trends.id
WHERE (CASE WHEN _observer IS NOT NULL THEN NOT EXISTS (SELECT 1 FROM muted_accounts_view WHERE observer = _observer AND muted = hp.author) ELSE true END)
ORDER BY trends.trend DESC, trends.id DESC
LIMIT _limit;
END
......
......@@ -25,7 +25,7 @@ DECLARE
__start_id INT;
BEGIN
__account_id = find_account_id( _account, True );
__start_id = find_account_id( _start, _start <> '' );
__start_id = find_account_id( _start, True );
IF __start_id <> 0 THEN
SELECT INTO __start_id ( SELECT id FROM hive_follows WHERE following = __account_id AND follower = __start_id );
END IF;
......@@ -53,7 +53,7 @@ DECLARE
__start_id INT;
BEGIN
__account_id = find_account_id( _account, True );
__start_id = find_account_id( _start, _start <> '' );
__start_id = find_account_id( _start, True );
IF __start_id <> 0 THEN
SELECT INTO __start_id ( SELECT id FROM hive_follows WHERE follower = __account_id AND following = __start_id );
END IF;
......
DROP TYPE IF EXISTS get_discussions_post CASCADE;
CREATE TYPE get_discussions_post AS (
id INT,
community_id INT,
author VARCHAR(16),
permlink VARCHAR(255),
author_rep BIGINT,
title VARCHAR(512),
body TEXT,
category VARCHAR(255),
depth SMALLINT,
promoted DECIMAL(10, 3),
payout DECIMAL(10, 3),
payout_at TIMESTAMP,
pending_payout DECIMAL(10, 3),
is_paidout BOOLEAN,
children INT,
votes INT,
active_votes INT,
created_at TIMESTAMP,
updated_at TIMESTAMP,
rshares NUMERIC,
json TEXT,
is_hidden BOOLEAN,
is_grayed BOOLEAN,
total_votes BIGINT,
parent_author VARCHAR(16),
parent_permlink_or_category VARCHAR(255),
curator_payout_value VARCHAR(30),
root_author VARCHAR(16),
root_permlink VARCHAR(255),
max_accepted_payout VARCHAR(30),
percent_hbd INT,
allow_replies BOOLEAN,
allow_votes BOOLEAN,
allow_curation_rewards BOOLEAN,
beneficiaries JSON,
url TEXT,
root_title VARCHAR(512)
);
DROP FUNCTION IF EXISTS get_created_at_for_post;
CREATE OR REPLACE FUNCTION get_created_at_for_post(
in _author hive_accounts.name%TYPE,
in _permlink hive_permlink_data.permlink%TYPE
)
RETURNS TIMESTAMP
AS
$function$
DECLARE
__post_id INT;
__timestamp TIMESTAMP;
BEGIN
__post_id = find_comment_id(_author, _permlink, False);
IF __post_id = 0 THEN
RETURN current_timestamp;
END IF;
SELECT INTO __timestamp
created_at
FROM
hive_posts hp
WHERE
hp.id = __post_id;
RETURN __timestamp;
END
$function$
language 'plpgsql';
DROP FUNCTION IF EXISTS get_discussions_by_blog;
CREATE OR REPLACE FUNCTION get_discussions_by_blog(
in _tag hive_accounts.name%TYPE,
in _start_author hive_accounts.name%TYPE,
in _start_permlink hive_permlink_data.permlink%TYPE,
in _limit INT
)
RETURNS SETOF get_discussions_post
AS
$function$
DECLARE
__created_at TIMESTAMP;
BEGIN
__created_at = get_created_at_for_post(_start_author, _start_permlink);
RETURN QUERY SELECT
hp.id,
hp.community_id,
hp.author,
hp.permlink,
hp.author_rep,
hp.title,
hp.body,
hp.category,
hp.depth,
hp.promoted,
hp.payout,
hp.payout_at,
hp.pending_payout,
hp.is_paidout,
hp.children,
hp.votes,
hp.active_votes,
hp.created_at,
hp.updated_at,
hp.rshares,
hp.json,
hp.is_hidden,
hp.is_grayed,
hp.total_votes,
hp.parent_author,
hp.parent_permlink_or_category,
hp.curator_payout_value,
hp.root_author,
hp.root_permlink,
hp.max_accepted_payout,
hp.percent_hbd,
hp.allow_replies,
hp.allow_votes,
hp.allow_curation_rewards,
hp.beneficiaries,
hp.url,
hp.root_title
FROM hive_posts_view hp
INNER JOIN
(
SELECT
post_id
FROM
hive_feed_cache hfc
INNER JOIN hive_accounts hfc_ha ON hfc.account_id = hfc_ha.id
INNER JOIN hive_posts hfc_hp ON hfc.post_id = hfc_hp.id
WHERE
hfc_ha.name = _tag
AND hfc_hp.created_at <= __created_at
ORDER BY
hfc_hp.created_at DESC
LIMIT _limit
) ds on ds.post_id = hp.id
ORDER BY hp.created_at DESC;
END
$function$
language 'plpgsql';
\ No newline at end of file
......@@ -117,7 +117,7 @@ DECLARE __voter_id INT;
DECLARE __post_id INT;
BEGIN
__voter_id = find_account_id( _VOTER, _VOTER != '' ); -- voter is optional
__voter_id = find_account_id( _VOTER, True );
__post_id = find_comment_id( _AUTHOR, _PERMLINK, True );
RETURN QUERY
......
......@@ -44,7 +44,6 @@ for sql in postgres_handle_view_changes.sql \
condenser_api_post_ex_type.sql \
condenser_get_blog.sql \
condenser_get_content.sql \
condenser_get_discussions_by_blog.sql \
condenser_tags.sql \
condenser_follows.sql \
hot_and_trends.sql \
......
......@@ -228,7 +228,7 @@ IF NOT EXISTS(SELECT data_type FROM information_schema.columns
UPDATE hive_posts hp
SET
tags_ids = tags.tags
tags_ids = tags.tags
FROM
(
SELECT
......@@ -237,7 +237,7 @@ IF NOT EXISTS(SELECT data_type FROM information_schema.columns
FROM
hive_post_tags hpt
GROUP BY post_id
) as tags
) as tags
WHERE hp.id = tags.post_id;
ELSE
RAISE NOTICE 'SKIPPING hive_posts upgrade - adding a tags_ids column';
......
......@@ -64,13 +64,15 @@ LANGUAGE 'plpgsql'
AS
$function$
DECLARE
account_id INT;
__account_id INT = 0;
BEGIN
SELECT INTO account_id COALESCE( ( SELECT id FROM hive_accounts WHERE name=_account ), 0 );
IF _check AND account_id = 0 THEN
RAISE EXCEPTION 'Account % does not exist', _account;
IF (_account <> '') THEN
SELECT INTO __account_id COALESCE( ( SELECT id FROM hive_accounts WHERE name=_account ), 0 );
IF _check AND __account_id = 0 THEN
RAISE EXCEPTION 'Account % does not exist', _account;
END IF;
END IF;
RETURN account_id;
RETURN __account_id;
END
$function$
;
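With this change find_account_id returns 0 for an empty account name without raising, even when _check is True; the exception now fires only for a non-empty name that does not exist. That is why callers that previously guarded with expressions like _start <> '' or _VOTER != '' (the follows paging functions, find_votes, and the observer lookup in bridge_get_discussion) now simply pass True. A small sketch of the resulting semantics, assuming a db handle with query_all as used elsewhere in this diff (the wrapper name is illustrative):

# find_account_id(''       , True)  -> 0       (empty name never raises)
# find_account_id('missing', True)  -> raises 'Account missing does not exist'
# find_account_id('missing', False) -> 0
async def resolve_optional_account_id(db, name):
    rows = await db.query_all("SELECT find_account_id( (:name)::VARCHAR, True ) AS id", name=name)
    return rows[0]['id']  # 0 for an empty name, otherwise the account id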
......@@ -280,7 +280,7 @@ async def get_account_posts(context, sort:str, account:str, start_author:str='',
sql = None
account_posts = True # set when only posts (or reblogs) of given account are supposed to be in results
if sort == 'blog':
sql = "SELECT * FROM bridge_get_account_posts_by_blog( (:account)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::INTEGER )"
sql = "SELECT * FROM bridge_get_account_posts_by_blog( (:account)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::INTEGER, True )"
elif sort == 'feed':
sql = "SELECT * FROM bridge_get_by_feed_with_reblog((:account)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::INTEGER)"
elif sort == 'posts':
......
......@@ -13,20 +13,19 @@ from hive.server.common.mutes import Mutes
log = logging.getLogger(__name__)
@return_error_info
async def get_discussion(context, author, permlink, observer=None):
async def get_discussion(context, author:str, permlink:str, observer:str=''):
"""Modified `get_state` thread implementation."""
# New index was created: hive_posts_parent_id_btree (CREATE INDEX "hive_posts_parent_id_btree" ON hive_posts btree(parent_id))
# We thought this would be covered by "hive_posts_ix4" btree (parent_id, id) WHERE counter_deleted = 0 but it was not
db = context['db']
author = valid_account(author)
permlink = valid_permlink(permlink)
observer = valid_account(observer, allow_empty=True)
blacklisted_for_user = None
if observer:
blacklisted_for_user = await Mutes.get_blacklisted_for_observer(observer, context)
sql = "SELECT * FROM get_discussion(:author,:permlink,:observer)"
sql = "SELECT * FROM bridge_get_discussion(:author,:permlink,:observer)"
rows = await db.query_all(sql, author=author, permlink=permlink, observer=observer)
if not rows or len(rows) == 0:
return {}
......
......@@ -237,7 +237,7 @@ async def _child_ids(db, parent_ids):
async def _load_discussion(db, author, permlink, observer=None):
"""Load a full discussion thread."""
sql = "SELECT * FROM get_discussion(:author,:permlink,:observer)"
sql = "SELECT * FROM bridge_get_discussion(:author,:permlink,:observer)"
sql_result = await db.query_all(sql, author=author, permlink=permlink, observer=observer)
muted_accounts = Mutes.all()
......
......@@ -291,12 +291,10 @@ async def get_discussions_by_blog(context, tag: str = None, start_author: str =
valid_permlink(start_permlink, allow_empty=True)
valid_limit(limit, 100, 20)
sql = """
SELECT * FROM get_discussions_by_blog(:author, :start_author, :start_permlink, :limit)
"""
sql = "SELECT * FROM bridge_get_account_posts_by_blog( (:account)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::INTEGER, False )"
db = context['db']
result = await db.query_all(sql, author=tag, start_author=start_author, start_permlink=start_permlink, limit=limit)
result = await db.query_all(sql, account=tag, author=start_author, permlink=start_permlink, limit=limit)
posts_by_id = []
for row in result:
......
......@@ -22,7 +22,7 @@ STRINGS = {
# personal
NotifyType.error: 'error: <payload>',
NotifyType.reblog: '<src> resteemed your post',
NotifyType.reblog: '<src> reblogged your post',
NotifyType.follow: '<src> followed you',
NotifyType.reply: '<src> replied to your post',
NotifyType.reply_comment: '<src> replied to your comment',
......
......@@ -3,7 +3,6 @@ from hive.server.common.helpers import (
return_error_info,
valid_account,
valid_permlink)
from hive.server.database_api.methods import find_votes_impl, VotesPresentation
@return_error_info
async def get_discussion(context, author: str, permlink: str, observer=None):
......
......@@ -693,7 +693,7 @@
"spaminator"
],
"id": "follow",
"json": "[\"follow\",{\"follower\":\"spaminator\",\"following\":[\"lyubovbar\",\"zaitsevalesyaa\",\"kingscrown\",\"trevonjb\",\"craig-grant\",\"ned\"],\"what\":[\"blacklist\"]}]"
"json": "[\"follow\",{\"follower\":\"spaminator\",\"following\":[\"lyubovbar\",\"zaitsevalesyaa\",\"kingscrown\",\"trevonjb\",\"craig-grant\",\"ned\",\"mindhunter\"],\"what\":[\"blacklist\"]}]"
}
},
{
......
***block 4999999***
custom_json_operation("[\"reblog\",{\"account\":\"funny\",\"author\":\"steemit\",\"permlink\":\"firstpost\"}]") - very old post
custom_json_operation("[\"reblog\",{\"account\":\"funny\",\"author\":\"steak\",\"permlink\":\"streak-test\"}]") - deleted post (should not be reblogged)
custom_json_operation("[\"reblog\",{\"account\":\"funny\",\"author\":\"dollarvigilante\",\"permlink\":\"another-billionaire-warns-of-catastrophic-depths-not-seen-in-5-000-years-and-emphasizes-gold\"}]") - fresh post
\ No newline at end of file