Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found
Select Git revision
  • 127-eliminate-more-references-to-hive_posts_view
  • 221-vacuum-hivemind-tables
  • 267-rebased-onto-develop
  • 676-as-tiny-assets
  • 72-postgres-warning-about-wrong-collation-in-recursive_deps-2
  • abw_ecv_op_experiment
  • abw_max_retries
  • abw_post_delete_fix
  • abw_rshares_experiment
  • add-git-to-requirements
  • add-vote-info
  • arc-get-content-deleted
  • as-tmp-to-remove
  • asuch-limit-follows
  • asuch-postgrest-fixing-problems
  • asuch-replace-python-with-sql-get-follow-count
  • asuch-tmp-wip-condenser-get-blog
  • autoexplain-python
  • bridge_api.get_ranked_posts_fixes
  • bw_1_27_5rc8_2master
  • bw_develop-haf-rebase
  • bw_docker_supplement
  • bw_entrypoint_signal_handler_fix
  • bw_haf_compat_sync
  • bw_hafah_datasource_test
  • bw_master2develop
  • bw_mi/hivemind_wit_sa_btracker_rebase
  • bw_rebased_develop-haf
  • bw_restore_log_memory_usage_call
  • bw_simplify_blacklisted_by_observer_view
  • bw_temp_hived_source_node_verification
  • bw_update_posts_rshares_speedup
  • bw_v1_27_5_0_0_rc0
  • change-python-limits
  • cherry-pick-5dd1da34
  • cherry-pick-98eaf112
  • complete-refactor
  • db-upgrade-assert
  • deployed_20200917
  • deployed_20200928
  • deployed_20200928_pure
  • deployed_20200928_reversible_ops
  • deployed_fixes_2_develop
  • develop
  • develop-haf-backup
  • dk-benchmarks-ci-improvements
  • dk-communities-unit-tests
  • dk-get-ids-for-accounts-hotfix
  • dk-issue-3-concurrent-block-query
  • dk-list-votes-pre24
  • dk-migration-script-tags-support
  • dk-num-block-hive-feed-cache
  • dk-readme-update
  • dk-reputation_api_support
  • dk-revert-black-lists
  • dk-sql-file-list
  • dk-sql-script-executor
  • dk-sql-scripts-from-schema
  • dk-xdist-and-time
  • dn-autovacuum
  • dn-default-autovacuum
  • dn-testing
  • dn_get_block_range
  • dn_parallel_safe
  • dn_prof
  • doc-fix
  • dockerfile-update-fix
  • emf-limit-follows
  • enum_block_operations-support
  • feature/beneficiaries_communities
  • feature/hive_votes_no_index
  • feature/mute-reason-test
  • feature/mute-reason_rebase
  • feature/new_communities_type_old
  • feature/new_community_types
  • feature/role-only-if-subscribed-test1
  • fix-duplicate-pinned-posts
  • fixing-tests-with-limits
  • follow-deltas
  • follow-redesign
  • follow-redesign-speedups
  • follow-redesign-tests
  • follow_api_tests
  • get-discussion-experiment
  • hivemind_testers
  • imwatsi-first-steps
  • jes2850-decentralized-lists
  • jsalyers-add-a-cascade
  • jsalyers-fix-muting-for-reblogs
  • jsalyers-fix-muting-on-bridge-get-discussion
  • jsalyers-muting-v2
  • jsalyers-test-mute-changes
  • kbotor/backup/building-hivemind-from-other-repos
  • kbotor/building-hivemind-from-other-repos
  • kbotor/ci-rewrite-for-parallel-replay
  • km_ah_api
  • km_get_content_2_0
  • km_get_content_fill_missing2deployed
  • km_history
  • km_improved_log_gathering
  • 0.25.4
  • 1.25.0rc0
  • 1.25.2rc
  • 1.26.0
  • 1.26.1
  • 1.26.2
  • 1.26.3
  • 1.27.0.dev0
  • 1.27.10
  • 1.27.11rc1
  • 1.27.11rc2
  • 1.27.11rc3
  • 1.27.11rc4
  • 1.27.11rc5
  • 1.27.3.0.0
  • 1.27.3.0.0dev11
  • 1.27.3.0.0dev12
  • 1.27.3.0.0dev7
  • 1.27.5
  • 1.27.5.0.0rc7
  • 1.27.5rc8
  • 1.27.5rc9
  • 1.27.6rc3
  • 1.27.6rc5
  • 1.27.6rc6
  • 1.27.6rc7
  • 1.27.6rc8
  • 1.27.6rc9
  • 1.27.7rc10
  • 1.27.7rc11
  • 1.27.7rc12
  • 1.27.7rc13
  • 1.27.7rc14
  • 1.27.7rc15
  • 1.27.7rc16
  • 1.27.8
  • 1.27.9
  • Before-dk-issue-3
  • Full-sync
  • Full-sync-20200928
  • Full-sync-20201026
  • ScheduledBenchmarkTesting_18_Aug
  • env/staging-permissions
  • full_hive_sync_17_05_2021
  • git_versioning_support
  • hivemind_ci_support
  • tmp-logs
  • v1.24-RC
  • v1.24.0
  • v1.24.1
  • v1.24.1-RC
  • v1.24.2
  • v1.25.1
  • v1.25.2
  • v1.25.3
  • v1.25.4
  • v1.26.0
  • v1.27.4.0.0
  • v1.27.4.0.0dev1
  • v1.27.4.0.0dev2
160 results

Target

Select target project
  • hive/hivemind
1 result
Select Git revision
  • 127-eliminate-more-references-to-hive_posts_view
  • 221-vacuum-hivemind-tables
  • 267-rebased-onto-develop
  • 676-as-tiny-assets
  • 72-postgres-warning-about-wrong-collation-in-recursive_deps-2
  • abw_ecv_op_experiment
  • abw_max_retries
  • abw_post_delete_fix
  • abw_rshares_experiment
  • add-git-to-requirements
  • add-vote-info
  • arc-get-content-deleted
  • as-tmp-to-remove
  • asuch-limit-follows
  • asuch-postgrest-fixing-problems
  • asuch-replace-python-with-sql-get-follow-count
  • asuch-tmp-wip-condenser-get-blog
  • autoexplain-python
  • bridge_api.get_ranked_posts_fixes
  • bw_1_27_5rc8_2master
  • bw_develop-haf-rebase
  • bw_docker_supplement
  • bw_entrypoint_signal_handler_fix
  • bw_haf_compat_sync
  • bw_hafah_datasource_test
  • bw_master2develop
  • bw_mi/hivemind_wit_sa_btracker_rebase
  • bw_rebased_develop-haf
  • bw_restore_log_memory_usage_call
  • bw_simplify_blacklisted_by_observer_view
  • bw_temp_hived_source_node_verification
  • bw_update_posts_rshares_speedup
  • bw_v1_27_5_0_0_rc0
  • change-python-limits
  • cherry-pick-5dd1da34
  • cherry-pick-98eaf112
  • complete-refactor
  • db-upgrade-assert
  • deployed_20200917
  • deployed_20200928
  • deployed_20200928_pure
  • deployed_20200928_reversible_ops
  • deployed_fixes_2_develop
  • develop
  • develop-haf-backup
  • dk-benchmarks-ci-improvements
  • dk-communities-unit-tests
  • dk-get-ids-for-accounts-hotfix
  • dk-issue-3-concurrent-block-query
  • dk-list-votes-pre24
  • dk-migration-script-tags-support
  • dk-num-block-hive-feed-cache
  • dk-readme-update
  • dk-reputation_api_support
  • dk-revert-black-lists
  • dk-sql-file-list
  • dk-sql-script-executor
  • dk-sql-scripts-from-schema
  • dk-xdist-and-time
  • dn-autovacuum
  • dn-default-autovacuum
  • dn-testing
  • dn_get_block_range
  • dn_parallel_safe
  • dn_prof
  • doc-fix
  • dockerfile-update-fix
  • emf-limit-follows
  • enum_block_operations-support
  • feature/beneficiaries_communities
  • feature/hive_votes_no_index
  • feature/mute-reason-test
  • feature/mute-reason_rebase
  • feature/new_communities_type_old
  • feature/new_community_types
  • feature/role-only-if-subscribed-test1
  • fix-duplicate-pinned-posts
  • fixing-tests-with-limits
  • follow-deltas
  • follow-redesign
  • follow-redesign-speedups
  • follow-redesign-tests
  • follow_api_tests
  • get-discussion-experiment
  • hivemind_testers
  • imwatsi-first-steps
  • jes2850-decentralized-lists
  • jsalyers-add-a-cascade
  • jsalyers-fix-muting-for-reblogs
  • jsalyers-fix-muting-on-bridge-get-discussion
  • jsalyers-muting-v2
  • jsalyers-test-mute-changes
  • kbotor/backup/building-hivemind-from-other-repos
  • kbotor/building-hivemind-from-other-repos
  • kbotor/ci-rewrite-for-parallel-replay
  • km_ah_api
  • km_get_content_2_0
  • km_get_content_fill_missing2deployed
  • km_history
  • km_improved_log_gathering
  • 0.25.4
  • 1.25.0rc0
  • 1.25.2rc
  • 1.26.0
  • 1.26.1
  • 1.26.2
  • 1.26.3
  • 1.27.0.dev0
  • 1.27.10
  • 1.27.11rc1
  • 1.27.11rc2
  • 1.27.11rc3
  • 1.27.11rc4
  • 1.27.11rc5
  • 1.27.3.0.0
  • 1.27.3.0.0dev11
  • 1.27.3.0.0dev12
  • 1.27.3.0.0dev7
  • 1.27.5
  • 1.27.5.0.0rc7
  • 1.27.5rc8
  • 1.27.5rc9
  • 1.27.6rc3
  • 1.27.6rc5
  • 1.27.6rc6
  • 1.27.6rc7
  • 1.27.6rc8
  • 1.27.6rc9
  • 1.27.7rc10
  • 1.27.7rc11
  • 1.27.7rc12
  • 1.27.7rc13
  • 1.27.7rc14
  • 1.27.7rc15
  • 1.27.7rc16
  • 1.27.8
  • 1.27.9
  • Before-dk-issue-3
  • Full-sync
  • Full-sync-20200928
  • Full-sync-20201026
  • ScheduledBenchmarkTesting_18_Aug
  • env/staging-permissions
  • full_hive_sync_17_05_2021
  • git_versioning_support
  • hivemind_ci_support
  • tmp-logs
  • v1.24-RC
  • v1.24.0
  • v1.24.1
  • v1.24.1-RC
  • v1.24.2
  • v1.25.1
  • v1.25.2
  • v1.25.3
  • v1.25.4
  • v1.26.0
  • v1.27.4.0.0
  • v1.27.4.0.0dev1
  • v1.27.4.0.0dev2
160 results
Show changes
Commits on Source (15)
Showing
with 356 additions and 387 deletions
...@@ -167,7 +167,7 @@ class DbState: ...@@ -167,7 +167,7 @@ class DbState:
index.drop(engine) index.drop(engine)
end_time = perf_counter() end_time = perf_counter()
elapsed_time = end_time - time_start elapsed_time = end_time - time_start
log.info("Index {} dropped in time {} s".format(index.name, elapsed_time)) log.info("Index %s dropped in time %.4f s", index.name, elapsed_time)
except sqlalchemy.exc.ProgrammingError as ex: except sqlalchemy.exc.ProgrammingError as ex:
log.warning("Ignoring ex: {}".format(ex)) log.warning("Ignoring ex: {}".format(ex))
...@@ -176,7 +176,7 @@ class DbState: ...@@ -176,7 +176,7 @@ class DbState:
index.create(engine) index.create(engine)
end_time = perf_counter() end_time = perf_counter()
elapsed_time = end_time - time_start elapsed_time = end_time - time_start
log.info("Index {} created in time {} s".format(index.name, elapsed_time)) log.info("Index %s created in time %.4f s", index.name, elapsed_time)
@classmethod @classmethod
def before_initial_sync(cls, last_imported_block, hived_head_block): def before_initial_sync(cls, last_imported_block, hived_head_block):
...@@ -252,7 +252,7 @@ class DbState: ...@@ -252,7 +252,7 @@ class DbState:
row = DbState.db().query_row(sql) row = DbState.db().query_row(sql)
time_end = perf_counter() time_end = perf_counter()
log.info("[INIT] update_hive_posts_children_count executed in %fs", time_end - time_start) log.info("[INIT] update_hive_posts_children_count executed in %.4fs", time_end - time_start)
time_start = perf_counter() time_start = perf_counter()
...@@ -263,7 +263,7 @@ class DbState: ...@@ -263,7 +263,7 @@ class DbState:
row = DbState.db().query_row(sql) row = DbState.db().query_row(sql)
time_end = perf_counter() time_end = perf_counter()
log.info("[INIT] update_hive_posts_root_id executed in %fs", time_end - time_start) log.info("[INIT] update_hive_posts_root_id executed in %.4fs", time_end - time_start)
time_start = perf_counter() time_start = perf_counter()
...@@ -274,21 +274,21 @@ class DbState: ...@@ -274,21 +274,21 @@ class DbState:
row = DbState.db().query_row(sql) row = DbState.db().query_row(sql)
time_end = perf_counter() time_end = perf_counter()
log.info("[INIT] update_hive_posts_api_helper executed in %fs", time_end - time_start) log.info("[INIT] update_hive_posts_api_helper executed in %.4fs", time_end - time_start)
time_start = perf_counter() time_start = perf_counter()
update_hot_and_tranding_for_block_range(last_imported_block, current_imported_block) update_hot_and_tranding_for_block_range(last_imported_block, current_imported_block)
time_end = perf_counter() time_end = perf_counter()
log.info("[INIT] update_all_hot_and_tranding executed in %fs", time_end - time_start) log.info("[INIT] update_all_hot_and_tranding executed in %.4fs", time_end - time_start)
time_start = perf_counter() time_start = perf_counter()
update_active_starting_from_posts_on_block(last_imported_block, current_imported_block) update_active_starting_from_posts_on_block(last_imported_block, current_imported_block)
time_end = perf_counter() time_end = perf_counter()
log.info("[INIT] update_all_posts_active executed in %fs", time_end - time_start) log.info("[INIT] update_all_posts_active executed in %.4fs", time_end - time_start)
time_start = perf_counter() time_start = perf_counter()
...@@ -298,7 +298,7 @@ class DbState: ...@@ -298,7 +298,7 @@ class DbState:
DbState.db().query_no_return(sql) DbState.db().query_no_return(sql)
time_end = perf_counter() time_end = perf_counter()
log.info("[INIT] update_feed_cache executed in %fs", time_end - time_start) log.info("[INIT] update_feed_cache executed in %.4fs", time_end - time_start)
time_start = perf_counter() time_start = perf_counter()
sql = """ sql = """
...@@ -306,12 +306,12 @@ class DbState: ...@@ -306,12 +306,12 @@ class DbState:
""".format(last_imported_block, current_imported_block) """.format(last_imported_block, current_imported_block)
DbState.db().query_no_return(sql) DbState.db().query_no_return(sql)
time_end = perf_counter() time_end = perf_counter()
log.info("[INIT] update_hive_posts_mentions executed in %fs", time_end - time_start) log.info("[INIT] update_hive_posts_mentions executed in %.4fs", time_end - time_start)
time_start = perf_counter() time_start = perf_counter()
PayoutStats.generate() PayoutStats.generate()
time_end = perf_counter() time_end = perf_counter()
log.info("[INIT] filling payout_stats_view executed in %fs", time_end - time_start) log.info("[INIT] filling payout_stats_view executed in %.4fs", time_end - time_start)
time_start = perf_counter() time_start = perf_counter()
sql = """ sql = """
...@@ -319,12 +319,12 @@ class DbState: ...@@ -319,12 +319,12 @@ class DbState:
""".format(last_imported_block, current_imported_block) """.format(last_imported_block, current_imported_block)
DbState.db().query_no_return(sql) DbState.db().query_no_return(sql)
time_end = perf_counter() time_end = perf_counter()
log.info("[INIT] update_account_reputations executed in %fs", time_end - time_start) log.info("[INIT] update_account_reputations executed in %.4fs", time_end - time_start)
time_start = perf_counter() time_start = perf_counter()
update_communities_posts_and_rank() update_communities_posts_and_rank()
time_end = perf_counter() time_end = perf_counter()
log.info("[INIT] update_communities_posts_and_rank executed in %fs", time_end - time_start) log.info("[INIT] update_communities_posts_and_rank executed in %.4fs", time_end - time_start)
# Update a block num immediately # Update a block num immediately
DbState.db().query_no_return("UPDATE hive_state SET block_num = :block_num", block_num = current_imported_block) DbState.db().query_no_return("UPDATE hive_state SET block_num = :block_num", block_num = current_imported_block)
......
...@@ -963,12 +963,13 @@ def setup(db): ...@@ -963,12 +963,13 @@ def setup(db):
reputation BIGINT reputation BIGINT
); );
DROP FUNCTION IF EXISTS find_votes( character varying, character varying ) DROP FUNCTION IF EXISTS find_votes( character varying, character varying, int )
; ;
CREATE OR REPLACE FUNCTION public.find_votes CREATE OR REPLACE FUNCTION public.find_votes
( (
in _AUTHOR hive_accounts.name%TYPE, in _AUTHOR hive_accounts.name%TYPE,
in _PERMLINK hive_permlink_data.permlink%TYPE in _PERMLINK hive_permlink_data.permlink%TYPE,
in _LIMIT INT
) )
RETURNS SETOF database_api_vote RETURNS SETOF database_api_vote
LANGUAGE 'plpgsql' LANGUAGE 'plpgsql'
...@@ -997,6 +998,7 @@ def setup(db): ...@@ -997,6 +998,7 @@ def setup(db):
v.post_id = _POST_ID v.post_id = _POST_ID
ORDER BY ORDER BY
voter_id voter_id
LIMIT _LIMIT
); );
END END
...@@ -2199,7 +2201,10 @@ def setup(db): ...@@ -2199,7 +2201,10 @@ def setup(db):
"delete_hive_posts_mentions.sql", "delete_hive_posts_mentions.sql",
"head_block_time.sql", "head_block_time.sql",
"notifications_view.sql", "notifications_view.sql",
"get_number_of_unreaded_notifications.sql" "get_number_of_unreaded_notifications.sql",
"bridge_get_account_posts_by_comments.sql",
"bridge_get_account_posts_by_payout.sql",
"bridge_get_account_posts_by_posts.sql"
] ]
from os.path import dirname, realpath from os.path import dirname, realpath
dir_path = dirname(realpath(__file__)) dir_path = dirname(realpath(__file__))
......
DROP FUNCTION IF EXISTS bridge_get_account_posts_by_comments;
-- Returns up to _limit comments (posts with depth > 0) authored by _account,
-- newest-first by post id, for the bridge API "account posts / comments" feed.
-- (_author, _permlink) identify the last post of the previous page and act as
-- the keyset-pagination anchor: only posts with id < that anchor are returned.
-- NOTE(review): when no anchor is given, __post_id is presumably 0 so the
-- `__post_id = 0 OR ...` clause disables paging — confirm find_comment_id's
-- behavior for empty author/permlink; the meaning of its third argument
-- (True) is also not visible here — TODO confirm (likely "must exist").
CREATE FUNCTION bridge_get_account_posts_by_comments( in _account VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF bridge_api_post
AS
$function$
DECLARE
__account_id INT;
__post_id INT;
BEGIN
-- Resolve names to internal ids up front; helpers are defined elsewhere in the schema.
__account_id = find_account_id( _account, True );
__post_id = find_comment_id( _author, _permlink, True );
RETURN QUERY SELECT
hp.id,
hp.author,
hp.parent_author,
hp.author_rep,
hp.root_title,
hp.beneficiaries,
hp.max_accepted_payout,
hp.percent_hbd,
hp.url,
hp.permlink,
hp.parent_permlink_or_category,
hp.title,
hp.body,
hp.category,
hp.depth,
hp.promoted,
hp.payout,
hp.pending_payout,
hp.payout_at,
hp.is_paidout,
hp.children,
hp.votes,
hp.created_at,
hp.updated_at,
hp.rshares,
hp.abs_rshares,
hp.json,
hp.is_hidden,
hp.is_grayed,
hp.total_votes,
hp.sc_trend,
hp.role_title,
hp.community_title,
hp.role_id,
hp.is_pinned,
hp.curator_payout_value
FROM
hive_posts_view hp
WHERE
-- depth > 0 restricts the result to comments (replies), excluding root posts.
hp.author_id = __account_id AND hp.depth > 0 AND ( __post_id = 0 OR hp.id < __post_id )
ORDER BY hp.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS bridge_get_account_posts_by_payout;
-- Returns up to _limit not-yet-paid-out posts authored by _account, ordered by
-- total pending payout (payout + pending_payout) descending, ties broken by
-- post id descending. (_author, _permlink) identify the last post of the
-- previous page; its payout total (__payout_limit) plus its id form the
-- compound keyset-pagination anchor.
-- NOTE(review): the meaning of find_comment_id's third argument (True) is not
-- visible here — TODO confirm (likely "must exist").
CREATE FUNCTION bridge_get_account_posts_by_payout( in _account VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF bridge_api_post
AS
$function$
DECLARE
__account_id INT;
__post_id INT;
__payout_limit hive_posts.payout%TYPE;
BEGIN
__account_id = find_account_id( _account, True );
__post_id = find_comment_id( _author, _permlink, True );
-- Only fetch the anchor's payout when an anchor post was actually given;
-- __post_id = 0 means "first page" and __payout_limit stays NULL (unused below).
IF __post_id <> 0 THEN
SELECT ( hp.payout + hp.pending_payout ) INTO __payout_limit FROM hive_posts hp WHERE hp.id = __post_id;
END IF;
RETURN QUERY SELECT
hp.id,
hp.author,
hp.parent_author,
hp.author_rep,
hp.root_title,
hp.beneficiaries,
hp.max_accepted_payout,
hp.percent_hbd,
hp.url,
hp.permlink,
hp.parent_permlink_or_category,
hp.title,
hp.body,
hp.category,
hp.depth,
hp.promoted,
hp.payout,
hp.pending_payout,
hp.payout_at,
hp.is_paidout,
hp.children,
hp.votes,
hp.created_at,
hp.updated_at,
hp.rshares,
hp.abs_rshares,
hp.json,
hp.is_hidden,
hp.is_grayed,
hp.total_votes,
hp.sc_trend,
hp.role_title,
hp.community_title,
hp.role_id,
hp.is_pinned,
hp.curator_payout_value
FROM
hive_posts_view hp
WHERE
-- NOT is_paidout: this feed lists only posts still inside their payout window.
hp.author_id = __account_id AND NOT hp.is_paidout
-- Keyset condition matching the ORDER BY: strictly lower payout, or equal
-- payout with a lower id, continues the page after the anchor post.
AND ( __post_id = 0 OR ( hp.payout + hp.pending_payout ) < __payout_limit OR ( ( hp.payout + hp.pending_payout ) = __payout_limit AND hp.id < __post_id ) )
ORDER BY ( hp.payout + hp.pending_payout ) DESC, hp.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
DROP FUNCTION IF EXISTS bridge_get_account_posts_by_posts;
-- Returns up to _limit root posts (depth = 0) authored by _account,
-- newest-first by post id, for the bridge API "account posts / posts" feed.
-- (_author, _permlink) identify the last post of the previous page and act as
-- the keyset-pagination anchor: only posts with id < that anchor are returned;
-- __post_id = 0 (no anchor) yields the first page.
-- NOTE(review): the meaning of find_comment_id's third argument (True) is not
-- visible here — TODO confirm (likely "must exist").
CREATE FUNCTION bridge_get_account_posts_by_posts( in _account VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
RETURNS SETOF bridge_api_post
AS
$function$
DECLARE
__account_id INT;
__post_id INT;
BEGIN
-- Resolve names to internal ids up front; helpers are defined elsewhere in the schema.
__account_id = find_account_id( _account, True );
__post_id = find_comment_id( _author, _permlink, True );
RETURN QUERY SELECT
hp.id,
hp.author,
hp.parent_author,
hp.author_rep,
hp.root_title,
hp.beneficiaries,
hp.max_accepted_payout,
hp.percent_hbd,
hp.url,
hp.permlink,
hp.parent_permlink_or_category,
hp.title,
hp.body,
hp.category,
hp.depth,
hp.promoted,
hp.payout,
hp.pending_payout,
hp.payout_at,
hp.is_paidout,
hp.children,
hp.votes,
hp.created_at,
hp.updated_at,
hp.rshares,
hp.abs_rshares,
hp.json,
hp.is_hidden,
hp.is_grayed,
hp.total_votes,
hp.sc_trend,
hp.role_title,
hp.community_title,
hp.role_id,
hp.is_pinned,
hp.curator_payout_value
FROM
hive_posts_view hp
WHERE
-- depth = 0 restricts the result to root posts, excluding comments/replies.
hp.author_id = __account_id AND hp.depth = 0 AND ( __post_id = 0 OR hp.id < __post_id )
ORDER BY hp.id DESC
LIMIT _limit;
END
$function$
language plpgsql STABLE;
...@@ -7,8 +7,8 @@ DECLARE ...@@ -7,8 +7,8 @@ DECLARE
__post_id INT; __post_id INT;
__account_id INT; __account_id INT;
BEGIN BEGIN
__post_id = find_comment_id( start_author, start_permlink, True );
__account_id = find_account_id( _account, True ); __account_id = find_account_id( _account, True );
__post_id = find_comment_id( start_author, start_permlink, True );
RETURN QUERY SELECT RETURN QUERY SELECT
hpr.id as id hpr.id as id
FROM hive_posts hpr FROM hive_posts hpr
......
...@@ -26,6 +26,7 @@ from hive.utils.post_active import update_active_starting_from_posts_on_block ...@@ -26,6 +26,7 @@ from hive.utils.post_active import update_active_starting_from_posts_on_block
from hive.server.common.payout_stats import PayoutStats from hive.server.common.payout_stats import PayoutStats
from hive.server.common.mentions import Mentions from hive.server.common.mentions import Mentions
from hive.utils.timer import time_it
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
...@@ -79,18 +80,6 @@ class Blocks: ...@@ -79,18 +80,6 @@ class Blocks:
PayoutStats.setup_own_db_access(sharedDbAdapter) PayoutStats.setup_own_db_access(sharedDbAdapter)
Mentions.setup_own_db_access(sharedDbAdapter) Mentions.setup_own_db_access(sharedDbAdapter)
@classmethod
def setup_shared_db_access(cls, sharedDbAdapter):
PostDataCache.setup_shared_db_access(sharedDbAdapter)
Reputations.setup_shared_db_access(sharedDbAdapter)
Votes.setup_shared_db_access(sharedDbAdapter)
Tags.setup_shared_db_access(sharedDbAdapter)
Follow.setup_shared_db_access(sharedDbAdapter)
Posts.setup_shared_db_access(sharedDbAdapter)
Reblog.setup_shared_db_access(sharedDbAdapter)
Notify.setup_shared_db_access(sharedDbAdapter)
Accounts.setup_shared_db_access(sharedDbAdapter)
@classmethod @classmethod
def head_num(cls): def head_num(cls):
"""Get hive's head block number.""" """Get hive's head block number."""
...@@ -103,28 +92,6 @@ class Blocks: ...@@ -103,28 +92,6 @@ class Blocks:
sql = "SELECT created_at FROM hive_blocks ORDER BY num DESC LIMIT 1" sql = "SELECT created_at FROM hive_blocks ORDER BY num DESC LIMIT 1"
return str(DB.query_one(sql) or '') return str(DB.query_one(sql) or '')
@classmethod
def process(cls, block, block_num, vops_in_block):
"""Process a single block. Always wrap in a transaction!"""
time_start = perf_counter()
ret = cls._process(block, vops_in_block)
assert ret == block_num, "{} != {}".format(block_num, ret)
cls._flush_blocks()
PostDataCache.flush()
Tags.flush()
Votes.flush()
Posts.flush()
Reblog.flush()
follows = Follow.flush()
Notify.flush()
Reputations.flush()
accts = Accounts.flush()
cls.on_live_blocks_processed(block_num, block_num)
time_end = perf_counter()
log.info("[PROCESS BLOCK] %fs", time_end - time_start)
return follows, accts
@classmethod @classmethod
def process_multi(cls, blocks, vops, is_initial_sync): def process_multi(cls, blocks, vops, is_initial_sync):
"""Batch-process blocks; wrapped in a transaction.""" """Batch-process blocks; wrapped in a transaction."""
...@@ -457,18 +424,25 @@ class Blocks: ...@@ -457,18 +424,25 @@ class Blocks:
# TODO: manually re-process here the blocks which were just popped. # TODO: manually re-process here the blocks which were just popped.
@classmethod @classmethod
@time_it
def on_live_blocks_processed( cls, first_block, last_block ): def on_live_blocks_processed( cls, first_block, last_block ):
"""Is invoked when processing of block range is done and received """Is invoked when processing of block range is done and received
informations from hived are already stored in db informations from hived are already stored in db
""" """
update_hot_and_tranding_for_block_range( first_block, last_block ) update_hot_and_tranding_for_block_range( first_block, last_block )
update_active_starting_from_posts_on_block( first_block, last_block ) update_active_starting_from_posts_on_block( first_block, last_block )
DB.query_no_return("SELECT update_hive_posts_children_count({}, {})".format(first_block, last_block)) queries = [
DB.query_no_return("SELECT update_hive_posts_root_id({},{})".format(first_block, last_block)) "SELECT update_hive_posts_children_count({}, {})".format(first_block, last_block),
DB.query_no_return("SELECT update_hive_posts_api_helper({},{})".format(first_block, last_block)) "SELECT update_hive_posts_root_id({},{})".format(first_block, last_block),
DB.query_no_return("SELECT update_feed_cache({}, {})".format(first_block, last_block)) "SELECT update_hive_posts_api_helper({},{})".format(first_block, last_block),
DB.query_no_return("SELECT update_hive_posts_mentions({}, {})".format(first_block, last_block)) "SELECT update_feed_cache({}, {})".format(first_block, last_block),
DB.query_no_return("SELECT update_account_reputations({}, {})".format(first_block, last_block)) "SELECT update_hive_posts_mentions({}, {})".format(first_block, last_block),
"SELECT update_account_reputations({}, {})".format(first_block, last_block)
]
for query in queries:
time_start = perf_counter()
DB.query_no_return(query)
log.info("%s executed in: %.4f s", query, perf_counter() - time_start)
...@@ -5,17 +5,10 @@ class DbAdapterHolder(object): ...@@ -5,17 +5,10 @@ class DbAdapterHolder(object):
db = None db = None
_inside_tx = False _inside_tx = False
_use_tx = True
@classmethod
def setup_shared_db_access(cls, sharedDb):
cls.db = sharedDb
cls._use_tx = False
@classmethod @classmethod
def setup_own_db_access(cls, sharedDb): def setup_own_db_access(cls, sharedDb):
cls.db = sharedDb.clone() cls.db = sharedDb.clone()
cls._use_tx = True
@classmethod @classmethod
def tx_active(cls): def tx_active(cls):
...@@ -23,12 +16,10 @@ class DbAdapterHolder(object): ...@@ -23,12 +16,10 @@ class DbAdapterHolder(object):
@classmethod @classmethod
def beginTx(cls): def beginTx(cls):
if cls._use_tx: cls.db.query("START TRANSACTION")
cls.db.query("START TRANSACTION") cls._inside_tx = True
cls._inside_tx = True
@classmethod @classmethod
def commitTx(cls): def commitTx(cls):
if cls._use_tx: cls.db.query("COMMIT")
cls.db.query("COMMIT") cls._inside_tx = False
cls._inside_tx = False
...@@ -141,11 +141,11 @@ def _block_consumer(node, blocksQueue, vopsQueue, is_initial_sync, lbound, uboun ...@@ -141,11 +141,11 @@ def _block_consumer(node, blocksQueue, vopsQueue, is_initial_sync, lbound, uboun
timer.batch_finish(len(blocks)) timer.batch_finish(len(blocks))
time_current = perf() time_current = perf()
prefix = ("[SYNC] Got block %d @ %s" % ( prefix = ("[INITIAL SYNC] Got block %d @ %s" % (
to - 1, blocks[-1]['timestamp'])) to - 1, blocks[-1]['timestamp']))
log.info(timer.batch_status(prefix)) log.info(timer.batch_status(prefix))
log.info("[SYNC] Time elapsed: %fs", time_current - time_start) log.info("[INITIAL SYNC] Time elapsed: %fs", time_current - time_start)
log.info("[SYNC] Current system time: %s", datetime.now().strftime("%H:%M:%S")) log.info("[INITIAL SYNC] Current system time: %s", datetime.now().strftime("%H:%M:%S"))
rate = minmax(rate, len(blocks), time_current - wait_time_1, lbound) rate = minmax(rate, len(blocks), time_current - wait_time_1, lbound)
if block_end - block_start > 1.0 or is_debug: if block_end - block_start > 1.0 or is_debug:
...@@ -261,7 +261,7 @@ class Sync: ...@@ -261,7 +261,7 @@ class Sync:
else: else:
# recover from fork # recover from fork
Blocks.verify_head(self._steem) Blocks.verify_head(self._steem)
Blocks.setup_shared_db_access(self._db)
self._update_chain_state() self._update_chain_state()
if self._conf.get('test_max_block'): if self._conf.get('test_max_block'):
...@@ -312,7 +312,7 @@ class Sync: ...@@ -312,7 +312,7 @@ class Sync:
_node_data_provider(self, is_initial_sync, lbound, ubound, chunk_size) _node_data_provider(self, is_initial_sync, lbound, ubound, chunk_size)
return return
log.info("[SYNC] start block %d, +%d to sync", lbound, count) log.info("[FAST SYNC] start block %d, +%d to sync", lbound, count)
timer = Timer(count, entity='block', laps=['rps', 'wps']) timer = Timer(count, entity='block', laps=['rps', 'wps'])
while lbound < ubound: while lbound < ubound:
timer.batch_start() timer.batch_start()
...@@ -329,10 +329,18 @@ class Sync: ...@@ -329,10 +329,18 @@ class Sync:
Blocks.process_multi(blocks, prepared_vops, is_initial_sync) Blocks.process_multi(blocks, prepared_vops, is_initial_sync)
timer.batch_finish(len(blocks)) timer.batch_finish(len(blocks))
_prefix = ("[SYNC] Got block %d @ %s" % ( otm = OPSM.log_current("Operations present in the processed blocks")
ftm = FSM.log_current("Flushing times")
_prefix = ("[FAST SYNC] Got block %d @ %s" % (
to - 1, blocks[-1]['timestamp'])) to - 1, blocks[-1]['timestamp']))
log.info(timer.batch_status(_prefix)) log.info(timer.batch_status(_prefix))
OPSM.next_blocks()
FSM.next_blocks()
PC.broadcast(BroadcastObject('sync_current_block', to, 'blocks'))
def listen(self): def listen(self):
"""Live (block following) mode.""" """Live (block following) mode."""
trail_blocks = self._conf.get('trail_blocks') trail_blocks = self._conf.get('trail_blocks')
...@@ -349,35 +357,38 @@ class Sync: ...@@ -349,35 +357,38 @@ class Sync:
start_time = perf() start_time = perf()
num = int(block['block_id'][:8], base=16) num = int(block['block_id'][:8], base=16)
log.info("[LIVE] About to process block %d", num) log.info("[LIVE SYNC] =====> About to process block %d", num)
vops = steemd.enum_virtual_ops(self._conf, num, num + 1) vops = steemd.enum_virtual_ops(self._conf, num, num + 1)
prepared_vops = prepare_vops(vops) prepared_vops = prepare_vops(vops)
num_vops = len(prepared_vops[num]) if num in prepared_vops else 0 Blocks.process_multi([block], prepared_vops, False)
otm = OPSM.log_current("Operations present in the processed blocks")
self._db.query("START TRANSACTION") ftm = FSM.log_current("Flushing times")
follows, accounts = Blocks.process(block, num, prepared_vops)
self._db.query("COMMIT")
ms = (perf() - start_time) * 1000 ms = (perf() - start_time) * 1000
log.info("[LIVE] Processed block %d at %s --% 4d txs, % 3d follows, % 3d accounts, % 4d vops" log.info("[LIVE SYNC] <===== Processed block %d at %s --% 4d txs"
" --% 5dms%s", num, block['timestamp'], len(block['transactions']), " --% 5dms%s", num, block['timestamp'], len(block['transactions']),
follows, accounts, num_vops, ms, ' SLOW' if ms > 1000 else '') ms, ' SLOW' if ms > 1000 else '')
log.info("[LIVE SYNC] Current system time: %s", datetime.now().strftime("%H:%M:%S"))
if num % 1200 == 0: #1hr if num % 1200 == 0: #1hr
log.warning("head block %d @ %s", num, block['timestamp']) log.warning("head block %d @ %s", num, block['timestamp'])
log.info("[LIVE] hourly stats") log.info("[LIVE SYNC] hourly stats")
if num % 1200 == 0: #1hour if num % 1200 == 0: #1hour
log.info("[LIVE] filling payout_stats_view executed") log.info("[LIVE SYNC] filling payout_stats_view executed")
with ThreadPoolExecutor(max_workers=2) as executor: with ThreadPoolExecutor(max_workers=2) as executor:
executor.submit(PayoutStats.generate) executor.submit(PayoutStats.generate)
executor.submit(Mentions.refresh) executor.submit(Mentions.refresh)
if num % 200 == 0: #10min if num % 200 == 0: #10min
update_communities_posts_and_rank() update_communities_posts_and_rank()
if num % 20 == 0: #1min if num % 20 == 0: #1min
self._update_chain_state() self._update_chain_state()
PC.broadcast(BroadcastObject('sync_current_block', num, 'blocks'))
FSM.next_blocks()
OPSM.next_blocks()
# refetch dynamic_global_properties, feed price, etc # refetch dynamic_global_properties, feed price, etc
def _update_chain_state(self): def _update_chain_state(self):
"""Update basic state props (head block, feed price) in db.""" """Update basic state props (head block, feed price) in db."""
......
...@@ -4,203 +4,17 @@ from hive.server.common.helpers import last_month ...@@ -4,203 +4,17 @@ from hive.server.common.helpers import last_month
# pylint: disable=too-many-lines # pylint: disable=too-many-lines
DEFAULT_CID = 1317453
PAYOUT_WINDOW = "now() + interval '12 hours' AND now() + interval '36 hours'"
async def _get_post_id(db, author, permlink): async def _get_post_id(db, author, permlink):
"""Get post_id from hive db. (does NOT filter on is_deleted)""" """Get post_id from hive db."""
sql = """ post_id = await db.query_one("SELECT find_comment_id( :a, :p, True )", a=author, p=permlink)
SELECT
hp.id
FROM hive_posts hp
INNER JOIN hive_accounts ha_a ON ha_a.id = hp.author_id
INNER JOIN hive_permlink_data hpd_p ON hpd_p.id = hp.permlink_id
WHERE ha_a.name = :author AND hpd_p.permlink = :permlink"""
post_id = await db.query_one(sql, author=author, permlink=permlink)
assert post_id, 'invalid author/permlink'
return post_id return post_id
async def _get_account_id(db, name): async def _get_account_id(db, name):
"""Get account id from hive db.""" """Get account id from hive db."""
assert name, 'no account name specified' assert name, 'no account name specified'
_id = await db.query_one("SELECT id FROM hive_accounts WHERE name = :n", n=name) _id = await db.query_one("SELECT find_account_id( :n, True )", n=name)
assert _id, "account not found: `%s`" % name
return _id
async def _get_community_id(db, name):
"""Get community id from hive db."""
assert name, 'no comm name specified'
_id = await db.query_one("SELECT id FROM hive_communities WHERE name = :n", n=name)
return _id return _id
#TODO: async def posts_by_ranked
async def pids_by_ranked(db, sort, start_author, start_permlink, limit, tag, observer_id=None):
"""Get a list of post_ids for a given posts query.
if `tag` is blank: global trending
if `tag` is `my`: personal trending
if `tag` is `hive-*`: community trending
else `tag` is a tag: tag trending
Valid `sort` values:
- legacy: trending, hot, created, promoted, payout, payout_comments
- hive: trending, hot, created, promoted, payout, muted
"""
# TODO: `payout` should limit to ~24hrs
# pylint: disable=too-many-arguments
# list of comm ids to query, if tag is comms key
cids = None
single = None
if tag == 'my':
cids = await _subscribed(db, observer_id)
if not cids: return []
elif tag == 'all':
cids = []
elif tag[:5] == 'hive-':
single = await _get_community_id(db, tag)
if single: cids = [single]
# if tag was comms key, then no tag filter
if cids is not None: tag = None
start_id = None
if start_permlink:
start_id = await _get_post_id(db, start_author, start_permlink)
if cids is None:
pids = await pids_by_category(db, tag, sort, start_id, limit)
else:
pids = await pids_by_community(db, cids, sort, start_id, limit)
# if not filtered by tag, is first page trending: prepend pinned
if not tag and not start_id and sort in ('trending', 'created'):
prepend = await _pinned(db, single or DEFAULT_CID)
for pid in prepend:
if pid in pids:
pids.remove(pid)
pids = prepend + pids
return pids
async def pids_by_community(db, ids, sort, seek_id, limit):
"""Get a list of post_ids for a given posts query.
`sort` can be trending, hot, created, promoted, payout, or payout_comments.
"""
# pylint: disable=bad-whitespace, line-too-long
# TODO: `payout` should limit to ~24hrs
definitions = {# field pending toponly gray promoted
'trending': ('sc_trend', False, True, False, False),
'hot': ('sc_hot', False, True, False, False),
'created': ('created_at', False, True, False, False),
'promoted': ('promoted', True, True, False, True),
'payout': ('payout', True, False, False, False),
'muted': ('payout', True, False, True, False)}
# validate
assert sort in definitions, 'unknown sort %s' % sort
# setup
field, pending, toponly, gray, promoted = definitions[sort]
table = 'hive_posts'
where = ["community_id IN :ids"] if ids else ["community_id IS NOT NULL AND community_id != 1337319"]
# select
if gray: where.append("is_grayed = '1'")
if not gray: where.append("is_grayed = '0'")
if toponly: where.append("depth = 0")
if pending: where.append("is_paidout = '0'")
if promoted: where.append('promoted > 0')
if sort == 'payout': where.append("payout_at BETWEEN %s" % PAYOUT_WINDOW)
# seek
if seek_id:
sval = "(SELECT %s FROM %s WHERE id = :seek_id)" % (field, table)
sql = """((%s < %s) OR (%s = %s AND id > :seek_id))"""
where.append(sql % (field, sval, field, sval))
# simpler `%s <= %s` eval has edge case: many posts with payout 0
#sql = "SELECT %s FROM %s WHERE post_id = :id)"
#seek_val = await db.query_col(sql % (field, table), id=seek_id)
#sql = """((%s < :seek_val) OR
# (%s = :seek_val AND post_id > :seek_id))"""
#where.append(sql % (field, sval, field, sval))
# build
sql = ("""SELECT id FROM %s WHERE %s
ORDER BY %s DESC, id LIMIT :limit
""" % (table, ' AND '.join(where), field))
# execute
return await db.query_col(sql, ids=tuple(ids), seek_id=seek_id, limit=limit)
async def pids_by_category(db, tag, sort, last_id, limit):
"""Get a list of post_ids for a given posts query.
`sort` can be trending, hot, created, promoted, payout, or payout_comments.
"""
# pylint: disable=bad-whitespace
assert sort in ['trending', 'hot', 'created', 'promoted',
'payout', 'payout_comments', 'muted']
params = { # field pending posts comment promoted
'trending': ('sc_trend', True, True, False, False),
'hot': ('sc_hot', True, True, False, False),
'created': ('post_id', False, True, False, False),
'promoted': ('promoted', True, False, False, True),
'payout': ('payout', True, False, False, False),
'payout_comments': ('payout', True, False, True, False),
'muted': ('payout', True, False, False, False),
}[sort]
table = 'hive_post'
field = params[0]
where = []
# primary filters
if params[1]: where.append("is_paidout = '0'")
if params[2]: where.append('depth = 0')
if params[3]: where.append('depth > 0')
if params[4]: where.append('promoted > 0')
if sort == 'muted': where.append("is_grayed = '1' AND payout > 0")
if sort == 'payout': where.append("payout_at BETWEEN %s" % PAYOUT_WINDOW)
# filter by category or tag
if tag:
if sort in ['payout', 'payout_comments']:
where.append('category_id = (SELECT id FROM hive_category_data WHERE category = :tag)')
else:
sql = """
SELECT
post_id
FROM
hive_post_tags hpt
INNER JOIN hive_tag_data htd ON hpt.tag_id=htd.id
WHERE htd.tag = :tag
"""
where.append("id IN (%s)" % sql)
if last_id:
sval = "(SELECT %s FROM %s WHERE id = :last_id)" % (field, table)
sql = """((%s < %s) OR (%s = %s AND id > :last_id))"""
where.append(sql % (field, sval, field, sval))
sql = ("""SELECT id FROM %s WHERE %s
ORDER BY %s DESC, post_id LIMIT :limit
""" % (table, ' AND '.join(where), field))
return await db.query_col(sql, tag=tag, last_id=last_id, limit=limit)
async def _subscribed(db, account_id):
sql = """SELECT community_id FROM hive_subscriptions
WHERE account_id = :account_id"""
return await db.query_col(sql, account_id=account_id)
async def _pinned(db, community_id): async def _pinned(db, community_id):
"""Get a list of pinned post `id`s in `community`.""" """Get a list of pinned post `id`s in `community`."""
sql = """SELECT id FROM hive_posts sql = """SELECT id FROM hive_posts
...@@ -310,30 +124,6 @@ async def pids_by_feed_with_reblog(db, account: str, start_author: str = '', ...@@ -310,30 +124,6 @@ async def pids_by_feed_with_reblog(db, account: str, start_author: str = '',
limit=limit, cutoff=last_month()) limit=limit, cutoff=last_month())
return [(row[0], row[1]) for row in result] return [(row[0], row[1]) for row in result]
async def pids_by_posts(db, account: str, start_permlink: str = '', limit: int = 20):
"""Get a list of post_ids representing top-level posts by an author."""
seek = ''
start_id = None
if start_permlink:
start_id = await _get_post_id(db, account, start_permlink)
if not start_id:
return []
seek = "AND id <= :start_id"
# `depth` in ORDER BY is a no-op, but forces an ix3 index scan (see #189)
sql = """
SELECT id FROM hive_posts
WHERE author = (SELECT id FROM hive_accounts WHERE name = :account) %s
AND counter_deleted = 0
AND depth = '0'
ORDER BY id DESC
LIMIT :limit
""" % seek
return await db.query_col(sql, account=account, start_id=start_id, limit=limit)
async def pids_by_comments(db, account: str, start_permlink: str = '', limit: int = 20): async def pids_by_comments(db, account: str, start_permlink: str = '', limit: int = 20):
"""Get a list of post_ids representing comments by an author.""" """Get a list of post_ids representing comments by an author."""
seek = '' seek = ''
...@@ -366,25 +156,3 @@ async def pids_by_replies(db, author: str, start_replies_author: str, start_repl ...@@ -366,25 +156,3 @@ async def pids_by_replies(db, author: str, start_replies_author: str, start_repl
sql = "SELECT get_account_post_replies( (:author)::VARCHAR, (:start_author)::VARCHAR, (:start_permlink)::VARCHAR, (:limit)::SMALLINT ) as id" sql = "SELECT get_account_post_replies( (:author)::VARCHAR, (:start_author)::VARCHAR, (:start_permlink)::VARCHAR, (:limit)::SMALLINT ) as id"
return await db.query_col( return await db.query_col(
sql, author=author, start_author=start_replies_author, start_permlink=start_replies_permlink, limit=limit) sql, author=author, start_author=start_replies_author, start_permlink=start_replies_permlink, limit=limit)
async def pids_by_payout(db, account: str, start_author: str = '',
start_permlink: str = '', limit: int = 20):
"""Get a list of post_ids for an author's blog."""
seek = ''
start_id = None
if start_permlink:
start_id = await _get_post_id(db, start_author, start_permlink)
last = "(SELECT payout FROM hive_posts WHERE id = :start_id)"
seek = ("""AND (payout < %s OR (payout = %s AND id > :start_id))"""
% (last, last))
sql = """
SELECT id
FROM hive_posts
WHERE author_id = (SELECT id FROM hive_accounts WHERE name = :account)
AND is_paidout = '0' %s
ORDER BY payout DESC, post_id
LIMIT :limit
""" % seek
return await db.query_col(sql, account=account, start_id=start_id, limit=limit)
...@@ -273,9 +273,9 @@ async def _get_ranked_posts_for_all( db, sort:str, start_author:str, start_perml ...@@ -273,9 +273,9 @@ async def _get_ranked_posts_for_all( db, sort:str, start_author:str, start_perml
async def get_ranked_posts(context, sort:str, start_author:str='', start_permlink:str='', async def get_ranked_posts(context, sort:str, start_author:str='', start_permlink:str='',
limit:int=20, tag:str=None, observer:str=None): limit:int=20, tag:str=None, observer:str=None):
"""Query posts, sorted by given method.""" """Query posts, sorted by given method."""
supported_sort_list = ['trending', 'hot', 'created', 'promoted',
assert sort in ['trending', 'hot', 'created', 'promoted', 'payout', 'payout_comments', 'muted']
'payout', 'payout_comments', 'muted'], 'invalid sort' assert sort in supported_sort_list, "Unsupported sort, valid sorts: {}".format(", ".join(supported_sort_list))
async def process_query_results( sql_result ): async def process_query_results( sql_result ):
posts = [] posts = []
...@@ -341,61 +341,72 @@ async def append_statistics_to_post(post, row, is_pinned, blacklists_for_user=No ...@@ -341,61 +341,72 @@ async def append_statistics_to_post(post, row, is_pinned, blacklists_for_user=No
post['stats']['is_pinned'] = True post['stats']['is_pinned'] = True
return post return post
async def _get_account_posts_by_blog(db, account : str, start_author : str, start_permlink : str, limit : int):
_ids = await cursor.pids_by_blog(db, account, start_author, start_permlink, limit)
posts = await load_posts(db, _ids)
for post in posts:
if post['author'] != account:
post['reblogged_by'] = [account]
return posts
async def _get_account_posts_by_feed(db, account : str, start_author : str, start_permlink : str, limit : int):
_ids = await cursor.pids_by_feed_with_reblog(db, account, start_author, start_permlink, limit)
return await load_posts_reblogs(db, _ids)
async def _get_account_posts_by_replies(db, account : str, start_author : str, start_permlink : str, limit : int):
_ids = await cursor.pids_by_replies(db, account, start_author, start_permlink, limit)
return await load_posts(db, _ids)
async def _get_final_posts(db, sort : str, account, start_author : str, start_permlink : str, limit : int):
if sort == 'blog':
return await _get_account_posts_by_blog(db, account, start_author, start_permlink, limit)
elif sort == 'feed':
return await _get_account_posts_by_feed(db, account, start_author, start_permlink, limit)
elif sort == 'replies':
return await _get_account_posts_by_replies(db, account, start_author, start_permlink, limit)
async def _get_posts(db, sort : str, account, start_author : str, start_permlink : str, limit : int, observer : str ):
observer = valid_account(observer, allow_empty=True)
if sort == 'posts':
sql = "SELECT * FROM bridge_get_account_posts_by_posts( (:account)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
elif sort == 'comments':
sql = "SELECT * FROM bridge_get_account_posts_by_comments( (:account)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
elif sort == 'payout':
sql = "SELECT * FROM bridge_get_account_posts_by_payout( (:account)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
sql_result = await db.query_all(sql, account=account, author=start_author, permlink=start_permlink, limit=limit )
posts = []
blacklists_for_user = None
if observer:
blacklists_for_user = await Mutes.get_blacklists_for_observer(observer, context)
for row in sql_result:
post = _bridge_post_object(row)
post['active_votes'] = await find_votes_impl(db, row['author'], row['permlink'], VotesPresentation.BridgeApi)
post = await append_statistics_to_post(post, row, False, blacklists_for_user)
posts.append(post)
return posts
@return_error_info @return_error_info
async def get_account_posts(context, sort:str, account:str, start_author:str='', start_permlink:str='', async def get_account_posts(context, sort:str, account:str, start_author:str='', start_permlink:str='',
limit:int=20, observer:str=None): limit:int=20, observer:str=None):
"""Get posts for an account -- blog, feed, comments, or replies.""" """Get posts for an account -- blog, feed, comments, or replies."""
valid_sorts = ['blog', 'feed', 'posts', 'comments', 'replies', 'payout'] supported_sort_list = ['blog', 'feed', 'posts', 'comments', 'replies', 'payout']
assert sort in valid_sorts, 'invalid account sort' assert sort in supported_sort_list, "Unsupported sort, valid sorts: {}".format(", ".join(supported_sort_list))
db = context['db'] db = context['db']
account = valid_account(account)
start_author = valid_account(start_author, allow_empty=True)
start_permlink = valid_permlink(start_permlink, allow_empty=True)
start = (start_author, start_permlink)
limit = valid_limit(limit, 100, 20)
# pylint: disable=unused-variable
observer_id = await get_account_id(db, observer) if observer else None # TODO
sql = "---bridge_api.get_account_posts\n " + SQL_TEMPLATE + """ %s """ account = valid_account(account)
start_author = valid_account(start_author, allow_empty=True)
start_permlink = valid_permlink(start_permlink, allow_empty=True)
limit = valid_limit(limit, 100, 20)
if sort == 'blog': if sort == 'blog' or sort == 'feed' or sort == 'replies':
ids = await cursor.pids_by_blog(db, account, *start, limit) return await _get_final_posts(db, sort, account, start_author, start_permlink, limit)
posts = await load_posts(context['db'], ids)
for post in posts:
if post['author'] != account:
post['reblogged_by'] = [account]
return posts
elif sort == 'posts':
sql = sql % """ hp.author = :account AND hp.depth = 0 %s ORDER BY hp.id DESC LIMIT :limit"""
elif sort == 'comments':
sql = sql % """ hp.author = :account AND hp.depth > 0 %s ORDER BY hp.id DESC, hp.depth LIMIT :limit"""
elif sort == 'payout':
sql = sql % """ hp.author = :account AND NOT hp.is_paidout %s ORDER BY hp.payout DESC, hp.id LIMIT :limit"""
elif sort == 'feed':
res = await cursor.pids_by_feed_with_reblog(db, account, *start, limit)
return await load_posts_reblogs(context['db'], res)
elif sort == 'replies':
ids = await cursor.pids_by_replies(db, account, start_author, start_permlink, limit)
return await load_posts(context['db'], ids)
if start_author and start_permlink:
sql = sql % """ AND hp.id < (SELECT id FROM hive_posts WHERE author_id = (SELECT id FROM hive_accounts WHERE name = :author) AND permlink_id = (SELECT id FROM hive_permlink_data WHERE permlink = :permlink)) """
else: else:
sql = sql % """ """ return await _get_posts(db, sort, account, start_author, start_permlink, limit, observer)
posts = []
blacklists_for_user = None
if observer:
blacklists_for_user = await Mutes.get_blacklists_for_observer(observer, context)
sql_result = await db.query_all(sql, account=account, author=start_author, permlink=start_permlink, limit=limit)
for row in sql_result:
post = _bridge_post_object(row)
post['active_votes'] = await find_votes_impl(db, row['author'], row['permlink'], VotesPresentation.BridgeApi)
post = await append_statistics_to_post(post, row, False, blacklists_for_user)
posts.append(post)
return posts
@return_error_info @return_error_info
async def get_relationship_between_accounts(context, account1, account2, observer=None): async def get_relationship_between_accounts(context, account1, account2, observer=None):
......
...@@ -106,7 +106,7 @@ def valid_tag(tag, allow_empty=False): ...@@ -106,7 +106,7 @@ def valid_tag(tag, allow_empty=False):
def valid_number(num, lbound, ubound, default, name): def valid_number(num, lbound, ubound, default, name):
"""Given a user-provided number, return a valid int, or raise.""" """Given a user-provided number, return a valid int, or raise."""
if not num: if not num and num != 0:
assert default is not None, "%s must be provided" % name assert default is not None, "%s must be provided" % name
num = default num = default
try: try:
......
...@@ -9,12 +9,13 @@ from hive.server.common.helpers import json_date ...@@ -9,12 +9,13 @@ from hive.server.common.helpers import json_date
import datetime import datetime
@return_error_info @return_error_info
async def list_comments(context, start: list, limit: int, order: str): async def list_comments(context, start: list, limit: int = 1000, order: str = None):
"""Returns all comments, starting with the specified options.""" """Returns all comments, starting with the specified options."""
supported_order_list = ['by_cashout_time', 'by_permlink', 'by_root', 'by_parent', 'by_last_update', 'by_author_last_update'] supported_order_list = ['by_cashout_time', 'by_permlink', 'by_root', 'by_parent', 'by_last_update', 'by_author_last_update']
assert not order is None, "missing a required argument: 'order'"
assert order in supported_order_list, "Unsupported order, valid orders: {}".format(", ".join(supported_order_list)) assert order in supported_order_list, "Unsupported order, valid orders: {}".format(", ".join(supported_order_list))
limit = valid_limit(limit, 1000, None) limit = valid_limit(limit, 1000, 1000)
db = context['db'] db = context['db']
result = [] result = []
...@@ -94,6 +95,7 @@ async def find_comments(context, comments: list): ...@@ -94,6 +95,7 @@ async def find_comments(context, comments: list):
""" Search for comments: limit and order is ignored in hive code """ """ Search for comments: limit and order is ignored in hive code """
result = [] result = []
assert isinstance(comments, list), "Expected array of author+permlink pairs"
assert len(comments) <= 1000, "Parameters count is greather than max allowed (1000)" assert len(comments) <= 1000, "Parameters count is greather than max allowed (1000)"
db = context['db'] db = context['db']
...@@ -149,16 +151,23 @@ async def find_comments(context, comments: list): ...@@ -149,16 +151,23 @@ async def find_comments(context, comments: list):
idx = 0 idx = 0
values = "" values = ""
for arg in comments: for arg in comments:
if not isinstance(arg, list) or len(arg) < 2:
continue
author = arg[0]
permlink = arg[1]
if not isinstance(author, str) or not isinstance(permlink, str):
continue
if idx > 0: if idx > 0:
values += "," values += ","
values += "('{}','{}')".format(arg[0], arg[1]) values += "('{}','{}')".format(author, permlink) # escaping most likely needed
idx += 1 idx += 1
sql = SQL_TEMPLATE.format(values) sql = SQL_TEMPLATE.format(values)
rows = await db.query_all(sql) if idx > 0:
for row in rows: rows = await db.query_all(sql)
cpo = database_post_object(dict(row)) for row in rows:
result.append(cpo) cpo = database_post_object(dict(row))
result.append(cpo)
return { "comments": result } return { "comments": result }
...@@ -188,9 +197,9 @@ def api_vote_info(rows, votes_presentation): ...@@ -188,9 +197,9 @@ def api_vote_info(rows, votes_presentation):
return ret return ret
@return_error_info @return_error_info
async def find_votes_impl(db, author: str, permlink: str, votes_presentation): async def find_votes_impl(db, author: str, permlink: str, votes_presentation, limit: int = 1000):
sql = "SELECT * FROM find_votes(:author,:permlink)" sql = "SELECT * FROM find_votes(:author,:permlink,:limit)"
rows = await db.query_all(sql, author=author, permlink=permlink) rows = await db.query_all(sql, author=author, permlink=permlink, limit=limit)
return api_vote_info(rows, votes_presentation) return api_vote_info(rows, votes_presentation)
@return_error_info @return_error_info
...@@ -201,11 +210,12 @@ async def find_votes(context, author: str, permlink: str): ...@@ -201,11 +210,12 @@ async def find_votes(context, author: str, permlink: str):
return { 'votes': await find_votes_impl(context['db'], author, permlink, VotesPresentation.DatabaseApi) } return { 'votes': await find_votes_impl(context['db'], author, permlink, VotesPresentation.DatabaseApi) }
@return_error_info @return_error_info
async def list_votes(context, start: list, limit: int, order: str): async def list_votes(context, start: list, limit: int = 1000, order: str = None):
""" Returns all votes, starting with the specified voter and/or author and permlink. """ """ Returns all votes, starting with the specified voter and/or author and permlink. """
supported_order_list = ["by_comment_voter", "by_voter_comment"] supported_order_list = ["by_comment_voter", "by_voter_comment"]
assert order in supported_order_list, "Order {} is not supported".format(order) assert not order is None, "missing a required argument: 'order'"
limit = valid_limit(limit, 1000, None) assert order in supported_order_list, "Unsupported order, valid orders: {}".format(", ".join(supported_order_list))
limit = valid_limit(limit, 1000, 1000)
db = context['db'] db = context['db']
if order == "by_voter_comment": if order == "by_voter_comment":
......
from hive.db.adapter import Db from hive.db.adapter import Db
from hive.utils.timer import time_it
DB = Db.instance() DB = Db.instance()
""" """
...@@ -50,6 +51,7 @@ update_active_sql = """ ...@@ -50,6 +51,7 @@ update_active_sql = """
def update_all_posts_active(): def update_all_posts_active():
DB.query_no_return(update_active_sql.format( "WHERE ( children = 0 OR hp1.counter_deleted > 0 ) AND depth > 0" )) DB.query_no_return(update_active_sql.format( "WHERE ( children = 0 OR hp1.counter_deleted > 0 ) AND depth > 0" ))
@time_it
def update_active_starting_from_posts_on_block( first_block_num, last_block_num ): def update_active_starting_from_posts_on_block( first_block_num, last_block_num ):
if first_block_num == last_block_num: if first_block_num == last_block_num:
DB.query_no_return(update_active_sql.format( "WHERE block_num={} AND depth > 0" ).format(first_block_num) ) DB.query_no_return(update_active_sql.format( "WHERE block_num={} AND depth > 0" ).format(first_block_num) )
......
"""Timer for reporting progress on long batch operations.""" """Timer for reporting progress on long batch operations."""
from textwrap import wrap
from time import perf_counter as perf from time import perf_counter as perf
from hive.utils.normalize import secs_to_str from hive.utils.normalize import secs_to_str
from functools import wraps
import logging
log = logging.getLogger(__name__)
#timeit decorator for measuring method execution time
def time_it(method):
@wraps(method)
def time_method(*args, **kwargs):
start_time = perf()
result = method(*args, **kwargs)
log.info("%s executed in %.4f s", method.__name__, perf() - start_time)
return result
return time_method
class Timer: class Timer:
"""Times long routines, printing status and ETA. """Times long routines, printing status and ETA.
......
...@@ -3,6 +3,7 @@ import decimal ...@@ -3,6 +3,7 @@ import decimal
from hive.db.adapter import Db from hive.db.adapter import Db
from hive.utils.timer import time_it
DB = Db.instance() DB = Db.instance()
...@@ -12,6 +13,7 @@ def update_all_hot_and_tranding(): ...@@ -12,6 +13,7 @@ def update_all_hot_and_tranding():
NO_CONSTRAINT = -1 NO_CONSTRAINT = -1
@time_it
def update_hot_and_tranding_for_block_range( first_block = NO_CONSTRAINT, last_block = NO_CONSTRAINT): def update_hot_and_tranding_for_block_range( first_block = NO_CONSTRAINT, last_block = NO_CONSTRAINT):
"""Calculate and set hot and trending values of all posts""" """Calculate and set hot and trending values of all posts"""
hot_and_trend_sql = """ hot_and_trend_sql = """
......
Subproject commit 71c363476f8a64ad8a5b94e732055b08b9540400 Subproject commit e2a0a15101613b27788d66a689cb1a7700bdd4b5