Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found
Select Git revision
  • 127-eliminate-more-references-to-hive_posts_view
  • 221-vacuum-hivemind-tables
  • 267-rebased-onto-develop
  • 267-update-notification-cache-3
  • 676-as-tiny-assets
  • 72-postgres-warning-about-wrong-collation-in-recursive_deps-2
  • abw_ecv_op_experiment
  • abw_max_retries
  • abw_post_delete_fix
  • abw_rshares_experiment
  • add-git-to-requirements
  • add-vote-info
  • arc-get-content-deleted
  • as-tmp-to-remove
  • asuch-limit-follows
  • asuch-postgrest-fixing-problems
  • asuch-replace-python-with-sql-get-follow-count
  • asuch-tmp-wip-condenser-get-blog
  • autoexplain-python
  • bridge_api.get_ranked_posts_fixes
  • bw_1_27_5rc8_2master
  • bw_develop-haf-rebase
  • bw_docker_supplement
  • bw_entrypoint_signal_handler_fix
  • bw_haf_compat_sync
  • bw_hafah_datasource_test
  • bw_master2develop
  • bw_mi/hivemind_wit_sa_btracker_rebase
  • bw_rebased_develop-haf
  • bw_restore_log_memory_usage_call
  • bw_simplify_blacklisted_by_observer_view
  • bw_temp_hived_source_node_verification
  • bw_update_posts_rshares_speedup
  • bw_v1_27_5_0_0_rc0
  • change-python-limits
  • cherry-pick-5dd1da34
  • cherry-pick-98eaf112
  • complete-refactor
  • db-upgrade-assert
  • deployed_20200917
  • deployed_20200928
  • deployed_20200928_pure
  • deployed_20200928_reversible_ops
  • deployed_fixes_2_develop
  • develop
  • develop-haf-backup
  • dk-benchmarks-ci-improvements
  • dk-communities-unit-tests
  • dk-get-ids-for-accounts-hotfix
  • dk-issue-3-concurrent-block-query
  • dk-list-votes-pre24
  • dk-migration-script-tags-support
  • dk-num-block-hive-feed-cache
  • dk-readme-update
  • dk-reputation_api_support
  • dk-revert-black-lists
  • dk-sql-file-list
  • dk-sql-script-executor
  • dk-sql-scripts-from-schema
  • dk-xdist-and-time
  • dn-autovacuum
  • dn-default-autovacuum
  • dn-testing
  • dn_get_block_range
  • dn_parallel_safe
  • dn_prof
  • doc-fix
  • dockerfile-update-fix
  • emf-limit-follows
  • enum_block_operations-support
  • feature/beneficiaries_communities
  • feature/hive_votes_no_index
  • feature/mute-reason-test
  • feature/mute-reason_rebase
  • feature/new_communities_type_old
  • feature/new_community_types
  • feature/role-only-if-subscribed-test1
  • fix-duplicate-pinned-posts
  • fixing-tests-with-limits
  • follow-deltas
  • follow-redesign
  • follow-redesign-speedups
  • follow-redesign-tests
  • follow_api_tests
  • get-discussion-experiment
  • hivemind_testers
  • imwatsi-first-steps
  • jes2850-decentralized-lists
  • jsalyers-add-a-cascade
  • jsalyers-fix-muting-for-reblogs
  • jsalyers-fix-muting-on-bridge-get-discussion
  • jsalyers-muting-v2
  • jsalyers-test-mute-changes
  • kbotor/backup/building-hivemind-from-other-repos
  • kbotor/building-hivemind-from-other-repos
  • kbotor/ci-rewrite-for-parallel-replay
  • km_ah_api
  • km_get_content_2_0
  • km_get_content_fill_missing2deployed
  • km_history
  • 0.25.4
  • 1.25.0rc0
  • 1.25.2rc
  • 1.26.0
  • 1.26.1
  • 1.26.2
  • 1.26.3
  • 1.27.0.dev0
  • 1.27.10
  • 1.27.11rc1
  • 1.27.11rc2
  • 1.27.11rc3
  • 1.27.11rc4
  • 1.27.11rc5
  • 1.27.3.0.0
  • 1.27.3.0.0dev11
  • 1.27.3.0.0dev12
  • 1.27.3.0.0dev7
  • 1.27.5
  • 1.27.5.0.0rc7
  • 1.27.5rc8
  • 1.27.5rc9
  • 1.27.6rc3
  • 1.27.6rc5
  • 1.27.6rc6
  • 1.27.6rc7
  • 1.27.6rc8
  • 1.27.6rc9
  • 1.27.7rc10
  • 1.27.7rc11
  • 1.27.7rc12
  • 1.27.7rc13
  • 1.27.7rc14
  • 1.27.7rc15
  • 1.27.7rc16
  • 1.27.8
  • 1.27.9
  • Before-dk-issue-3
  • Full-sync
  • Full-sync-20200928
  • Full-sync-20201026
  • ScheduledBenchmarkTesting_18_Aug
  • env/staging-permissions
  • full_hive_sync_17_05_2021
  • git_versioning_support
  • hivemind_ci_support
  • tmp-logs
  • v1.24-RC
  • v1.24.0
  • v1.24.1
  • v1.24.1-RC
  • v1.24.2
  • v1.25.1
  • v1.25.2
  • v1.25.3
  • v1.25.4
  • v1.26.0
  • v1.27.4.0.0
  • v1.27.4.0.0dev1
  • v1.27.4.0.0dev2
160 results

Target

Select target project
  • hive/hivemind
1 result
Select Git revision
  • 127-eliminate-more-references-to-hive_posts_view
  • 221-vacuum-hivemind-tables
  • 267-rebased-onto-develop
  • 267-update-notification-cache-3
  • 676-as-tiny-assets
  • 72-postgres-warning-about-wrong-collation-in-recursive_deps-2
  • abw_ecv_op_experiment
  • abw_max_retries
  • abw_post_delete_fix
  • abw_rshares_experiment
  • add-git-to-requirements
  • add-vote-info
  • arc-get-content-deleted
  • as-tmp-to-remove
  • asuch-limit-follows
  • asuch-postgrest-fixing-problems
  • asuch-replace-python-with-sql-get-follow-count
  • asuch-tmp-wip-condenser-get-blog
  • autoexplain-python
  • bridge_api.get_ranked_posts_fixes
  • bw_1_27_5rc8_2master
  • bw_develop-haf-rebase
  • bw_docker_supplement
  • bw_entrypoint_signal_handler_fix
  • bw_haf_compat_sync
  • bw_hafah_datasource_test
  • bw_master2develop
  • bw_mi/hivemind_wit_sa_btracker_rebase
  • bw_rebased_develop-haf
  • bw_restore_log_memory_usage_call
  • bw_simplify_blacklisted_by_observer_view
  • bw_temp_hived_source_node_verification
  • bw_update_posts_rshares_speedup
  • bw_v1_27_5_0_0_rc0
  • change-python-limits
  • cherry-pick-5dd1da34
  • cherry-pick-98eaf112
  • complete-refactor
  • db-upgrade-assert
  • deployed_20200917
  • deployed_20200928
  • deployed_20200928_pure
  • deployed_20200928_reversible_ops
  • deployed_fixes_2_develop
  • develop
  • develop-haf-backup
  • dk-benchmarks-ci-improvements
  • dk-communities-unit-tests
  • dk-get-ids-for-accounts-hotfix
  • dk-issue-3-concurrent-block-query
  • dk-list-votes-pre24
  • dk-migration-script-tags-support
  • dk-num-block-hive-feed-cache
  • dk-readme-update
  • dk-reputation_api_support
  • dk-revert-black-lists
  • dk-sql-file-list
  • dk-sql-script-executor
  • dk-sql-scripts-from-schema
  • dk-xdist-and-time
  • dn-autovacuum
  • dn-default-autovacuum
  • dn-testing
  • dn_get_block_range
  • dn_parallel_safe
  • dn_prof
  • doc-fix
  • dockerfile-update-fix
  • emf-limit-follows
  • enum_block_operations-support
  • feature/beneficiaries_communities
  • feature/hive_votes_no_index
  • feature/mute-reason-test
  • feature/mute-reason_rebase
  • feature/new_communities_type_old
  • feature/new_community_types
  • feature/role-only-if-subscribed-test1
  • fix-duplicate-pinned-posts
  • fixing-tests-with-limits
  • follow-deltas
  • follow-redesign
  • follow-redesign-speedups
  • follow-redesign-tests
  • follow_api_tests
  • get-discussion-experiment
  • hivemind_testers
  • imwatsi-first-steps
  • jes2850-decentralized-lists
  • jsalyers-add-a-cascade
  • jsalyers-fix-muting-for-reblogs
  • jsalyers-fix-muting-on-bridge-get-discussion
  • jsalyers-muting-v2
  • jsalyers-test-mute-changes
  • kbotor/backup/building-hivemind-from-other-repos
  • kbotor/building-hivemind-from-other-repos
  • kbotor/ci-rewrite-for-parallel-replay
  • km_ah_api
  • km_get_content_2_0
  • km_get_content_fill_missing2deployed
  • km_history
  • 0.25.4
  • 1.25.0rc0
  • 1.25.2rc
  • 1.26.0
  • 1.26.1
  • 1.26.2
  • 1.26.3
  • 1.27.0.dev0
  • 1.27.10
  • 1.27.11rc1
  • 1.27.11rc2
  • 1.27.11rc3
  • 1.27.11rc4
  • 1.27.11rc5
  • 1.27.3.0.0
  • 1.27.3.0.0dev11
  • 1.27.3.0.0dev12
  • 1.27.3.0.0dev7
  • 1.27.5
  • 1.27.5.0.0rc7
  • 1.27.5rc8
  • 1.27.5rc9
  • 1.27.6rc3
  • 1.27.6rc5
  • 1.27.6rc6
  • 1.27.6rc7
  • 1.27.6rc8
  • 1.27.6rc9
  • 1.27.7rc10
  • 1.27.7rc11
  • 1.27.7rc12
  • 1.27.7rc13
  • 1.27.7rc14
  • 1.27.7rc15
  • 1.27.7rc16
  • 1.27.8
  • 1.27.9
  • Before-dk-issue-3
  • Full-sync
  • Full-sync-20200928
  • Full-sync-20201026
  • ScheduledBenchmarkTesting_18_Aug
  • env/staging-permissions
  • full_hive_sync_17_05_2021
  • git_versioning_support
  • hivemind_ci_support
  • tmp-logs
  • v1.24-RC
  • v1.24.0
  • v1.24.1
  • v1.24.1-RC
  • v1.24.2
  • v1.25.1
  • v1.25.2
  • v1.25.3
  • v1.25.4
  • v1.26.0
  • v1.27.4.0.0
  • v1.27.4.0.0dev1
  • v1.27.4.0.0dev2
160 results
Show changes
Commits on Source (3)
......@@ -177,7 +177,11 @@ class Db:
try:
start = perf()
query = self._sql_text(sql)
if 'log_query' in kwargs and kwargs['log_query']:
log.info("QUERY: {}".format(query))
result = self._exec(query, **kwargs)
if 'log_result' in kwargs and kwargs['log_result']:
log.info("RESULT: {}".format(result))
Stats.log_db(sql, perf() - start)
return result
except Exception as e:
......
......@@ -224,10 +224,13 @@ def build_metadata():
sa.Column('created_at', sa.DateTime, nullable=False),
sa.Column('blacklisted', sa.Boolean, nullable=False, server_default='0'),
sa.Column('follow_blacklists', sa.Boolean, nullable=False, server_default='0'),
sa.Column('block_num', sa.Integer, nullable=False ),
sa.PrimaryKeyConstraint('following', 'follower', name='hive_follows_pk'), # core
sa.ForeignKeyConstraint(['block_num'], ['hive_blocks.num'], name='hive_follows_fk1'),
sa.Index('hive_follows_ix5a', 'following', 'state', 'created_at', 'follower'),
sa.Index('hive_follows_ix5b', 'follower', 'state', 'created_at', 'following'),
sa.Index('hive_follows_block_num_idx', 'block_num')
)
sa.Table(
......@@ -235,12 +238,15 @@ def build_metadata():
sa.Column('account', VARCHAR(16), nullable=False),
sa.Column('post_id', sa.Integer, nullable=False),
sa.Column('created_at', sa.DateTime, nullable=False),
sa.Column('block_num', sa.Integer, nullable=False ),
sa.ForeignKeyConstraint(['account'], ['hive_accounts.name'], name='hive_reblogs_fk1'),
sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_reblogs_fk2'),
sa.ForeignKeyConstraint(['block_num'], ['hive_blocks.num'], name='hive_reblogs_fk3'),
sa.PrimaryKeyConstraint('account', 'post_id', name='hive_reblogs_pk'), # core
sa.Index('hive_reblogs_account', 'account'),
sa.Index('hive_reblogs_post_id', 'post_id'),
sa.Index('hive_reblogs_block_num_idx', 'block_num')
)
sa.Table(
......
"""Blocks processor."""
from hive.indexer.reblog import Reblog
import logging
import json
......@@ -62,6 +63,7 @@ class Blocks:
Tags.flush()
Votes.flush()
Posts.flush()
Reblog.flush()
block_num = int(block['block_id'][:8], base=16)
cls.on_live_blocks_processed( block_num, block_num )
time_end = perf_counter()
......@@ -103,6 +105,7 @@ class Blocks:
folllow_items = len(Follow.follow_items_to_flush) + Follow.flush(trx=False)
flush_time = register_time(flush_time, "Follow", folllow_items)
flush_time = register_time(flush_time, "Posts", Posts.flush())
flush_time = register_time(flush_time, "Reblog", Reblog.flush())
if (not is_initial_sync) and (first_block > -1):
cls.on_live_blocks_processed( first_block, last_num )
......
"""Main custom_json op handler."""
import logging
from funcy.seqs import first, second
from hive.db.adapter import Db
from hive.db.db_state import DbState
from hive.indexer.accounts import Accounts
from hive.indexer.posts import Posts
from hive.indexer.feed_cache import FeedCache
from hive.indexer.follow import Follow
from hive.indexer.reblog import Reblog
from hive.indexer.notify import Notify
from hive.indexer.community import process_json_community_op, START_BLOCK
......@@ -44,7 +42,7 @@ class CustomOp:
"""Given a list of operation in block, filter and process them."""
for op in ops:
start = OPSM.start()
opName = str(op['id']) + ( '-ignored' if op['id'] not in ['follow', 'community', 'notify'] else '' )
opName = str(op['id']) + ( '-ignored' if op['id'] not in ['follow', 'community', 'notify', 'reblog'] else '' )
account = _get_auth(op)
if not account:
......@@ -54,13 +52,16 @@ class CustomOp:
if op['id'] == 'follow':
if block_num < 6000000 and not isinstance(op_json, list):
op_json = ['follow', op_json] # legacy compat
cls._process_legacy(account, op_json, block_date)
cls._process_legacy(account, op_json, block_date, block_num)
elif op['id'] == 'reblog':
if block_num < 6000000 and not isinstance(op_json, list):
op_json = ['reblog', op_json] # legacy compat
cls._process_legacy(account, op_json, block_date, block_num)
elif op['id'] == 'community':
if block_num > START_BLOCK:
process_json_community_op(account, op_json, block_date)
elif op['id'] == 'notify':
cls._process_notify(account, op_json, block_date)
OPSM.op_stats(opName, OPSM.stop(start))
@classmethod
......@@ -81,7 +82,7 @@ class CustomOp:
log.warning("notify op fail: %s in %s", e, op_json)
@classmethod
def _process_legacy(cls, account, op_json, block_date):
def _process_legacy(cls, account, op_json, block_date, block_num):
"""Handle legacy 'follow' plugin ops (follow/mute/clear, reblog)
follow {follower: {type: 'account'},
......@@ -103,70 +104,6 @@ class CustomOp:
cmd, op_json = op_json # ['follow', {data...}]
if cmd == 'follow':
Follow.follow_op(account, op_json, block_date)
Follow.follow_op(account, op_json, block_date, block_num)
elif cmd == 'reblog':
cls.reblog(account, op_json, block_date)
@classmethod
def reblog(cls, account, op_json, block_date):
"""Handle legacy 'reblog' op"""
if ('account' not in op_json
or 'author' not in op_json
or 'permlink' not in op_json):
return
blogger = op_json['account']
author = op_json['author']
permlink = op_json['permlink']
if blogger != account:
return # impersonation
if not all(map(Accounts.exists, [author, blogger])):
return
if 'delete' in op_json and op_json['delete'] == 'delete':
sql = """
WITH processing_set AS (
SELECT hp.id as post_id, ha.id as account_id
FROM hive_posts hp
INNER JOIN hive_accounts ha ON hp.author_id = ha.id
INNER JOIN hive_permlink_data hpd ON hp.permlink_id = hpd.id
WHERE ha.name = :a AND hpd.permlink = :permlink AND hp.depth <= 0
)
DELETE FROM hive_reblogs AS hr
WHERE hr.account = :a AND hr.post_id IN (SELECT ps.post_id FROM processing_set ps)
RETURNING hr.post_id, (SELECT ps.account_id FROM processing_set ps) AS account_id
"""
row = DB.query_row(sql, a=blogger, permlink=permlink)
if row is None:
log.debug("reblog: post not found: %s/%s", author, permlink)
return
if not DbState.is_initial_sync():
result = dict(row)
FeedCache.delete(result['post_id'], result['account_id'])
else:
sql = """
INSERT INTO hive_reblogs (account, post_id, created_at)
SELECT ha.name, hp.id as post_id, :date
FROM hive_accounts ha
INNER JOIN hive_posts hp ON hp.author_id = ha.id
INNER JOIN hive_permlink_data hpd ON hpd.id = hp.permlink_id
WHERE ha.name = :a AND hpd.permlink = :p
ON CONFLICT (account, post_id) DO NOTHING
RETURNING post_id
"""
row = DB.query_row(sql, a=blogger, p=permlink, date=block_date)
if not DbState.is_initial_sync():
author_id = Accounts.get_id(author)
blogger_id = Accounts.get_id(blogger)
if row is not None:
result = dict(row)
post_id = result['post_id']
FeedCache.insert(post_id, blogger_id, block_date)
Notify('reblog', src_id=blogger_id, dst_id=author_id,
post_id=post_id, when=block_date,
score=Accounts.default_score(blogger)).write()
else:
log.error("Error in reblog: row is None!")
Reblog.reblog_op(account, op_json, block_date, block_num)
......@@ -17,7 +17,7 @@ FOLLOWERS = 'followers'
FOLLOWING = 'following'
FOLLOW_ITEM_INSERT_QUERY = """
INSERT INTO hive_follows as hf (follower, following, created_at, state, blacklisted, follow_blacklists)
INSERT INTO hive_follows as hf (follower, following, created_at, state, blacklisted, follow_blacklists, block_num)
VALUES
(
:flr,
......@@ -35,7 +35,8 @@ FOLLOW_ITEM_INSERT_QUERY = """
WHEN 4 THEN TRUE
ELSE TRUE
END
)
),
:block_num
)
ON CONFLICT (follower, following) DO UPDATE
SET
......@@ -71,11 +72,12 @@ class Follow:
follow_items_to_flush = dict()
@classmethod
def follow_op(cls, account, op_json, date):
def follow_op(cls, account, op_json, date, block_num):
"""Process an incoming follow op."""
op = cls._validated_op(account, op_json, date)
if not op:
return
op['block_num'] = block_num
# perform delta check
new_state = op['state']
......@@ -94,7 +96,8 @@ class Follow:
flr=op['flr'],
flg=op['flg'],
state=op['state'],
at=op['at'])
at=op['at'],
block_num=op['block_num'])
else:
old_state = cls._get_follow_db_state(op['flr'], op['flg'])
......@@ -171,7 +174,7 @@ class Follow:
@classmethod
def _flush_follow_items(cls):
sql_prefix = """
INSERT INTO hive_follows as hf (follower, following, created_at, state, blacklisted, follow_blacklists)
INSERT INTO hive_follows as hf (follower, following, created_at, state, blacklisted, follow_blacklists, block_num)
VALUES """
sql_postfix = """
......@@ -198,20 +201,22 @@ class Follow:
count = 0
for _, follow_item in cls.follow_items_to_flush.items():
if count < limit:
values.append("({}, {}, '{}', {}, {}, {})".format(follow_item['flr'], follow_item['flg'],
values.append("({}, {}, '{}', {}, {}, {}, {})".format(follow_item['flr'], follow_item['flg'],
follow_item['at'], follow_item['state'],
follow_item['state'] == 3,
follow_item['state'] == 4))
follow_item['state'] == 4,
follow_item['block_num']))
count = count + 1
else:
query = sql_prefix + ",".join(values)
query += sql_postfix
DB.query(query)
values.clear()
values.append("({}, {}, '{}', {}, {}, {})".format(follow_item['flr'], follow_item['flg'],
values.append("({}, {}, '{}', {}, {}, {}, {})".format(follow_item['flr'], follow_item['flg'],
follow_item['at'], follow_item['state'],
follow_item['state'] == 3,
follow_item['state'] == 4))
follow_item['state'] == 4,
follow_item['block_num']))
count = 1
if len(values) > 0:
......
""" Class for reblog operations """
import logging
from hive.db.adapter import Db
from hive.db.db_state import DbState
from hive.indexer.accounts import Accounts
from hive.indexer.feed_cache import FeedCache
from hive.indexer.notify import Notify
DB = Db.instance()
log = logging.getLogger(__name__)
# SQL templates used by Reblog. All statements use named bind parameters
# (:a, :permlink, :blogger, :author, :date, :block_num) supplied at query time.

# Remove a blogger's reblog of a top-level post (hp.depth <= 0), located by the
# post author's account name and permlink. Returns the deleted post id and the
# author's account id so the caller can purge the feed cache entry.
DELETE_SQL = """
WITH processing_set AS (
SELECT hp.id as post_id, ha.id as account_id
FROM hive_posts hp
INNER JOIN hive_accounts ha ON hp.author_id = ha.id
INNER JOIN hive_permlink_data hpd ON hp.permlink_id = hpd.id
WHERE ha.name = :a AND hpd.permlink = :permlink AND hp.depth <= 0
)
DELETE FROM hive_reblogs AS hr
WHERE hr.account = :a AND hr.post_id IN (SELECT ps.post_id FROM processing_set ps)
RETURNING hr.post_id, (SELECT ps.account_id FROM processing_set ps) AS account_id
"""

# Resolve the row that would be inserted into hive_reblogs for a reblog op:
# (blogger, post_id, date, block_num). Used standalone during initial sync
# (rows are batched and inserted later by Reblog.flush).
SELECT_SQL = """
SELECT :blogger as blogger, hp.id as post_id, :date as date, :block_num as block_num
FROM hive_posts hp
INNER JOIN hive_accounts ha ON ha.id = hp.author_id
INNER JOIN hive_permlink_data hpd ON hpd.id = hp.permlink_id
WHERE ha.name = :author AND hpd.permlink = :permlink AND hp.depth <= 0
"""

# Live-sync path: insert the resolved row directly; duplicate reblogs are
# ignored via the primary-key conflict clause. Returns post_id on success,
# no row when the post was not found or the reblog already existed.
INSERT_SQL = """
INSERT INTO hive_reblogs (account, post_id, created_at, block_num)
""" + SELECT_SQL + """
ON CONFLICT ON CONSTRAINT hive_reblogs_pk DO NOTHING
RETURNING post_id
"""
class Reblog:
    """Handles legacy 'reblog' custom_json ops.

    During initial (massive) sync, resolved rows are queued in
    ``reblog_items_to_flush`` and written in batches by :meth:`flush`;
    during live sync each op is applied immediately and the feed cache /
    notifications are updated.
    """

    # Rows queued during initial sync; each is a dict with the keys produced
    # by SELECT_SQL: blogger, post_id, date, block_num.
    reblog_items_to_flush = []

    @classmethod
    def reblog_op(cls, account, op_json, block_date, block_num):
        """Process one reblog (or reblog-delete) operation.

        :param account: authenticated account that signed the op
        :param op_json: op payload; must contain 'account', 'author' and
                        'permlink'; 'delete' == 'delete' marks an un-reblog
        :param block_date: timestamp of the containing block
        :param block_num: number of the containing block
        """
        if any(key not in op_json for key in ('account', 'author', 'permlink')):
            return

        blogger = op_json['account']
        author = op_json['author']
        permlink = op_json['permlink']

        if blogger != account:
            return  # impersonation
        if not all(map(Accounts.exists, [author, blogger])):
            return

        if op_json.get('delete') == 'delete':
            row = DB.query_row(DELETE_SQL, a=blogger, permlink=permlink)
            if row is None:
                log.debug("reblog: post not found: %s/%s", author, permlink)
                return
            if not DbState.is_initial_sync():
                result = dict(row)
                FeedCache.delete(result['post_id'], result['account_id'])
            return

        if DbState.is_initial_sync():
            # Massive sync: only resolve the target row and queue it; the
            # actual INSERT happens in flush() as one batched statement.
            row = DB.query_row(SELECT_SQL, blogger=blogger, author=author,
                               permlink=permlink, date=block_date, block_num=block_num)
            if row is not None:
                cls.reblog_items_to_flush.append(dict(row))
        else:
            row = DB.query_row(INSERT_SQL, blogger=blogger, author=author,
                               permlink=permlink, date=block_date, block_num=block_num)
            if row is not None:
                author_id = Accounts.get_id(author)
                blogger_id = Accounts.get_id(blogger)
                post_id = dict(row)['post_id']
                FeedCache.insert(post_id, blogger_id, block_date)
                Notify('reblog', src_id=blogger_id, dst_id=author_id,
                       post_id=post_id, when=block_date,
                       score=Accounts.default_score(blogger)).write()
            else:
                # Post missing or duplicate reblog (ON CONFLICT DO NOTHING).
                log.warning("Error in reblog: Insert operation returned `None` as `post_id`. Op details: %s", op_json)

    @classmethod
    def flush(cls):
        """Insert all queued reblog rows in batches of 1000.

        :return: number of items that were queued (now flushed)
        """
        sql_prefix = """
INSERT INTO hive_reblogs (account, post_id, created_at, block_num)
VALUES
"""
        sql_postfix = """
ON CONFLICT ON CONSTRAINT hive_reblogs_pk DO NOTHING
"""
        # NOTE(review): values are spliced into the SQL text rather than bound
        # as parameters; safe only because account names are validated
        # upstream (Accounts.exists) — presumably intentional for batch speed.
        limit = 1000
        values = []
        item_count = len(cls.reblog_items_to_flush)
        for item in cls.reblog_items_to_flush:
            # Single formatting site (the original duplicated this literal in
            # two branches, risking silent divergence on future edits).
            values.append("('{}', {}, '{}', {})".format(
                item["blogger"], item["post_id"], item["date"], item["block_num"]))
            if len(values) >= limit:
                DB.query(sql_prefix + ",".join(values) + sql_postfix)
                values.clear()
        if values:
            DB.query(sql_prefix + ",".join(values) + sql_postfix)
        cls.reblog_items_to_flush.clear()
        return item_count
\ No newline at end of file
"""Hive sync manager."""
from hive.indexer.reblog import Reblog
import logging
import glob
from time import perf_counter as perf
......@@ -211,6 +212,12 @@ class Sync:
def run(self):
"""Initialize state; setup/recovery checks; sync and runloop."""
from hive.version import VERSION, GIT_REVISION
log.info("hivemind_version : %s", VERSION)
log.info("hivemind_git_rev : %s", GIT_REVISION)
from hive.db.schema import DB_VERSION as SCHEMA_DB_VERSION
log.info("database_schema_version : %s", SCHEMA_DB_VERSION)
# ensure db schema up to date, check app status
DbState.initialize()
......@@ -228,6 +235,10 @@ class Sync:
# community stats
Community.recalc_pending_payouts()
sql = "SELECT num FROM hive_blocks ORDER BY num DESC LIMIT 1"
database_head_block = DbState.db().query_one(sql)
log.info("database_head_block : %s", database_head_block)
if DbState.is_initial_sync():
last_imported_block = Blocks.head_num()
# resume initial sync
......