Commit b7128908 authored by Bartek Wrona

Merge branch 'index_rebuilt_limit' into 'develop'

During sync, massive-sync conditions (dropped indexes, FKs, tables switched to...

See merge request !128
parents 1caea49a 331cf88a
@@ -46,8 +46,6 @@ class DbState:
             log.info("[INIT] Create db schema...")
             setup(cls.db())
-            cls._before_initial_sync()
-
         # perform db migrations
         cls._check_migrations()
@@ -131,12 +129,18 @@ class DbState:
         return to_return
 
     @classmethod
-    def _before_initial_sync(cls):
+    def before_initial_sync(cls, last_imported_block, hived_head_block):
         """Routine which runs *once* after db setup.
 
         Disables non-critical indexes for faster initial sync, as well
         as foreign key constraints."""
 
+        to_sync = hived_head_block - last_imported_block
+        if to_sync < SYNCED_BLOCK_LIMIT:
+            log.info("[INIT] Skipping pre-initial sync hooks")
+            return
+
         engine = cls.db().engine()
         log.info("[INIT] Begin pre-initial sync hooks")
@@ -156,6 +160,22 @@ class DbState:
         log.info("[INIT] Finish pre-initial sync hooks")
 
+    @classmethod
+    def update_work_mem(cls, workmem_value):
+        row = cls.db().query_row("SHOW work_mem")
+        current_work_mem = row['work_mem']
+
+        sql = """
+              DO $$
+              BEGIN
+                EXECUTE 'ALTER DATABASE '||current_database()||' SET work_mem TO "{}"';
+              END
+              $$;
+              """
+        cls.db().query_no_return(sql.format(workmem_value))
+
+        return current_work_mem
+
     @classmethod
     def _after_initial_sync(cls, current_imported_block, last_imported_block):
         """Routine which runs *once* after initial sync.
@@ -184,6 +204,8 @@ class DbState:
         else:
             log.info("[INIT] Post-initial sync hooks skipped")
 
+        current_work_mem = cls.update_work_mem('2GB')
+
         time_start = perf_counter()
 
         # Update count of all child posts (what was hold during initial sync)
@@ -231,12 +253,14 @@ class DbState:
         time_end = perf_counter()
         log.info("[INIT] update_all_posts_active executed in %fs", time_end - time_start)
 
+        cls.update_work_mem(current_work_mem)
+
-        from hive.db.schema import create_fk, set_logged_table_attribute
-        set_logged_table_attribute(cls.db(), True)
+        if synced_blocks >= SYNCED_BLOCK_LIMIT:
+            from hive.db.schema import create_fk, set_logged_table_attribute
+            set_logged_table_attribute(cls.db(), True)
 
-            log.info("Recreating FKs")
-            create_fk(cls.db())
+            log.info("Recreating FKs")
+            create_fk(cls.db())
 
     @staticmethod
     def status():
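Note: the foreign keys are now recreated only when synced_blocks >= SYNCED_BLOCK_LIMIT, mirroring the gate in before_initial_sync, so constraints are rebuilt only if a massive sync actually dropped them. The work_mem default is raised to '2GB' before the heavy post-sync updates and restored afterwards. A hypothetical hardening of that save/restore as a context manager, so the old value comes back even if one of the updates raises (the merge restores it inline; the names below are illustrative, not hivemind code):

from contextlib import contextmanager

@contextmanager
def raised_work_mem(db_state_cls, value='2GB'):
    """Temporarily raise the database-level work_mem default via
    DbState.update_work_mem() and restore the previous value on exit."""
    previous = db_state_cls.update_work_mem(value)
    try:
        yield
    finally:
        db_state_cls.update_work_mem(previous)

# Sketch of intended usage inside _after_initial_sync:
#     with raised_work_mem(cls, '2GB'):
#         ...run the expensive post-sync UPDATE statements...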
@@ -402,7 +402,7 @@ def drop_fk(db):
     db.query_no_return("START TRANSACTION")
     for table in build_metadata().sorted_tables:
         for fk in table.foreign_keys:
-            sql = """ALTER TABLE {} DROP CONSTRAINT {}""".format(table.name, fk.name)
+            sql = """ALTER TABLE {} DROP CONSTRAINT IF EXISTS {}""".format(table.name, fk.name)
             db.query_no_return(sql)
     db.query_no_return("COMMIT")
@@ -238,12 +238,14 @@ class Sync:
         # community stats
         Community.recalc_pending_payouts()
 
-        sql = "SELECT num FROM hive_blocks ORDER BY num DESC LIMIT 1"
-        database_head_block = DbState.db().query_one(sql)
-        log.info("database_head_block : %s", database_head_block)
+        last_imported_block = Blocks.head_num()
+        hived_head_block = self._conf.get('test_max_block') or self._steem.last_irreversible()
+
+        log.info("database_head_block : %s", last_imported_block)
+        log.info("target_head_block : %s", hived_head_block)
 
         if DbState.is_initial_sync():
-            last_imported_block = Blocks.head_num()
+            DbState.before_initial_sync(last_imported_block, hived_head_block)
             # resume initial sync
             self.initial()
             if not CONTINUE_PROCESSING:
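Note: the ad-hoc SELECT against hive_blocks is replaced by Blocks.head_num(), and the target head is resolved before the initial-sync branch so before_initial_sync() can compare both ends of the gap. A self-contained sketch of the target-head resolution only; the stub classes below are stand-ins for the real config and hived client objects, and the block numbers are made up:

class FakeConf:
    def __init__(self, test_max_block=None):
        self._test_max_block = test_max_block
    def get(self, key):
        return self._test_max_block if key == 'test_max_block' else None

class FakeSteem:
    def last_irreversible(self):
        return 44_000_000  # made-up block number

def resolve_head_block(conf, steem):
    # `or` falls through to the node query when test_max_block is unset (None) or 0.
    return conf.get('test_max_block') or steem.last_irreversible()

assert resolve_head_block(FakeConf(), FakeSteem()) == 44_000_000
assert resolve_head_block(FakeConf(10_000), FakeSteem()) == 10_000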