diff --git a/hive/db/schema.py b/hive/db/schema.py
index c1388fdead1f655dcd59baab755f6fee2a4b8402..282a96f0d6c0f3d901480b62d3ff2d5ab4d3335f 100644
--- a/hive/db/schema.py
+++ b/hive/db/schema.py
@@ -1,707 +1,700 @@
-"""Db schema definitions and setup routines."""
-
-import sqlalchemy as sa
-from sqlalchemy.sql import text as sql_text
-from sqlalchemy.types import SMALLINT
-from sqlalchemy.types import CHAR
-from sqlalchemy.types import VARCHAR
-from sqlalchemy.types import TEXT
-from sqlalchemy.types import BOOLEAN
-
-import logging
-log = logging.getLogger(__name__)
-
-#pylint: disable=line-too-long, too-many-lines, bad-whitespace
-
-# [DK] We changed and removed some tables, so DB_VERSION was upgraded to 18.
-DB_VERSION = 18
-
-def build_metadata():
-    """Build schema def with SqlAlchemy"""
-    metadata = sa.MetaData()
-
-    sa.Table(
-        'hive_blocks', metadata,
-        sa.Column('num', sa.Integer, primary_key=True, autoincrement=False),
-        sa.Column('hash', CHAR(40), nullable=False),
-        sa.Column('prev', CHAR(40)),
-        sa.Column('txs', SMALLINT, server_default='0', nullable=False),
-        sa.Column('ops', SMALLINT, server_default='0', nullable=False),
-        sa.Column('created_at', sa.DateTime, nullable=False),
-
-        sa.UniqueConstraint('hash', name='hive_blocks_ux1'),
-        sa.ForeignKeyConstraint(['prev'], ['hive_blocks.hash'], name='hive_blocks_fk1'),
-        sa.Index('hive_blocks_created_at_idx', 'created_at')
-    )
-
-    sa.Table(
-        'hive_accounts', metadata,
-        sa.Column('id', sa.Integer, primary_key=True),
-        sa.Column('name', VARCHAR(16, collation='C'), nullable=False),
-        sa.Column('created_at', sa.DateTime, nullable=False),
-        #sa.Column('block_num', sa.Integer, nullable=False),
-        sa.Column('reputation', sa.BigInteger, nullable=False, server_default='0'),
-        sa.Column('is_implicit', sa.Boolean, nullable=False, server_default='1'),
-        sa.Column('followers', sa.Integer, nullable=False, server_default='0'),
-        sa.Column('following', sa.Integer, nullable=False, server_default='0'),
-
-        sa.Column('rank', sa.Integer, nullable=False, server_default='0'),
-
-        sa.Column('lastread_at', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'),
-        sa.Column('posting_json_metadata', sa.Text),
-        sa.Column('json_metadata', sa.Text),
-
-        sa.UniqueConstraint('name', name='hive_accounts_ux1'),
-        sa.Index('hive_accounts_ix6', 'reputation')
-    )
-
-    sa.Table(
-        'hive_reputation_data', metadata,
-        sa.Column('id', sa.Integer, primary_key=True),
-        sa.Column('author_id', sa.Integer, nullable=False),
-        sa.Column('voter_id', sa.Integer, nullable=False),
-        sa.Column('permlink', sa.String(255, collation='C'), nullable=False),
-        sa.Column('rshares', sa.BigInteger, nullable=False),
-        sa.Column('block_num', sa.Integer,  nullable=False),
-
-        sa.Index('hive_reputation_data_author_permlink_voter_idx', 'author_id', 'permlink', 'voter_id'),
-        sa.Index('hive_reputation_data_block_num_idx', 'block_num')
-    )
-
-    sa.Table(
-        'hive_posts', metadata,
-        sa.Column('id', sa.Integer, primary_key=True),
-        sa.Column('root_id', sa.Integer, nullable=False), # records initially set to 0 are later updated to their own id
-        sa.Column('parent_id', sa.Integer, nullable=False),
-        sa.Column('author_id', sa.Integer, nullable=False),
-        sa.Column('permlink_id', sa.Integer, nullable=False),
-        sa.Column('category_id', sa.Integer, nullable=False),
-        sa.Column('community_id', sa.Integer, nullable=True),
-        sa.Column('created_at', sa.DateTime, nullable=False),
-        sa.Column('depth', SMALLINT, nullable=False),
-        sa.Column('counter_deleted', sa.Integer, nullable=False, server_default='0'),
-        sa.Column('is_pinned', BOOLEAN, nullable=False, server_default='0'),
-        sa.Column('is_muted', BOOLEAN, nullable=False, server_default='0'),
-        sa.Column('is_valid', BOOLEAN, nullable=False, server_default='1'),
-        sa.Column('promoted', sa.types.DECIMAL(10, 3), nullable=False, server_default='0'),
-
-        sa.Column('children', sa.Integer, nullable=False, server_default='0'),
-
-        # core stats/indexes
-        sa.Column('payout', sa.types.DECIMAL(10, 3), nullable=False, server_default='0'),
-        sa.Column('pending_payout', sa.types.DECIMAL(10, 3), nullable=False, server_default='0'),
-        sa.Column('payout_at', sa.DateTime, nullable=False, server_default='1970-01-01'),
-        sa.Column('last_payout_at', sa.DateTime, nullable=False, server_default='1970-01-01'),
-        sa.Column('updated_at', sa.DateTime, nullable=False, server_default='1970-01-01'),
-        sa.Column('is_paidout', BOOLEAN, nullable=False, server_default='0'),
-
-        # ui flags/filters
-        sa.Column('is_nsfw', BOOLEAN, nullable=False, server_default='0'),
-        sa.Column('is_declined', BOOLEAN, nullable=False, server_default='0'),
-        sa.Column('is_full_power', BOOLEAN, nullable=False, server_default='0'),
-        sa.Column('is_hidden', BOOLEAN, nullable=False, server_default='0'),
-
-        # important indexes
-        sa.Column('sc_trend', sa.Float(precision=6), nullable=False, server_default='0'),
-        sa.Column('sc_hot', sa.Float(precision=6), nullable=False, server_default='0'),
-
-        sa.Column('total_payout_value', sa.String(30), nullable=False, server_default='0.000 HBD'),
-        sa.Column('author_rewards', sa.BigInteger, nullable=False, server_default='0'),
-
-        sa.Column('author_rewards_hive', sa.BigInteger, nullable=False, server_default='0'),
-        sa.Column('author_rewards_hbd', sa.BigInteger, nullable=False, server_default='0'),
-        sa.Column('author_rewards_vests', sa.BigInteger, nullable=False, server_default='0'),
-
-        sa.Column('abs_rshares', sa.Numeric, nullable=False, server_default='0'),
-        sa.Column('vote_rshares', sa.Numeric, nullable=False, server_default='0'),
-        sa.Column('total_vote_weight', sa.Numeric, nullable=False, server_default='0'),
-        sa.Column('active', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'),
-        sa.Column('cashout_time', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'),
-        sa.Column('percent_hbd', sa.Integer, nullable=False, server_default='10000'),
-
-        sa.Column('curator_payout_value', sa.String(30), nullable=False, server_default='0.000 HBD'),
-        sa.Column('max_accepted_payout',  sa.String(30), nullable=False, server_default='1000000.000 HBD'),
-        sa.Column('allow_votes', BOOLEAN, nullable=False, server_default='1'),
-        sa.Column('allow_curation_rewards', BOOLEAN, nullable=False, server_default='1'),
-        sa.Column('beneficiaries', sa.JSON, nullable=False, server_default='[]'),
-        sa.Column('block_num', sa.Integer,  nullable=False ),
-        sa.Column('block_num_created', sa.Integer,  nullable=False ),
-
-        sa.ForeignKeyConstraint(['author_id'], ['hive_accounts.id'], name='hive_posts_fk1'),
-        sa.ForeignKeyConstraint(['root_id'], ['hive_posts.id'], name='hive_posts_fk2'),
-        sa.ForeignKeyConstraint(['parent_id'], ['hive_posts.id'], name='hive_posts_fk3'),
-        sa.UniqueConstraint('author_id', 'permlink_id', 'counter_deleted', name='hive_posts_ux1'),
-
-        sa.Index('hive_posts_depth_idx', 'depth'),
-
-        sa.Index('hive_posts_root_id_id_idx', 'root_id','id'),
-
-        sa.Index('hive_posts_parent_id_idx', 'parent_id'),
-        sa.Index('hive_posts_community_id_idx', 'community_id'),
-
-        sa.Index('hive_posts_category_id_idx', 'category_id'),
-        sa.Index('hive_posts_payout_at_idx', 'payout_at'),
-        sa.Index('hive_posts_payout_idx', 'payout'),
-        sa.Index('hive_posts_promoted_idx', 'promoted'),
-        sa.Index('hive_posts_sc_trend_id_is_paidout_idx', 'sc_trend', 'id', 'is_paidout'),
-        sa.Index('hive_posts_sc_hot_id_is_paidout_idx', 'sc_hot', 'id', 'is_paidout'),
-        sa.Index('hive_posts_author_id_created_at_idx', sa.text('author_id DESC, created_at DESC')),
-        sa.Index('hive_posts_block_num_idx', 'block_num'),
-        sa.Index('hive_posts_block_num_created_idx', 'block_num_created'),
-        sa.Index('hive_posts_cashout_time_id_idx', 'cashout_time', 'id'),
-        sa.Index('hive_posts_updated_at_idx', sa.text('updated_at DESC')),
-        sa.Index('hive_posts_payout_plus_pending_payout_id_is_paidout_idx', sa.text('(payout+pending_payout), id, is_paidout'))
-    )
-
-    sa.Table(
-        'hive_post_data', metadata,
-        sa.Column('id', sa.Integer, primary_key=True, autoincrement=False),
-        sa.Column('title', VARCHAR(512), nullable=False, server_default=''),
-        sa.Column('preview', VARCHAR(1024), nullable=False, server_default=''), # first 1k of 'body'
-        sa.Column('img_url', VARCHAR(1024), nullable=False, server_default=''), # first 'image' from 'json'
-        sa.Column('body', TEXT, nullable=False, server_default=''),
-        sa.Column('json', TEXT, nullable=False, server_default='')
-    )
-
-    sa.Table(
-        'hive_permlink_data', metadata,
-        sa.Column('id', sa.Integer, primary_key=True),
-        sa.Column('permlink', sa.String(255, collation='C'), nullable=False),
-        sa.UniqueConstraint('permlink', name='hive_permlink_data_permlink')
-    )
-
-    sa.Table(
-        'hive_category_data', metadata,
-        sa.Column('id', sa.Integer, primary_key=True),
-        sa.Column('category', sa.String(255, collation='C'), nullable=False),
-        sa.UniqueConstraint('category', name='hive_category_data_category')
-    )
-
-    sa.Table(
-        'hive_votes', metadata,
-        sa.Column('id', sa.BigInteger, primary_key=True),
-        sa.Column('post_id', sa.Integer, nullable=False),
-        sa.Column('voter_id', sa.Integer, nullable=False),
-        sa.Column('author_id', sa.Integer, nullable=False),
-        sa.Column('permlink_id', sa.Integer, nullable=False),
-        sa.Column('weight', sa.Numeric, nullable=False, server_default='0'),
-        sa.Column('rshares', sa.BigInteger, nullable=False, server_default='0'),
-        sa.Column('vote_percent', sa.Integer, server_default='0'),
-        sa.Column('last_update', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'),
-        sa.Column('num_changes', sa.Integer, server_default='0'),
-        sa.Column('block_num', sa.Integer,  nullable=False ),
-        sa.Column('is_effective', BOOLEAN, nullable=False, server_default='0'),
-
-        sa.UniqueConstraint('voter_id', 'author_id', 'permlink_id', name='hive_votes_voter_id_author_id_permlink_id_uk'),
-
-        sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_votes_fk1'),
-        sa.ForeignKeyConstraint(['voter_id'], ['hive_accounts.id'], name='hive_votes_fk2'),
-        sa.ForeignKeyConstraint(['author_id'], ['hive_accounts.id'], name='hive_votes_fk3'),
-        sa.ForeignKeyConstraint(['permlink_id'], ['hive_permlink_data.id'], name='hive_votes_fk4'),
-        sa.ForeignKeyConstraint(['block_num'], ['hive_blocks.num'], name='hive_votes_fk5'),
-
-        sa.Index('hive_votes_voter_id_post_id_idx', 'voter_id', 'post_id'), # this index is probably redundant with hive_votes_voter_id_last_update_idx, since both start with voter_id.
-        sa.Index('hive_votes_voter_id_last_update_idx', 'voter_id', 'last_update'), # this index is critical for hive_accounts_info_view performance
-        sa.Index('hive_votes_post_id_voter_id_idx', 'post_id', 'voter_id'),
-        sa.Index('hive_votes_block_num_idx', 'block_num') # this is also important for hive_accounts_info_view 
-    )
-
-    sa.Table(
-        'hive_tag_data', metadata,
-        sa.Column('id', sa.Integer, nullable=False, primary_key=True),
-        sa.Column('tag', VARCHAR(64, collation='C'), nullable=False, server_default=''),
-        sa.UniqueConstraint('tag', name='hive_tag_data_ux1')
-    )
-
-    sa.Table(
-        'hive_post_tags', metadata,
-        sa.Column('post_id', sa.Integer, nullable=False),
-        sa.Column('tag_id', sa.Integer, nullable=False),
-        sa.PrimaryKeyConstraint('post_id', 'tag_id', name='hive_post_tags_pk1'),
-
-        sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_post_tags_fk1'),
-        sa.ForeignKeyConstraint(['tag_id'], ['hive_tag_data.id'], name='hive_post_tags_fk2'),
-
-        sa.Index('hive_post_tags_tag_id_idx', 'tag_id')
-    )
-
-    sa.Table(
-        'hive_follows', metadata,
-        sa.Column('id', sa.Integer, primary_key=True ),
-        sa.Column('follower', sa.Integer, nullable=False),
-        sa.Column('following', sa.Integer, nullable=False),
-        sa.Column('state', SMALLINT, nullable=False, server_default='1'),
-        sa.Column('created_at', sa.DateTime, nullable=False),
-        sa.Column('blacklisted', sa.Boolean, nullable=False, server_default='0'),
-        sa.Column('follow_blacklists', sa.Boolean, nullable=False, server_default='0'),
-        sa.Column('follow_muted', BOOLEAN, nullable=False, server_default='0'),
-        sa.Column('block_num', sa.Integer,  nullable=False ),
-
-        sa.UniqueConstraint('following', 'follower', name='hive_follows_ux1'), # core
-        sa.ForeignKeyConstraint(['block_num'], ['hive_blocks.num'], name='hive_follows_fk1'),
-        sa.Index('hive_follows_ix5a', 'following', 'state', 'created_at', 'follower'),
-        sa.Index('hive_follows_ix5b', 'follower', 'state', 'created_at', 'following'),
-        sa.Index('hive_follows_block_num_idx', 'block_num'),
-        sa.Index('hive_follows_created_at_idx', 'created_at'),
-    )
-
-    sa.Table(
-        'hive_reblogs', metadata,
-        sa.Column('id', sa.Integer, primary_key=True ),
-        sa.Column('blogger_id', sa.Integer, nullable=False),
-        sa.Column('post_id', sa.Integer, nullable=False),
-        sa.Column('created_at', sa.DateTime, nullable=False),
-        sa.Column('block_num', sa.Integer,  nullable=False ),
-
-        sa.ForeignKeyConstraint(['blogger_id'], ['hive_accounts.id'], name='hive_reblogs_fk1'),
-        sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_reblogs_fk2'),
-        sa.ForeignKeyConstraint(['block_num'], ['hive_blocks.num'], name='hive_reblogs_fk3'),
-        sa.UniqueConstraint('blogger_id', 'post_id', name='hive_reblogs_ux1'), # core
-        sa.Index('hive_reblogs_post_id', 'post_id'),
-        sa.Index('hive_reblogs_block_num_idx', 'block_num'),
-        sa.Index('hive_reblogs_created_at_idx', 'created_at')
-    )
-
-    sa.Table(
-        'hive_payments', metadata,
-        sa.Column('id', sa.Integer, primary_key=True),
-        sa.Column('block_num', sa.Integer, nullable=False),
-        sa.Column('tx_idx', SMALLINT, nullable=False),
-        sa.Column('post_id', sa.Integer, nullable=False),
-        sa.Column('from_account', sa.Integer, nullable=False),
-        sa.Column('to_account', sa.Integer, nullable=False),
-        sa.Column('amount', sa.types.DECIMAL(10, 3), nullable=False),
-        sa.Column('token', VARCHAR(5), nullable=False),
-
-        sa.ForeignKeyConstraint(['from_account'], ['hive_accounts.id'], name='hive_payments_fk1'),
-        sa.ForeignKeyConstraint(['to_account'], ['hive_accounts.id'], name='hive_payments_fk2'),
-        sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_payments_fk3'),
-        sa.Index('hive_payments_from', 'from_account'),
-        sa.Index('hive_payments_to', 'to_account'),
-        sa.Index('hive_payments_post_id', 'post_id'),
-    )
-
-    sa.Table(
-        'hive_feed_cache', metadata,
-        sa.Column('post_id', sa.Integer, nullable=False),
-        sa.Column('account_id', sa.Integer, nullable=False),
-        sa.Column('created_at', sa.DateTime, nullable=False),
-        sa.Column('block_num',    sa.Integer,  nullable=False),
-        sa.PrimaryKeyConstraint('account_id', 'post_id', name='hive_feed_cache_pk'),
-        sa.ForeignKeyConstraint(['block_num'], ['hive_blocks.num'], name='hive_feed_cache_fk1'),
-
-        sa.Index('hive_feed_cache_block_num_idx', 'block_num'),
-        sa.Index('hive_feed_cache_created_at_idx', 'created_at')
-    )
-
-    sa.Table(
-        'hive_state', metadata,
-        sa.Column('block_num', sa.Integer, primary_key=True, autoincrement=False),
-        sa.Column('db_version', sa.Integer, nullable=False),
-        sa.Column('steem_per_mvest', sa.types.DECIMAL(14, 6), nullable=False),
-        sa.Column('usd_per_steem', sa.types.DECIMAL(14, 6), nullable=False),
-        sa.Column('sbd_per_steem', sa.types.DECIMAL(14, 6), nullable=False),
-        sa.Column('dgpo', sa.Text, nullable=False),
-    )
-
-    sa.Table(
-        'hive_posts_api_helper', metadata,
-        sa.Column('id', sa.Integer, primary_key=True, autoincrement = False),
-        sa.Column('author_s_permlink', VARCHAR(275, collation='C'), nullable=False), # concatenation of author '/' permlink
-        sa.Index('hive_posts_api_helper_author_s_permlink_idx', 'author_s_permlink')
-    )
-
-    sa.Table(
-        'hive_mentions', metadata,
-        sa.Column('id', sa.Integer, primary_key=True),
-        sa.Column('post_id', sa.Integer, nullable=False),
-        sa.Column('account_id', sa.Integer, nullable=False),
-        sa.Column('block_num', sa.Integer, nullable=False),
-
-        sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_mentions_fk1'),
-        sa.ForeignKeyConstraint(['account_id'], ['hive_accounts.id'], name='hive_mentions_fk2'),
-
-        sa.Index('hive_mentions_account_id_idx', 'account_id'),
-        sa.UniqueConstraint('post_id', 'account_id', 'block_num', name='hive_mentions_ux1')
-    )
-
-    metadata = build_metadata_community(metadata)
-
-    return metadata
-
-def build_metadata_community(metadata=None):
-    """Build community schema defs"""
-    if not metadata:
-        metadata = sa.MetaData()
-
-    sa.Table(
-        'hive_communities', metadata,
-        sa.Column('id',          sa.Integer,      primary_key=True, autoincrement=False),
-        sa.Column('type_id',     SMALLINT,        nullable=False),
-        sa.Column('lang',        CHAR(2),         nullable=False, server_default='en'),
-        sa.Column('name',        VARCHAR(16, collation='C'), nullable=False),
-        sa.Column('title',       sa.String(32),   nullable=False, server_default=''),
-        sa.Column('created_at',  sa.DateTime,     nullable=False),
-        sa.Column('sum_pending', sa.Integer,      nullable=False, server_default='0'),
-        sa.Column('num_pending', sa.Integer,      nullable=False, server_default='0'),
-        sa.Column('num_authors', sa.Integer,      nullable=False, server_default='0'),
-        sa.Column('rank',        sa.Integer,      nullable=False, server_default='0'),
-        sa.Column('subscribers', sa.Integer,      nullable=False, server_default='0'),
-        sa.Column('is_nsfw',     BOOLEAN,         nullable=False, server_default='0'),
-        sa.Column('about',       sa.String(120),  nullable=False, server_default=''),
-        sa.Column('primary_tag', sa.String(32),   nullable=False, server_default=''),
-        sa.Column('category',    sa.String(32),   nullable=False, server_default=''),
-        sa.Column('avatar_url',  sa.String(1024), nullable=False, server_default=''),
-        sa.Column('description', sa.String(5000), nullable=False, server_default=''),
-        sa.Column('flag_text',   sa.String(5000), nullable=False, server_default=''),
-        sa.Column('settings',    TEXT,            nullable=False, server_default='{}'),
-        sa.Column('block_num', sa.Integer,  nullable=False ),
-
-        sa.UniqueConstraint('name', name='hive_communities_ux1'),
-        sa.Index('hive_communities_ix1', 'rank', 'id'),
-        sa.Index('hive_communities_block_num_idx', 'block_num')
-    )
-
-    sa.Table(
-        'hive_roles', metadata,
-        sa.Column('account_id',   sa.Integer,     nullable=False),
-        sa.Column('community_id', sa.Integer,     nullable=False),
-        sa.Column('created_at',   sa.DateTime,    nullable=False),
-        sa.Column('role_id',      SMALLINT,       nullable=False, server_default='0'),
-        sa.Column('title',        sa.String(140), nullable=False, server_default=''),
-
-        sa.PrimaryKeyConstraint('account_id', 'community_id', name='hive_roles_pk'),
-        sa.Index('hive_roles_ix1', 'community_id', 'account_id', 'role_id'),
-    )
-
-    sa.Table(
-        'hive_subscriptions', metadata,
-        sa.Column('id', sa.Integer, primary_key=True),
-        sa.Column('account_id',   sa.Integer,  nullable=False),
-        sa.Column('community_id', sa.Integer,  nullable=False),
-        sa.Column('created_at',   sa.DateTime, nullable=False),
-        sa.Column('block_num', sa.Integer,  nullable=False ),
-
-        sa.UniqueConstraint('account_id', 'community_id', name='hive_subscriptions_ux1'),
-        sa.Index('hive_subscriptions_community_idx', 'community_id'),
-        sa.Index('hive_subscriptions_block_num_idx', 'block_num')
-    )
-
-    sa.Table(
-        'hive_notifs', metadata,
-        sa.Column('id',           sa.Integer,  primary_key=True),
-        sa.Column('block_num',    sa.Integer,  nullable=False),
-        sa.Column('type_id',      SMALLINT,    nullable=False),
-        sa.Column('score',        SMALLINT,    nullable=False),
-        sa.Column('created_at',   sa.DateTime, nullable=False),
-        sa.Column('src_id',       sa.Integer,  nullable=True),
-        sa.Column('dst_id',       sa.Integer,  nullable=True),
-        sa.Column('post_id',      sa.Integer,  nullable=True),
-        sa.Column('community_id', sa.Integer,  nullable=True),
-        sa.Column('payload',      sa.Text,     nullable=True),
-
-        sa.Index('hive_notifs_ix1', 'dst_id',                  'id', postgresql_where=sql_text("dst_id IS NOT NULL")),
-        sa.Index('hive_notifs_ix2', 'community_id',            'id', postgresql_where=sql_text("community_id IS NOT NULL")),
-        sa.Index('hive_notifs_ix3', 'community_id', 'type_id', 'id', postgresql_where=sql_text("community_id IS NOT NULL")),
-        sa.Index('hive_notifs_ix4', 'community_id', 'post_id', 'type_id', 'id', postgresql_where=sql_text("community_id IS NOT NULL AND post_id IS NOT NULL")),
-        sa.Index('hive_notifs_ix5', 'post_id', 'type_id', 'dst_id', 'src_id', postgresql_where=sql_text("post_id IS NOT NULL AND type_id IN (16,17)")), # filter: dedupe
-        sa.Index('hive_notifs_ix6', 'dst_id', 'created_at', 'score', 'id', postgresql_where=sql_text("dst_id IS NOT NULL")), # unread
-    )
-
-    sa.Table('hive_notification_cache', metadata,
-        sa.Column('id', sa.BigInteger, primary_key=True),
-        sa.Column('block_num', sa.Integer, nullable = False),
-        sa.Column('type_id', sa.Integer, nullable = False),
-        sa.Column('dst', sa.Integer, nullable=True), # dst account id (except for persistent notifs coming from hive_notifs)
-        sa.Column('src', sa.Integer, nullable=True), # src account id
-        sa.Column('dst_post_id', sa.Integer, nullable=True), # destination post id
-        sa.Column('post_id', sa.Integer, nullable=True),
-        sa.Column('created_at', sa.DateTime, nullable=False), # notification creation time
-        sa.Column('score', sa.Integer, nullable=False),
-        sa.Column('community_title', sa.String(32), nullable=True),
-        sa.Column('community', sa.String(16), nullable=True),
-        sa.Column('payload', sa.String, nullable=True),
-
-        sa.Index('hive_notification_cache_block_num_idx', 'block_num'),
-        sa.Index('hive_notification_cache_dst_score_idx', 'dst', 'score', postgresql_where=sql_text("dst IS NOT NULL"))
-
-    )
-
-    return metadata
-
-
-def teardown(db):
-    """Drop all tables"""
-    build_metadata().drop_all(db.engine())
-
-def drop_fk(db):
-    db.query_no_return("START TRANSACTION")
-    for table in build_metadata().sorted_tables:
-        for fk in table.foreign_keys:
-            sql = """ALTER TABLE {} DROP CONSTRAINT IF EXISTS {}""".format(table.name, fk.name)
-            db.query_no_return(sql)
-    db.query_no_return("COMMIT")
-
-def create_fk(db):
-    from sqlalchemy.schema import AddConstraint
-    from sqlalchemy import text
-    connection = db.engine().connect()
-    connection.execute(text("START TRANSACTION"))
-    for table in build_metadata().sorted_tables:
-        for fk in table.foreign_keys:
-            connection.execute(AddConstraint(fk.constraint))
-    connection.execute(text("COMMIT"))
-
-def setup(db):
-    """Creates all tables and seed data"""
-    # initialize schema
-    build_metadata().create_all(db.engine())
-
-    # tune auto vacuum/analyze
-    reset_autovac(db)
-
-    # sets FILLFACTOR:
-    set_fillfactor(db)
-
-    # default rows
-    sqls = [
-        "INSERT INTO hive_state (block_num, db_version, steem_per_mvest, usd_per_steem, sbd_per_steem, dgpo) VALUES (0, %d, 0, 0, 0, '')" % DB_VERSION,
-        "INSERT INTO hive_blocks (num, hash, created_at) VALUES (0, '0000000000000000000000000000000000000000', '2016-03-24 16:04:57')",
-
-        "INSERT INTO hive_permlink_data (id, permlink) VALUES (0, '')",
-        "INSERT INTO hive_category_data (id, category) VALUES (0, '')",
-        "INSERT INTO hive_tag_data (id, tag) VALUES (0, '')",
-        "INSERT INTO hive_accounts (id, name, created_at) VALUES (0, '', '1970-01-01T00:00:00')",
-
-        "INSERT INTO hive_accounts (name, created_at) VALUES ('miners',    '2016-03-24 16:05:00')",
-        "INSERT INTO hive_accounts (name, created_at) VALUES ('null',      '2016-03-24 16:05:00')",
-        "INSERT INTO hive_accounts (name, created_at) VALUES ('temp',      '2016-03-24 16:05:00')",
-        "INSERT INTO hive_accounts (name, created_at) VALUES ('initminer', '2016-03-24 16:05:00')",
-
-        """
-        INSERT INTO
-            public.hive_posts(id, root_id, parent_id, author_id, permlink_id, category_id,
-                community_id, created_at, depth, block_num, block_num_created
-            )
-        VALUES
-            (0, 0, 0, 0, 0, 0, 0, now(), 0, 0, 0);
-        """]
-    for sql in sqls:
-        db.query(sql)
-
-    sql = "CREATE INDEX hive_communities_ft1 ON hive_communities USING GIN (to_tsvector('english', title || ' ' || about))"
-    db.query(sql)
-
-    # find_comment_id definition moved to utility_functions.sql
-    # find_account_id definition moved to utility_functions.sql
-
-    # process_hive_post_operation definition moved to hive_post_operations.sql
-    # delete_hive_post moved to hive_post_operations.sql
-
-    # In original hivemind, a value of 'active_at' was calculated from
-    # max
-    #   {
-    #     created             ( account_create_operation ),
-    #     last_account_update ( account_update_operation/account_update2_operation ),
-    #     last_post           ( comment_operation - only creation )
-    #     last_root_post      ( comment_operation - only creation + only ROOT ),
-    #     last_vote_time      ( vote_operation )
-    #   }
-    # To simplify calculations, `last_account_update` is not taken into consideration, because account updates are very rare,
-    # and posting/voting after an account update fixes the `active_at` value immediately.
-
-    # hive_accounts_view definition moved to hive_accounts_view.sql
-
-    # hive_posts_view definition moved to hive_posts_view.sql
-
-    # update_hive_posts_root_id moved to update_hive_posts_root_id.sql
-
-    # hive_votes_view definition moved into hive_votes_view.sql
-
-    # database_api_vote, find_votes, list_votes_by_voter_comment, list_votes_by_comment_voter moved into database_api_list_votes.sql
-
-    sql = """
-          DO $$
-          BEGIN
-            EXECUTE 'ALTER DATABASE '||current_database()||' SET join_collapse_limit TO 16';
-            EXECUTE 'ALTER DATABASE '||current_database()||' SET from_collapse_limit TO 16';
-          END
-          $$;
-          """
-    db.query_no_return(sql)
-
-    sql = """
-          CREATE TABLE IF NOT EXISTS hive_db_patch_level
-          (
-            level SERIAL NOT NULL PRIMARY KEY,
-            patch_date timestamp without time zone NOT NULL,
-            patched_to_revision TEXT
-          );
-    """
-    db.query_no_return(sql)
-    sql = """
-          INSERT INTO hive_db_patch_level
-          (patch_date, patched_to_revision)
-          values
-          (now(), '{}');
-          """
-
-    from hive.version import GIT_REVISION
-    db.query_no_return(sql.format(GIT_REVISION))
-
-    # max_time_stamp definition moved into utility_functions.sql
-
-    # get_discussion definition moved to bridge_get_discussion.sql
-
-    sql_scripts = [
-      "utility_functions.sql",
-      "hive_accounts_view.sql",
-      "hive_accounts_info_view.sql",
-      "hive_posts_base_view.sql",
-      "hive_posts_view.sql",
-      "hive_votes_view.sql",
-      "hive_post_operations.sql",
-      "head_block_time.sql",
-      "update_feed_cache.sql",
-      "payout_stats_view.sql",
-      "update_hive_posts_mentions.sql",
-      "find_tag_id.sql",
-      "bridge_get_ranked_post_type.sql",
-      "bridge_get_ranked_post_for_communities.sql",
-      "bridge_get_ranked_post_for_observer_communities.sql",
-      "bridge_get_ranked_post_for_tag.sql",
-      "bridge_get_ranked_post_for_all.sql",
-      "calculate_account_reputations.sql",
-      "update_communities_rank.sql",
-      "delete_hive_posts_mentions.sql",
-      "notifications_view.sql",
-      "notifications_api.sql",
-      "bridge_get_account_posts_by_comments.sql",
-      "bridge_get_account_posts_by_payout.sql",
-      "bridge_get_account_posts_by_posts.sql",
-      "bridge_get_account_posts_by_replies.sql",
-      "bridge_get_relationship_between_accounts.sql",
-      "bridge_get_post.sql",
-      "bridge_get_discussion.sql",
-      "condenser_api_post_type.sql",
-      "condenser_api_post_ex_type.sql",
-      "condenser_get_blog.sql",
-      "condenser_get_content.sql",
-      "condenser_get_discussions_by_created.sql",
-      "condenser_get_discussions_by_blog.sql",
-      "hot_and_trends.sql",
-      "condenser_get_discussions_by_trending.sql",
-      "condenser_get_discussions_by_hot.sql",
-      "condenser_get_discussions_by_promoted.sql",
-      "condenser_get_post_discussions_by_payout.sql",
-      "condenser_get_comment_discussions_by_payout.sql",
-      "update_hive_posts_children_count.sql",
-      "update_hive_posts_api_helper.sql",
-      "database_api_list_comments.sql",
-      "database_api_list_votes.sql",
-      "update_posts_rshares.sql",
-      "update_hive_post_root_id.sql",
-      "condenser_get_by_replies_to_account.sql",
-      "condenser_get_by_account_comments.sql",
-      "condenser_get_by_blog_without_reblog.sql",
-      "condenser_get_by_feed_with_reblog.sql",
-      "condenser_get_by_blog.sql",
-      "bridge_get_account_posts_by_blog.sql",
-      "condenser_get_follow_counts.sql",
-      "condenser_get_names_by_followers.sql",
-      "condenser_get_names_by_following.sql",
-      "condenser_get_names_by_reblogged.sql",
-      "condenser_get_discussions_by_comments.sql",
-      "condenser_get_account_reputations.sql"
-
-    ]
-    from os.path import dirname, realpath
-    dir_path = dirname(realpath(__file__))
-    for script in sql_scripts:
-        execute_sql_script(db.query_no_return, "{}/sql_scripts/{}".format(dir_path, script))
-
-
-def reset_autovac(db):
-    """Initializes/resets per-table autovacuum/autoanalyze params.
-
-    We use a scale factor of 0 and specify exact threshold tuple counts,
-    per-table, in the format (autovacuum_threshold, autoanalyze_threshold)."""
-
-    autovac_config = { #    vacuum  analyze
-        'hive_accounts':    (50000, 100000),
-        'hive_posts':       (2500, 10000),
-        'hive_post_tags':   (5000, 10000),
-        'hive_follows':     (5000, 5000),
-        'hive_feed_cache':  (5000, 5000),
-        'hive_blocks':      (5000, 25000),
-        'hive_reblogs':     (5000, 5000),
-        'hive_payments':    (5000, 5000),
-    }
-
-    for table, (n_vacuum, n_analyze) in autovac_config.items():
-        sql = """ALTER TABLE %s SET (autovacuum_vacuum_scale_factor = 0,
-                                     autovacuum_vacuum_threshold = %s,
-                                     autovacuum_analyze_scale_factor = 0,
-                                     autovacuum_analyze_threshold = %s)"""
-        db.query(sql % (table, n_vacuum, n_analyze))
-
-
-def set_fillfactor(db):
-    """Initializes/resets FILLFACTOR for tables which are intesively updated"""
-
-    fillfactor_config = {
-        'hive_posts': 70,
-        'hive_post_data': 70,
-        'hive_votes': 70,
-        'hive_reputation_data': 50
-    }
-
-    for table, fillfactor in fillfactor_config.items():
-        sql = """ALTER TABLE {} SET (FILLFACTOR = {})"""
-        db.query(sql.format(table, fillfactor))
-
-def set_logged_table_attribute(db, logged):
-    """Initializes/resets LOGGED/UNLOGGED attribute for tables which are intesively updated"""
-
-    logged_config = [
-        'hive_accounts',
-        'hive_permlink_data',
-        'hive_post_tags',
-        'hive_posts',
-        'hive_post_data',
-        'hive_votes',
-        'hive_reputation_data'
-    ]
-
-    for table in logged_config:
-        log.info("Setting {} attribute on a table: {}".format('LOGGED' if logged else 'UNLOGGED', table))
-        sql = """ALTER TABLE {} SET {}"""
-        db.query_no_return(sql.format(table, 'LOGGED' if logged else 'UNLOGGED'))
-
-def execute_sql_script(query_executor, path_to_script):
-    """ Load and execute sql script from file
-        Params:
-          query_executor - callable to execute query with
-          path_to_script - path to script
-        Returns:
-          depending on query_executor
-
-        Example:
-          print(execute_sql_script(db.query_row, "./test.sql"))
-          where test_sql: SELECT * FROM hive_state WHERE block_num = 0;
-          will return something like: (0, 18, Decimal('0.000000'), Decimal('0.000000'), Decimal('0.000000'), '')
-    """
-    try:
-        sql_script = None
-        with open(path_to_script, 'r') as sql_script_file:
-            sql_script = sql_script_file.read()
-        if sql_script is not None:
-            return query_executor(sql_script)
-    except Exception as ex:
-        log.exception("Error running sql script: {}".format(ex))
-        raise ex
-    return None
+"""Db schema definitions and setup routines."""
+
+import sqlalchemy as sa
+from sqlalchemy.sql import text as sql_text
+from sqlalchemy.types import SMALLINT
+from sqlalchemy.types import CHAR
+from sqlalchemy.types import VARCHAR
+from sqlalchemy.types import TEXT
+from sqlalchemy.types import BOOLEAN
+
+import logging
+log = logging.getLogger(__name__)
+
+#pylint: disable=line-too-long, too-many-lines, bad-whitespace
+
+# [DK] We changed and removed some tables, so DB_VERSION was upgraded to 18.
+DB_VERSION = 18
+
+def build_metadata():
+    """Build schema def with SqlAlchemy"""
+    metadata = sa.MetaData()
+
+    sa.Table(
+        'hive_blocks', metadata,
+        sa.Column('num', sa.Integer, primary_key=True, autoincrement=False),
+        sa.Column('hash', CHAR(40), nullable=False),
+        sa.Column('prev', CHAR(40)),
+        sa.Column('txs', SMALLINT, server_default='0', nullable=False),
+        sa.Column('ops', SMALLINT, server_default='0', nullable=False),
+        sa.Column('created_at', sa.DateTime, nullable=False),
+
+        sa.UniqueConstraint('hash', name='hive_blocks_ux1'),
+        sa.ForeignKeyConstraint(['prev'], ['hive_blocks.hash'], name='hive_blocks_fk1'),
+        sa.Index('hive_blocks_created_at_idx', 'created_at')
+    )
+
+    sa.Table(
+        'hive_accounts', metadata,
+        sa.Column('id', sa.Integer, primary_key=True),
+        sa.Column('name', VARCHAR(16, collation='C'), nullable=False),
+        sa.Column('created_at', sa.DateTime, nullable=False),
+        #sa.Column('block_num', sa.Integer, nullable=False),
+        sa.Column('reputation', sa.BigInteger, nullable=False, server_default='0'),
+        sa.Column('is_implicit', sa.Boolean, nullable=False, server_default='1'),
+        sa.Column('followers', sa.Integer, nullable=False, server_default='0'),
+        sa.Column('following', sa.Integer, nullable=False, server_default='0'),
+
+        sa.Column('rank', sa.Integer, nullable=False, server_default='0'),
+
+        sa.Column('lastread_at', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'),
+        sa.Column('posting_json_metadata', sa.Text),
+        sa.Column('json_metadata', sa.Text),
+
+        sa.UniqueConstraint('name', name='hive_accounts_ux1'),
+        sa.Index('hive_accounts_ix6', 'reputation')
+    )
+
+    sa.Table(
+        'hive_reputation_data', metadata,
+        sa.Column('id', sa.Integer, primary_key=True),
+        sa.Column('author_id', sa.Integer, nullable=False),
+        sa.Column('voter_id', sa.Integer, nullable=False),
+        sa.Column('permlink', sa.String(255, collation='C'), nullable=False),
+        sa.Column('rshares', sa.BigInteger, nullable=False),
+        sa.Column('block_num', sa.Integer,  nullable=False),
+
+        sa.Index('hive_reputation_data_author_permlink_voter_idx', 'author_id', 'permlink', 'voter_id'),
+        sa.Index('hive_reputation_data_block_num_idx', 'block_num')
+    )
+
+    sa.Table(
+        'hive_posts', metadata,
+        sa.Column('id', sa.Integer, primary_key=True),
+        sa.Column('root_id', sa.Integer, nullable=False), # records initially set to 0 are later updated to their own id
+        sa.Column('parent_id', sa.Integer, nullable=False),
+        sa.Column('author_id', sa.Integer, nullable=False),
+        sa.Column('permlink_id', sa.Integer, nullable=False),
+        sa.Column('category_id', sa.Integer, nullable=False),
+        sa.Column('community_id', sa.Integer, nullable=True),
+        sa.Column('created_at', sa.DateTime, nullable=False),
+        sa.Column('depth', SMALLINT, nullable=False),
+        sa.Column('counter_deleted', sa.Integer, nullable=False, server_default='0'),
+        sa.Column('is_pinned', BOOLEAN, nullable=False, server_default='0'),
+        sa.Column('is_muted', BOOLEAN, nullable=False, server_default='0'),
+        sa.Column('is_valid', BOOLEAN, nullable=False, server_default='1'),
+        sa.Column('promoted', sa.types.DECIMAL(10, 3), nullable=False, server_default='0'),
+
+        sa.Column('children', sa.Integer, nullable=False, server_default='0'),
+
+        # core stats/indexes
+        sa.Column('payout', sa.types.DECIMAL(10, 3), nullable=False, server_default='0'),
+        sa.Column('pending_payout', sa.types.DECIMAL(10, 3), nullable=False, server_default='0'),
+        sa.Column('payout_at', sa.DateTime, nullable=False, server_default='1970-01-01'),
+        sa.Column('last_payout_at', sa.DateTime, nullable=False, server_default='1970-01-01'),
+        sa.Column('updated_at', sa.DateTime, nullable=False, server_default='1970-01-01'),
+        sa.Column('is_paidout', BOOLEAN, nullable=False, server_default='0'),
+
+        # ui flags/filters
+        sa.Column('is_nsfw', BOOLEAN, nullable=False, server_default='0'),
+        sa.Column('is_declined', BOOLEAN, nullable=False, server_default='0'),
+        sa.Column('is_full_power', BOOLEAN, nullable=False, server_default='0'),
+        sa.Column('is_hidden', BOOLEAN, nullable=False, server_default='0'),
+
+        # important indexes
+        sa.Column('sc_trend', sa.Float(precision=6), nullable=False, server_default='0'),
+        sa.Column('sc_hot', sa.Float(precision=6), nullable=False, server_default='0'),
+
+        sa.Column('total_payout_value', sa.String(30), nullable=False, server_default='0.000 HBD'),
+        sa.Column('author_rewards', sa.BigInteger, nullable=False, server_default='0'),
+
+        sa.Column('author_rewards_hive', sa.BigInteger, nullable=False, server_default='0'),
+        sa.Column('author_rewards_hbd', sa.BigInteger, nullable=False, server_default='0'),
+        sa.Column('author_rewards_vests', sa.BigInteger, nullable=False, server_default='0'),
+
+        sa.Column('abs_rshares', sa.Numeric, nullable=False, server_default='0'),
+        sa.Column('vote_rshares', sa.Numeric, nullable=False, server_default='0'),
+        sa.Column('total_vote_weight', sa.Numeric, nullable=False, server_default='0'),
+        sa.Column('active', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'),
+        sa.Column('cashout_time', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'),
+        sa.Column('percent_hbd', sa.Integer, nullable=False, server_default='10000'),
+
+        sa.Column('curator_payout_value', sa.String(30), nullable=False, server_default='0.000 HBD'),
+        sa.Column('max_accepted_payout',  sa.String(30), nullable=False, server_default='1000000.000 HBD'),
+        sa.Column('allow_votes', BOOLEAN, nullable=False, server_default='1'),
+        sa.Column('allow_curation_rewards', BOOLEAN, nullable=False, server_default='1'),
+        sa.Column('beneficiaries', sa.JSON, nullable=False, server_default='[]'),
+        sa.Column('block_num', sa.Integer,  nullable=False ),
+        sa.Column('block_num_created', sa.Integer,  nullable=False ),
+
+        sa.ForeignKeyConstraint(['author_id'], ['hive_accounts.id'], name='hive_posts_fk1'),
+        sa.ForeignKeyConstraint(['root_id'], ['hive_posts.id'], name='hive_posts_fk2'),
+        sa.ForeignKeyConstraint(['parent_id'], ['hive_posts.id'], name='hive_posts_fk3'),
+        sa.UniqueConstraint('author_id', 'permlink_id', 'counter_deleted', name='hive_posts_ux1'),
+
+        sa.Index('hive_posts_depth_idx', 'depth'),
+
+        sa.Index('hive_posts_root_id_id_idx', 'root_id','id'),
+
+        sa.Index('hive_posts_parent_id_idx', 'parent_id'),
+        sa.Index('hive_posts_community_id_idx', 'community_id'),
+
+        sa.Index('hive_posts_category_id_idx', 'category_id'),
+        sa.Index('hive_posts_payout_at_idx', 'payout_at'),
+        sa.Index('hive_posts_payout_idx', 'payout'),
+        sa.Index('hive_posts_promoted_idx', 'promoted'),
+        sa.Index('hive_posts_sc_trend_id_is_paidout_idx', 'sc_trend', 'id', 'is_paidout'),
+        sa.Index('hive_posts_sc_hot_id_is_paidout_idx', 'sc_hot', 'id', 'is_paidout'),
+        sa.Index('hive_posts_author_id_created_at_idx', sa.text('author_id DESC, created_at DESC')),
+        sa.Index('hive_posts_block_num_idx', 'block_num'),
+        sa.Index('hive_posts_block_num_created_idx', 'block_num_created'),
+        sa.Index('hive_posts_cashout_time_id_idx', 'cashout_time', 'id'),
+        sa.Index('hive_posts_updated_at_idx', sa.text('updated_at DESC')),
+        sa.Index('hive_posts_payout_plus_pending_payout_id_is_paidout_idx', sa.text('(payout+pending_payout), id, is_paidout'))
+    )
+
+    sa.Table(
+        'hive_post_data', metadata,
+        sa.Column('id', sa.Integer, primary_key=True, autoincrement=False),
+        sa.Column('title', VARCHAR(512), nullable=False, server_default=''),
+        sa.Column('preview', VARCHAR(1024), nullable=False, server_default=''), # first 1k of 'body'
+        sa.Column('img_url', VARCHAR(1024), nullable=False, server_default=''), # first 'image' from 'json'
+        sa.Column('body', TEXT, nullable=False, server_default=''),
+        sa.Column('json', TEXT, nullable=False, server_default='')
+    )
+
+    sa.Table(
+        'hive_permlink_data', metadata,
+        sa.Column('id', sa.Integer, primary_key=True),
+        sa.Column('permlink', sa.String(255, collation='C'), nullable=False),
+        sa.UniqueConstraint('permlink', name='hive_permlink_data_permlink')
+    )
+
+    sa.Table(
+        'hive_category_data', metadata,
+        sa.Column('id', sa.Integer, primary_key=True),
+        sa.Column('category', sa.String(255, collation='C'), nullable=False),
+        sa.UniqueConstraint('category', name='hive_category_data_category')
+    )
+
+    sa.Table(
+        'hive_votes', metadata,
+        sa.Column('id', sa.BigInteger, primary_key=True),
+        sa.Column('post_id', sa.Integer, nullable=False),
+        sa.Column('voter_id', sa.Integer, nullable=False),
+        sa.Column('author_id', sa.Integer, nullable=False),
+        sa.Column('permlink_id', sa.Integer, nullable=False),
+        sa.Column('weight', sa.Numeric, nullable=False, server_default='0'),
+        sa.Column('rshares', sa.BigInteger, nullable=False, server_default='0'),
+        sa.Column('vote_percent', sa.Integer, server_default='0'),
+        sa.Column('last_update', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'),
+        sa.Column('num_changes', sa.Integer, server_default='0'),
+        sa.Column('block_num', sa.Integer,  nullable=False ),
+        sa.Column('is_effective', BOOLEAN, nullable=False, server_default='0'),
+
+        sa.UniqueConstraint('voter_id', 'author_id', 'permlink_id', name='hive_votes_voter_id_author_id_permlink_id_uk'),
+
+        sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_votes_fk1'),
+        sa.ForeignKeyConstraint(['voter_id'], ['hive_accounts.id'], name='hive_votes_fk2'),
+        sa.ForeignKeyConstraint(['author_id'], ['hive_accounts.id'], name='hive_votes_fk3'),
+        sa.ForeignKeyConstraint(['permlink_id'], ['hive_permlink_data.id'], name='hive_votes_fk4'),
+        sa.ForeignKeyConstraint(['block_num'], ['hive_blocks.num'], name='hive_votes_fk5'),
+
+        sa.Index('hive_votes_voter_id_post_id_idx', 'voter_id', 'post_id'), # this index is probably redundant with hive_votes_voter_id_last_update_idx, since both start with voter_id.
+        sa.Index('hive_votes_voter_id_last_update_idx', 'voter_id', 'last_update'), # this index is critical for hive_accounts_info_view performance
+        sa.Index('hive_votes_post_id_voter_id_idx', 'post_id', 'voter_id'),
+        sa.Index('hive_votes_block_num_idx', 'block_num') # this is also important for hive_accounts_info_view 
+    )
+
+    sa.Table(
+        'hive_tag_data', metadata,
+        sa.Column('id', sa.Integer, nullable=False, primary_key=True),
+        sa.Column('tag', VARCHAR(64, collation='C'), nullable=False, server_default=''),
+        sa.UniqueConstraint('tag', name='hive_tag_data_ux1')
+    )
+
+    sa.Table(
+        'hive_post_tags', metadata,
+        sa.Column('post_id', sa.Integer, nullable=False),
+        sa.Column('tag_id', sa.Integer, nullable=False),
+        sa.PrimaryKeyConstraint('post_id', 'tag_id', name='hive_post_tags_pk1'),
+
+        sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_post_tags_fk1'),
+        sa.ForeignKeyConstraint(['tag_id'], ['hive_tag_data.id'], name='hive_post_tags_fk2'),
+
+        sa.Index('hive_post_tags_tag_id_idx', 'tag_id')
+    )
+
+    sa.Table(
+        'hive_follows', metadata,
+        sa.Column('id', sa.Integer, primary_key=True ),
+        sa.Column('follower', sa.Integer, nullable=False),
+        sa.Column('following', sa.Integer, nullable=False),
+        sa.Column('state', SMALLINT, nullable=False, server_default='1'),
+        sa.Column('created_at', sa.DateTime, nullable=False),
+        sa.Column('blacklisted', sa.Boolean, nullable=False, server_default='0'),
+        sa.Column('follow_blacklists', sa.Boolean, nullable=False, server_default='0'),
+        sa.Column('follow_muted', BOOLEAN, nullable=False, server_default='0'),
+        sa.Column('block_num', sa.Integer,  nullable=False ),
+
+        sa.UniqueConstraint('following', 'follower', name='hive_follows_ux1'), # core
+        sa.ForeignKeyConstraint(['block_num'], ['hive_blocks.num'], name='hive_follows_fk1'),
+        sa.Index('hive_follows_ix5a', 'following', 'state', 'created_at', 'follower'),
+        sa.Index('hive_follows_ix5b', 'follower', 'state', 'created_at', 'following'),
+        sa.Index('hive_follows_block_num_idx', 'block_num'),
+        sa.Index('hive_follows_created_at_idx', 'created_at'),
+    )
+
+    sa.Table(
+        'hive_reblogs', metadata,
+        sa.Column('id', sa.Integer, primary_key=True ),
+        sa.Column('blogger_id', sa.Integer, nullable=False),
+        sa.Column('post_id', sa.Integer, nullable=False),
+        sa.Column('created_at', sa.DateTime, nullable=False),
+        sa.Column('block_num', sa.Integer,  nullable=False ),
+
+        sa.ForeignKeyConstraint(['blogger_id'], ['hive_accounts.id'], name='hive_reblogs_fk1'),
+        sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_reblogs_fk2'),
+        sa.ForeignKeyConstraint(['block_num'], ['hive_blocks.num'], name='hive_reblogs_fk3'),
+        sa.UniqueConstraint('blogger_id', 'post_id', name='hive_reblogs_ux1'), # core
+        sa.Index('hive_reblogs_post_id', 'post_id'),
+        sa.Index('hive_reblogs_block_num_idx', 'block_num'),
+        sa.Index('hive_reblogs_created_at_idx', 'created_at')
+    )
+
+    sa.Table(
+        'hive_payments', metadata,
+        sa.Column('id', sa.Integer, primary_key=True),
+        sa.Column('block_num', sa.Integer, nullable=False),
+        sa.Column('tx_idx', SMALLINT, nullable=False),
+        sa.Column('post_id', sa.Integer, nullable=False),
+        sa.Column('from_account', sa.Integer, nullable=False),
+        sa.Column('to_account', sa.Integer, nullable=False),
+        sa.Column('amount', sa.types.DECIMAL(10, 3), nullable=False),
+        sa.Column('token', VARCHAR(5), nullable=False),
+
+        sa.ForeignKeyConstraint(['from_account'], ['hive_accounts.id'], name='hive_payments_fk1'),
+        sa.ForeignKeyConstraint(['to_account'], ['hive_accounts.id'], name='hive_payments_fk2'),
+        sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_payments_fk3'),
+        sa.Index('hive_payments_from', 'from_account'),
+        sa.Index('hive_payments_to', 'to_account'),
+        sa.Index('hive_payments_post_id', 'post_id'),
+    )
+
+    sa.Table(
+        'hive_feed_cache', metadata,
+        sa.Column('post_id', sa.Integer, nullable=False),
+        sa.Column('account_id', sa.Integer, nullable=False),
+        sa.Column('created_at', sa.DateTime, nullable=False),
+        sa.Column('block_num',    sa.Integer,  nullable=False),
+        sa.PrimaryKeyConstraint('account_id', 'post_id', name='hive_feed_cache_pk'),
+        sa.ForeignKeyConstraint(['block_num'], ['hive_blocks.num'], name='hive_feed_cache_fk1'),
+
+        sa.Index('hive_feed_cache_block_num_idx', 'block_num'),
+        sa.Index('hive_feed_cache_created_at_idx', 'created_at')
+    )
+
+    sa.Table(
+        'hive_state', metadata,
+        sa.Column('block_num', sa.Integer, primary_key=True, autoincrement=False),
+        sa.Column('db_version', sa.Integer, nullable=False),
+        sa.Column('steem_per_mvest', sa.types.DECIMAL(14, 6), nullable=False),
+        sa.Column('usd_per_steem', sa.types.DECIMAL(14, 6), nullable=False),
+        sa.Column('sbd_per_steem', sa.types.DECIMAL(14, 6), nullable=False),
+        sa.Column('dgpo', sa.Text, nullable=False),
+    )
+
+    sa.Table(
+        'hive_posts_api_helper', metadata,
+        sa.Column('id', sa.Integer, primary_key=True, autoincrement = False),
+        sa.Column('author_s_permlink', VARCHAR(275, collation='C'), nullable=False), # concatenation of author '/' permlink
+        sa.Index('hive_posts_api_helper_author_s_permlink_idx', 'author_s_permlink')
+    )
+
+    sa.Table(
+        'hive_mentions', metadata,
+        sa.Column('id', sa.Integer, primary_key=True),
+        sa.Column('post_id', sa.Integer, nullable=False),
+        sa.Column('account_id', sa.Integer, nullable=False),
+        sa.Column('block_num', sa.Integer, nullable=False),
+
+        sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_mentions_fk1'),
+        sa.ForeignKeyConstraint(['account_id'], ['hive_accounts.id'], name='hive_mentions_fk2'),
+
+        sa.Index('hive_mentions_account_id_idx', 'account_id'),
+        sa.UniqueConstraint('post_id', 'account_id', 'block_num', name='hive_mentions_ux1')
+    )
+
+    metadata = build_metadata_community(metadata)
+
+    return metadata
+
+def build_metadata_community(metadata=None):
+    """Build community schema defs"""
+    if not metadata:
+        metadata = sa.MetaData()
+
+    sa.Table(
+        'hive_communities', metadata,
+        sa.Column('id',          sa.Integer,      primary_key=True, autoincrement=False),
+        sa.Column('type_id',     SMALLINT,        nullable=False),
+        sa.Column('lang',        CHAR(2),         nullable=False, server_default='en'),
+        sa.Column('name',        VARCHAR(16, collation='C'), nullable=False),
+        sa.Column('title',       sa.String(32),   nullable=False, server_default=''),
+        sa.Column('created_at',  sa.DateTime,     nullable=False),
+        sa.Column('sum_pending', sa.Integer,      nullable=False, server_default='0'),
+        sa.Column('num_pending', sa.Integer,      nullable=False, server_default='0'),
+        sa.Column('num_authors', sa.Integer,      nullable=False, server_default='0'),
+        sa.Column('rank',        sa.Integer,      nullable=False, server_default='0'),
+        sa.Column('subscribers', sa.Integer,      nullable=False, server_default='0'),
+        sa.Column('is_nsfw',     BOOLEAN,         nullable=False, server_default='0'),
+        sa.Column('about',       sa.String(120),  nullable=False, server_default=''),
+        sa.Column('primary_tag', sa.String(32),   nullable=False, server_default=''),
+        sa.Column('category',    sa.String(32),   nullable=False, server_default=''),
+        sa.Column('avatar_url',  sa.String(1024), nullable=False, server_default=''),
+        sa.Column('description', sa.String(5000), nullable=False, server_default=''),
+        sa.Column('flag_text',   sa.String(5000), nullable=False, server_default=''),
+        sa.Column('settings',    TEXT,            nullable=False, server_default='{}'),
+        sa.Column('block_num', sa.Integer,  nullable=False ),
+
+        sa.UniqueConstraint('name', name='hive_communities_ux1'),
+        sa.Index('hive_communities_ix1', 'rank', 'id'),
+        sa.Index('hive_communities_block_num_idx', 'block_num')
+    )
+
+    sa.Table(
+        'hive_roles', metadata,
+        sa.Column('account_id',   sa.Integer,     nullable=False),
+        sa.Column('community_id', sa.Integer,     nullable=False),
+        sa.Column('created_at',   sa.DateTime,    nullable=False),
+        sa.Column('role_id',      SMALLINT,       nullable=False, server_default='0'),
+        sa.Column('title',        sa.String(140), nullable=False, server_default=''),
+
+        sa.PrimaryKeyConstraint('account_id', 'community_id', name='hive_roles_pk'),
+        sa.Index('hive_roles_ix1', 'community_id', 'account_id', 'role_id'),
+    )
+
+    sa.Table(
+        'hive_subscriptions', metadata,
+        sa.Column('id', sa.Integer, primary_key=True),
+        sa.Column('account_id',   sa.Integer,  nullable=False),
+        sa.Column('community_id', sa.Integer,  nullable=False),
+        sa.Column('created_at',   sa.DateTime, nullable=False),
+        sa.Column('block_num', sa.Integer,  nullable=False ),
+
+        sa.UniqueConstraint('account_id', 'community_id', name='hive_subscriptions_ux1'),
+        sa.Index('hive_subscriptions_community_idx', 'community_id'),
+        sa.Index('hive_subscriptions_block_num_idx', 'block_num')
+    )
+
+    sa.Table(
+        'hive_notifs', metadata,
+        sa.Column('id',           sa.Integer,  primary_key=True),
+        sa.Column('block_num',    sa.Integer,  nullable=False),
+        sa.Column('type_id',      SMALLINT,    nullable=False),
+        sa.Column('score',        SMALLINT,    nullable=False),
+        sa.Column('created_at',   sa.DateTime, nullable=False),
+        sa.Column('src_id',       sa.Integer,  nullable=True),
+        sa.Column('dst_id',       sa.Integer,  nullable=True),
+        sa.Column('post_id',      sa.Integer,  nullable=True),
+        sa.Column('community_id', sa.Integer,  nullable=True),
+        sa.Column('payload',      sa.Text,     nullable=True),
+
+        sa.Index('hive_notifs_ix1', 'dst_id',                  'id', postgresql_where=sql_text("dst_id IS NOT NULL")),
+        sa.Index('hive_notifs_ix2', 'community_id',            'id', postgresql_where=sql_text("community_id IS NOT NULL")),
+        sa.Index('hive_notifs_ix3', 'community_id', 'type_id', 'id', postgresql_where=sql_text("community_id IS NOT NULL")),
+        sa.Index('hive_notifs_ix4', 'community_id', 'post_id', 'type_id', 'id', postgresql_where=sql_text("community_id IS NOT NULL AND post_id IS NOT NULL")),
+        sa.Index('hive_notifs_ix5', 'post_id', 'type_id', 'dst_id', 'src_id', postgresql_where=sql_text("post_id IS NOT NULL AND type_id IN (16,17)")), # filter: dedupe
+        sa.Index('hive_notifs_ix6', 'dst_id', 'created_at', 'score', 'id', postgresql_where=sql_text("dst_id IS NOT NULL")), # unread
+    )
+
+    sa.Table('hive_notification_cache', metadata,
+        sa.Column('id', sa.BigInteger, primary_key=True),
+        sa.Column('block_num', sa.Integer, nullable=False),
+        sa.Column('type_id', sa.Integer, nullable=False),
+        sa.Column('dst', sa.Integer, nullable=True), # dst account id except persistent notifs from hive_notifs
+        sa.Column('src', sa.Integer, nullable=True), # src account id
+        sa.Column('dst_post_id', sa.Integer, nullable=True), # destination post id
+        sa.Column('post_id', sa.Integer, nullable=True),
+        sa.Column('created_at', sa.DateTime, nullable=False), # notification creation time
+        sa.Column('score', sa.Integer, nullable=False),
+        sa.Column('community_title', sa.String(32), nullable=True),
+        sa.Column('community', sa.String(16), nullable=True),
+        sa.Column('payload', sa.String, nullable=True),
+
+        sa.Index('hive_notification_cache_block_num_idx', 'block_num'),
+        sa.Index('hive_notification_cache_dst_score_idx', 'dst', 'score', postgresql_where=sql_text("dst IS NOT NULL"))
+
+    )
+
+    return metadata
+
+
+def teardown(db):
+    """Drop all tables"""
+    build_metadata().drop_all(db.engine())
+
+def drop_fk(db):
+    """Drop all foreign key constraints defined in the schema."""
+    db.query_no_return("START TRANSACTION")
+    for table in build_metadata().sorted_tables:
+        for fk in table.foreign_keys:
+            sql = """ALTER TABLE {} DROP CONSTRAINT IF EXISTS {}""".format(table.name, fk.name)
+            db.query_no_return(sql)
+    db.query_no_return("COMMIT")
+
+def create_fk(db):
+    """Recreate all foreign key constraints defined in the schema."""
+    from sqlalchemy.schema import AddConstraint
+    from sqlalchemy import text
+    connection = db.engine().connect()
+    connection.execute(text("START TRANSACTION"))
+    for table in build_metadata().sorted_tables:
+        for fk in table.foreign_keys:
+            connection.execute(AddConstraint(fk.constraint))
+    connection.execute(text("COMMIT"))
+
+def setup(db):
+    """Creates all tables and seed data"""
+    # initialize schema
+    build_metadata().create_all(db.engine())
+
+    # tune auto vacuum/analyze
+    reset_autovac(db)
+
+    # sets FILLFACTOR:
+    set_fillfactor(db)
+
+    # default rows
+    sqls = [
+        "INSERT INTO hive_state (block_num, db_version, steem_per_mvest, usd_per_steem, sbd_per_steem, dgpo) VALUES (0, %d, 0, 0, 0, '')" % DB_VERSION,
+        "INSERT INTO hive_blocks (num, hash, created_at) VALUES (0, '0000000000000000000000000000000000000000', '2016-03-24 16:04:57')",
+
+        "INSERT INTO hive_permlink_data (id, permlink) VALUES (0, '')",
+        "INSERT INTO hive_category_data (id, category) VALUES (0, '')",
+        "INSERT INTO hive_tag_data (id, tag) VALUES (0, '')",
+        "INSERT INTO hive_accounts (id, name, created_at) VALUES (0, '', '1970-01-01T00:00:00')",
+
+        "INSERT INTO hive_accounts (name, created_at) VALUES ('miners',    '2016-03-24 16:05:00')",
+        "INSERT INTO hive_accounts (name, created_at) VALUES ('null',      '2016-03-24 16:05:00')",
+        "INSERT INTO hive_accounts (name, created_at) VALUES ('temp',      '2016-03-24 16:05:00')",
+        "INSERT INTO hive_accounts (name, created_at) VALUES ('initminer', '2016-03-24 16:05:00')",
+
+        """
+        INSERT INTO
+            public.hive_posts(id, root_id, parent_id, author_id, permlink_id, category_id,
+                community_id, created_at, depth, block_num, block_num_created
+            )
+        VALUES
+            (0, 0, 0, 0, 0, 0, 0, now(), 0, 0, 0);
+        """]
+    for sql in sqls:
+        db.query(sql)
+
+    sql = "CREATE INDEX hive_communities_ft1 ON hive_communities USING GIN (to_tsvector('english', title || ' ' || about))"
+    db.query(sql)
+
+    # find_comment_id definition moved to utility_functions.sql
+    # find_account_id definition moved to utility_functions.sql
+
+    # process_hive_post_operation definition moved to hive_post_operations.sql
+    # delete_hive_post moved to hive_post_operations.sql
+
+    # In original hivemind, a value of 'active_at' was calculated from
+    # max
+    #   {
+    #     created             ( account_create_operation ),
+    #     last_account_update ( account_update_operation/account_update2_operation ),
+    #     last_post           ( comment_operation - only creation )
+    #     last_root_post      ( comment_operation - only creation + only ROOT ),
+    #     last_vote_time      ( vote_operation )
+    #   }
+    # In order to simplify calculations, `last_account_update` is not taken into consideration, because account updates are very rare
+    # and posting/voting after an account update fixes the `active_at` value immediately.
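+    # Purely illustrative (an assumption, not code executed by hivemind): with that
+    # simplification the value reduces to something like
+    #   GREATEST( created_at, last_post, last_root_post, last_vote_time )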
+
+    # hive_accounts_view definition moved to hive_accounts_view.sql
+
+    # hive_posts_view definition moved to hive_posts_view.sql
+
+    # update_hive_posts_root_id moved to update_hive_posts_root_id.sql
+
+    # hive_votes_view definition moved into hive_votes_view.sql
+
+    # database_api_vote, find_votes, list_votes_by_voter_comment, list_votes_by_comment_voter moved into database_api_list_votes.sql
+
+    sql = """
+          DO $$
+          BEGIN
+            EXECUTE 'ALTER DATABASE '||current_database()||' SET join_collapse_limit TO 16';
+            EXECUTE 'ALTER DATABASE '||current_database()||' SET from_collapse_limit TO 16';
+          END
+          $$;
+          """
+    db.query_no_return(sql)
+
+    sql = """
+          CREATE TABLE IF NOT EXISTS hive_db_patch_level
+          (
+            level SERIAL NOT NULL PRIMARY KEY,
+            patch_date timestamp without time zone NOT NULL,
+            patched_to_revision TEXT
+          );
+    """
+    db.query_no_return(sql)
+    sql = """
+          INSERT INTO hive_db_patch_level
+          (patch_date, patched_to_revision)
+          values
+          (now(), '{}');
+          """
+
+    from hive.version import GIT_REVISION
+    db.query_no_return(sql.format(GIT_REVISION))
+
+    # max_time_stamp definition moved into utility_functions.sql
+
+    # get_discussion definition moved to bridge_get_discussion.sql
+
+    sql_scripts = [
+      "utility_functions.sql",
+      "hive_accounts_view.sql",
+      "hive_accounts_info_view.sql",
+      "hive_posts_base_view.sql",
+      "hive_posts_view.sql",
+      "hive_votes_view.sql",
+      "hive_post_operations.sql",
+      "head_block_time.sql",
+      "update_feed_cache.sql",
+      "payout_stats_view.sql",
+      "update_hive_posts_mentions.sql",
+      "find_tag_id.sql",
+      "bridge_get_ranked_post_type.sql",
+      "bridge_get_ranked_post_for_communities.sql",
+      "bridge_get_ranked_post_for_observer_communities.sql",
+      "bridge_get_ranked_post_for_tag.sql",
+      "bridge_get_ranked_post_for_all.sql",
+      "calculate_account_reputations.sql",
+      "update_communities_rank.sql",
+      "delete_hive_posts_mentions.sql",
+      "notifications_view.sql",
+      "notifications_api.sql",
+      "bridge_get_account_posts_by_comments.sql",
+      "bridge_get_account_posts_by_payout.sql",
+      "bridge_get_account_posts_by_posts.sql",
+      "bridge_get_account_posts_by_replies.sql",
+      "bridge_get_relationship_between_accounts.sql",
+      "bridge_get_post.sql",
+      "bridge_get_discussion.sql",
+      "condenser_api_post_type.sql",
+      "condenser_api_post_ex_type.sql",
+      "condenser_get_blog.sql",
+      "condenser_get_content.sql",
+      "condenser_get_discussions_by_blog.sql",
+      "condenser_tags.sql",
+      "condenser_follows.sql",
+      "hot_and_trends.sql",
+      "update_hive_posts_children_count.sql",
+      "update_hive_posts_api_helper.sql",
+      "database_api_list_comments.sql",
+      "database_api_list_votes.sql",
+      "update_posts_rshares.sql",
+      "update_hive_post_root_id.sql",
+      "condenser_get_by_replies_to_account.sql",
+      "condenser_get_by_account_comments.sql",
+      "condenser_get_by_blog_without_reblog.sql",
+      "condenser_get_by_feed_with_reblog.sql",
+      "condenser_get_by_blog.sql",
+      "bridge_get_account_posts_by_blog.sql",
+      "condenser_get_names_by_reblogged.sql",
+      "condenser_get_discussions_by_comments.sql",
+      "condenser_get_account_reputations.sql"
+
+    ]
+    from os.path import dirname, realpath
+    dir_path = dirname(realpath(__file__))
+    for script in sql_scripts:
+        execute_sql_script(db.query_no_return, "{}/sql_scripts/{}".format(dir_path, script))
+
+
+def reset_autovac(db):
+    """Initializes/resets per-table autovacuum/autoanalyze params.
+
+    We use a scale factor of 0 and specify exact threshold tuple counts,
+    per-table, in the format (autovacuum_threshold, autoanalyze_threshold)."""
+
+    autovac_config = { #    vacuum  analyze
+        'hive_accounts':    (50000, 100000),
+        'hive_posts':       (2500, 10000),
+        'hive_post_tags':   (5000, 10000),
+        'hive_follows':     (5000, 5000),
+        'hive_feed_cache':  (5000, 5000),
+        'hive_blocks':      (5000, 25000),
+        'hive_reblogs':     (5000, 5000),
+        'hive_payments':    (5000, 5000),
+    }
+
+    for table, (n_vacuum, n_analyze) in autovac_config.items():
+        sql = """ALTER TABLE %s SET (autovacuum_vacuum_scale_factor = 0,
+                                     autovacuum_vacuum_threshold = %s,
+                                     autovacuum_analyze_scale_factor = 0,
+                                     autovacuum_analyze_threshold = %s)"""
+        db.query(sql % (table, n_vacuum, n_analyze))
+
+
+def set_fillfactor(db):
+    """Initializes/resets FILLFACTOR for tables which are intesively updated"""
+
+    fillfactor_config = {
+        'hive_posts': 70,
+        'hive_post_data': 70,
+        'hive_votes': 70,
+        'hive_reputation_data': 50
+    }
+
+    for table, fillfactor in fillfactor_config.items():
+        sql = """ALTER TABLE {} SET (FILLFACTOR = {})"""
+        db.query(sql.format(table, fillfactor))
+
+def set_logged_table_attribute(db, logged):
+    """Initializes/resets LOGGED/UNLOGGED attribute for tables which are intesively updated"""
+
+    logged_config = [
+        'hive_accounts',
+        'hive_permlink_data',
+        'hive_post_tags',
+        'hive_posts',
+        'hive_post_data',
+        'hive_votes',
+        'hive_reputation_data'
+    ]
+
+    for table in logged_config:
+        log.info("Setting {} attribute on a table: {}".format('LOGGED' if logged else 'UNLOGGED', table))
+        sql = """ALTER TABLE {} SET {}"""
+        db.query_no_return(sql.format(table, 'LOGGED' if logged else 'UNLOGGED'))
+
+def execute_sql_script(query_executor, path_to_script):
+    """ Load and execute sql script from file
+        Params:
+          query_executor - callable to execute query with
+          path_to_script - path to script
+        Returns:
+          whatever query_executor returns
+
+        Example:
+          print(execute_sql_script(db.query_row, "./test.sql"))
+          where test.sql contains: SELECT * FROM hive_state WHERE block_num = 0;
+          will return something like: (0, 18, Decimal('0.000000'), Decimal('0.000000'), Decimal('0.000000'), '')
+    """
+    try:
+        sql_script = None
+        with open(path_to_script, 'r') as sql_script_file:
+            sql_script = sql_script_file.read()
+        if sql_script is not None:
+            return query_executor(sql_script)
+    except Exception as ex:
+        log.exception("Error running sql script: {}".format(ex))
+        raise ex
+    return None
diff --git a/hive/db/sql_scripts/bridge_get_ranked_post_for_all.sql b/hive/db/sql_scripts/bridge_get_ranked_post_for_all.sql
index 556749d787afac9ff0467144e644a1aae32b5e2d..39585907eb734b6d9ef9b39e1544d7048b57ce4e 100644
--- a/hive/db/sql_scripts/bridge_get_ranked_post_for_all.sql
+++ b/hive/db/sql_scripts/bridge_get_ranked_post_for_all.sql
@@ -270,7 +270,7 @@ $function$
 language plpgsql STABLE;
 
 DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_payout;
-CREATE FUNCTION bridge_get_ranked_post_by_payout( in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
+CREATE FUNCTION bridge_get_ranked_post_by_payout( in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _bridge_api BOOLEAN )
 RETURNS SETOF bridge_api_post
 AS
 $function$
@@ -328,7 +328,8 @@ BEGIN
         , ( hp1.payout + hp1.pending_payout ) as all_payout
       FROM
           hive_posts hp1
-      WHERE hp1.counter_deleted = 0 AND NOT hp1.is_paidout AND hp1.payout_at BETWEEN __head_block_time + interval '12 hours' AND __head_block_time + interval '36 hours'
+      WHERE hp1.counter_deleted = 0 AND NOT hp1.is_paidout
+          AND ( ( NOT _bridge_api AND hp1.depth = 0 ) OR ( _bridge_api AND hp1.payout_at BETWEEN __head_block_time + interval '12 hours' AND __head_block_time + interval '36 hours' ) )
           AND ( __post_id = 0 OR ( hp1.payout + hp1.pending_payout ) < __payout_limit OR ( ( hp1.payout + hp1.pending_payout ) = __payout_limit AND hp1.id < __post_id ) )
       ORDER BY ( hp1.payout + hp1.pending_payout ) DESC, hp1.id DESC
       LIMIT _limit
diff --git a/hive/db/sql_scripts/bridge_get_ranked_post_for_communities.sql b/hive/db/sql_scripts/bridge_get_ranked_post_for_communities.sql
index 58a167d8cfb6a44f6d5eee79f43aa966a52db4b3..147377f89dba7dbae03f9361ede4c8f9796fa430 100644
--- a/hive/db/sql_scripts/bridge_get_ranked_post_for_communities.sql
+++ b/hive/db/sql_scripts/bridge_get_ranked_post_for_communities.sql
@@ -50,7 +50,7 @@ $function$
 language sql STABLE;
 
 DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_trends_for_community;
-CREATE FUNCTION bridge_get_ranked_post_by_trends_for_community( in _community VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
+CREATE FUNCTION bridge_get_ranked_post_by_trends_for_community( in _community VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _bridge_api BOOLEAN )
 RETURNS SETOF bridge_api_post
 AS
 $function$
@@ -107,7 +107,8 @@ BEGIN
       FROM
          hive_posts hp1
          JOIN hive_communities hc ON hp1.community_id = hc.id
-      WHERE hc.name = _community AND hp1.counter_deleted = 0 AND NOT hp1.is_paidout AND hp1.depth = 0 AND NOT hp1.is_pinned -- concatenated with bridge_get_ranked_post_pinned_for_community
+      WHERE hc.name = _community AND hp1.counter_deleted = 0 AND NOT hp1.is_paidout AND hp1.depth = 0
+         AND ( NOT _bridge_api OR NOT hp1.is_pinned ) -- concatenated with bridge_get_ranked_post_pinned_for_community when called for bridge_api
          AND ( __post_id = 0 OR hp1.sc_trend < __trending_limit OR ( hp1.sc_trend = __trending_limit AND hp1.id < __post_id ) )
       ORDER BY hp1.sc_trend DESC, hp1.id DESC
       LIMIT _limit
@@ -473,7 +474,7 @@ $function$
 language plpgsql STABLE;
 
 DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_created_for_community;
-CREATE FUNCTION bridge_get_ranked_post_by_created_for_community( in _community VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
+CREATE FUNCTION bridge_get_ranked_post_by_created_for_community( in _community VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _bridge_api BOOLEAN )
 RETURNS SETOF bridge_api_post
 AS
 $function$
@@ -526,7 +527,8 @@ BEGIN
           hive_posts hp1
           JOIN hive_communities hc ON hp1.community_id = hc.id
           JOIN hive_accounts_view ha ON hp1.author_id = ha.id
-      WHERE hc.name = _community AND hp1.counter_deleted = 0 AND hp1.depth = 0 AND NOT hp1.is_pinned -- concatenated with bridge_get_ranked_post_pinned_for_community
+      WHERE hc.name = _community AND hp1.counter_deleted = 0 AND hp1.depth = 0
+          AND ( NOT _bridge_api OR NOT hp1.is_pinned ) -- concatenated with bridge_get_ranked_post_pinned_for_community when called for bridge_api
           AND NOT ha.is_grayed AND ( __post_id = 0 OR hp1.id < __post_id )
       ORDER BY hp1.id DESC
       LIMIT _limit
diff --git a/hive/db/sql_scripts/bridge_get_ranked_post_for_tag.sql b/hive/db/sql_scripts/bridge_get_ranked_post_for_tag.sql
index 820f36aeae22f8020241218e96059a10a92359bc..bb8b49d344fe469d530e9078c34eae3080568a5b 100644
--- a/hive/db/sql_scripts/bridge_get_ranked_post_for_tag.sql
+++ b/hive/db/sql_scripts/bridge_get_ranked_post_for_tag.sql
@@ -283,7 +283,7 @@ $function$
 language plpgsql STABLE;
 
 DROP FUNCTION IF EXISTS bridge_get_ranked_post_by_payout_for_category;
-CREATE FUNCTION bridge_get_ranked_post_by_payout_for_category( in _category VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
+CREATE FUNCTION bridge_get_ranked_post_by_payout_for_category( in _category VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT, in _bridge_api BOOLEAN )
 RETURNS SETOF bridge_api_post
 AS
 $function$
@@ -343,7 +343,8 @@ BEGIN
         , ( hp1.payout + hp1.pending_payout ) as all_payout
       FROM
           hive_posts hp1
-      WHERE hp1.category_id = __hive_category AND hp1.counter_deleted = 0 AND NOT hp1.is_paidout AND hp1.payout_at BETWEEN __head_block_time + interval '12 hours' AND __head_block_time + interval '36 hours'
+      WHERE hp1.category_id = __hive_category AND hp1.counter_deleted = 0 AND NOT hp1.is_paidout
+          AND ( ( NOT _bridge_api AND hp1.depth = 0 ) OR ( _bridge_api AND hp1.payout_at BETWEEN __head_block_time + interval '12 hours' AND __head_block_time + interval '36 hours' ) )
           AND ( __post_id = 0 OR ( hp1.payout + hp1.pending_payout ) < __payout_limit OR ( ( hp1.payout + hp1.pending_payout ) = __payout_limit AND hp1.id < __post_id ) )
       ORDER BY ( hp1.payout + hp1.pending_payout ) DESC, hp1.id DESC
       LIMIT _limit
diff --git a/hive/db/sql_scripts/condenser_follows.sql b/hive/db/sql_scripts/condenser_follows.sql
new file mode 100644
index 0000000000000000000000000000000000000000..fbfa033f28006311dca9c04c508d6e21a2d61994
--- /dev/null
+++ b/hive/db/sql_scripts/condenser_follows.sql
@@ -0,0 +1,71 @@
+DROP FUNCTION IF EXISTS condenser_get_follow_count;
+CREATE FUNCTION condenser_get_follow_count( in _account VARCHAR,
+  out following hive_accounts.following%TYPE, out followers hive_accounts.followers%TYPE )
+AS
+$function$
+DECLARE
+  __account_id INT;
+BEGIN
+  __account_id = find_account_id( _account, True );
+  SELECT ha.following, ha.followers INTO following, followers FROM hive_accounts ha WHERE ha.id = __account_id;
+  -- following equals (SELECT COUNT(*) FROM hive_follows WHERE state = 1 AND following = __account_id)
+  -- followers equals (SELECT COUNT(*) FROM hive_follows WHERE state = 1 AND follower  = __account_id)
+END
+$function$
+language plpgsql STABLE;
+
+DROP FUNCTION IF EXISTS condenser_get_followers;
+-- list of account names that follow/ignore given account
+CREATE FUNCTION condenser_get_followers( in _account VARCHAR, in _start VARCHAR, in _type INT, in _limit INT )
+RETURNS SETOF hive_accounts.name%TYPE
+AS
+$function$
+DECLARE
+  __account_id INT;
+  __start_id INT;
+BEGIN
+  __account_id = find_account_id( _account, True );
+  __start_id = find_account_id( _start, _start <> '' );
+  IF __start_id <> 0 THEN
+      SELECT INTO __start_id COALESCE( ( SELECT id FROM hive_follows WHERE following = __account_id AND follower = __start_id ), 0 );
+  END IF;
+  RETURN QUERY SELECT
+     ha.name
+  FROM
+     hive_follows hf
+     JOIN hive_accounts ha ON hf.follower = ha.id
+  WHERE
+     hf.following = __account_id AND hf.state = _type AND ( __start_id = 0 OR hf.id < __start_id )
+  ORDER BY hf.id DESC
+  LIMIT _limit;
+END
+$function$
+language plpgsql STABLE;
+
+DROP FUNCTION IF EXISTS condenser_get_following;
+-- list of account names followed/ignored by given account
+CREATE FUNCTION condenser_get_following( in _account VARCHAR, in _start VARCHAR, in _type INT, in _limit INT )
+RETURNS SETOF hive_accounts.name%TYPE
+AS
+$function$
+DECLARE
+  __account_id INT;
+  __start_id INT;
+BEGIN
+  __account_id = find_account_id( _account, True );
+  __start_id = find_account_id( _start, _start <> '' );
+  IF __start_id <> 0 THEN
+      SELECT INTO __start_id COALESCE( ( SELECT id FROM hive_follows WHERE follower = __account_id AND following = __start_id ), 0 );
+  END IF;
+  RETURN QUERY SELECT
+     ha.name
+  FROM
+     hive_follows hf
+     JOIN hive_accounts ha ON hf.following = ha.id
+  WHERE
+     hf.follower = __account_id AND hf.state = _type AND ( __start_id = 0 OR hf.id < __start_id )
+  ORDER BY hf.id DESC
+  LIMIT _limit;
+END
+$function$
+language plpgsql STABLE;
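+
+-- Illustrative usage only (the account name and paging values are assumptions, not part of this change);
+-- _type follows the follow-state convention used elsewhere in hivemind: 1 = blog/follow, 2 = ignore/mute.
+-- SELECT * FROM condenser_get_follow_count( 'steemit' );
+-- SELECT * FROM condenser_get_followers( 'steemit', '', 1, 10 );
+-- SELECT * FROM condenser_get_following( 'steemit', '', 1, 10 );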
diff --git a/hive/db/sql_scripts/condenser_get_comment_discussions_by_payout.sql b/hive/db/sql_scripts/condenser_get_comment_discussions_by_payout.sql
deleted file mode 100644
index 4052a096594f89fd37a5ad3d7d013da13dba8ac2..0000000000000000000000000000000000000000
--- a/hive/db/sql_scripts/condenser_get_comment_discussions_by_payout.sql
+++ /dev/null
@@ -1,55 +0,0 @@
-DROP FUNCTION IF EXISTS condenser_get_comment_discussions_by_payout;
-
-CREATE FUNCTION condenser_get_comment_discussions_by_payout( in _tag VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
-RETURNS SETOF bridge_api_post
-AS
-$function$
-DECLARE
-  __post_id INT;
-  __category_id INT;
-BEGIN
-  __post_id = find_comment_id( _author, _permlink, True );
-  __category_id = ( SELECT id FROM hive_category_data WHERE category = _tag );
-  RETURN QUERY SELECT
-      hp.id,
-      hp.author,
-      hp.parent_author,
-      hp.author_rep,
-      hp.root_title,
-      hp.beneficiaries,
-      hp.max_accepted_payout,
-      hp.percent_hbd,
-      hp.url,
-      hp.permlink,
-      hp.parent_permlink_or_category,
-      hp.title,
-      hp.body,
-      hp.category,
-      hp.depth,
-      hp.promoted,
-      hp.payout,
-      hp.pending_payout,
-      hp.payout_at,
-      hp.is_paidout,
-      hp.children,
-      hp.votes,
-      hp.created_at,
-      hp.updated_at,
-      hp.rshares,
-      hp.abs_rshares,
-      hp.json,
-      hp.is_hidden,
-      hp.is_grayed,
-      hp.total_votes,
-      hp.sc_trend,
-      hp.role_title,
-      hp.community_title,
-      hp.role_id,
-      hp.is_pinned,
-      hp.curator_payout_value
-    FROM hive_posts_view hp
-    WHERE hp.is_paidout = '0' AND hp.depth > 0 AND ( __category_id = 0 OR hp.category_id = __category_id )
-    ORDER BY (hp.payout+hp.pending_payout) DESC, hp.id DESC LIMIT _limit;
-END
-$function$
-language plpgsql STABLE;
diff --git a/hive/db/sql_scripts/condenser_get_discussions_by_created.sql b/hive/db/sql_scripts/condenser_get_discussions_by_created.sql
deleted file mode 100644
index c3a1d6c543e6549962f24b39d8485c73fce83f86..0000000000000000000000000000000000000000
--- a/hive/db/sql_scripts/condenser_get_discussions_by_created.sql
+++ /dev/null
@@ -1,126 +0,0 @@
-DROP FUNCTION IF EXISTS condenser_get_discussions_by_created;
-
-CREATE FUNCTION condenser_get_discussions_by_created( in _tag VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
-RETURNS SETOF bridge_api_post
-AS
-$function$
-DECLARE
-  __post_id INT;
-  __community VARCHAR;
-  __tag_id INT;
-  __category_id INT;
-BEGIN
-
-  __post_id     = find_comment_id( _author, _permlink, True );
-  __community   = ( SELECT substring(_tag from '^hive-') );
-
-  __tag_id      = ( SELECT id FROM hive_tag_data WHERE tag = _tag );
-  __category_id = ( SELECT id FROM hive_category_data WHERE category = _tag );
-
-  RETURN QUERY SELECT
-      hp.id,
-      hp.author,
-      hp.parent_author,
-      hp.author_rep,
-      hp.root_title,
-      hp.beneficiaries,
-      hp.max_accepted_payout,
-      hp.percent_hbd,
-      hp.url,
-      hp.permlink,
-      hp.parent_permlink_or_category,
-      hp.title,
-      hp.body,
-      hp.category,
-      hp.depth,
-      hp.promoted,
-      hp.payout,
-      hp.pending_payout,
-      hp.payout_at,
-      hp.is_paidout,
-      hp.children,
-      hp.votes,
-      hp.created_at,
-      hp.updated_at,
-      hp.rshares,
-      hp.abs_rshares,
-      hp.json,
-      hp.is_hidden,
-      hp.is_grayed,
-      hp.total_votes,
-      hp.sc_trend,
-      hp.role_title,
-      hp.community_title,
-      hp.role_id,
-      hp.is_pinned,
-      hp.curator_payout_value
-    FROM hive_posts_view hp
-    INNER JOIN hive_post_tags hpt ON hpt.post_id = hp.id
-    WHERE ( __tag_id = 0 OR hpt.tag_id = __tag_id ) AND ( __post_id = 0 OR hp.id < __post_id )
-          AND ( ( __community IS NULL ) OR ( ( __community IS NOT NULL ) AND ( __category_id = 0 OR hp.category_id = __category_id ) ) )
-    ORDER BY hp.id DESC LIMIT _limit;
-END
-$function$
-language plpgsql STABLE;
-
-DROP FUNCTION IF EXISTS condenser_get_discussions_by_created_with_empty_tag;
-
-CREATE FUNCTION condenser_get_discussions_by_created_with_empty_tag( in _tag VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
-RETURNS SETOF bridge_api_post
-AS
-$function$
-DECLARE
-  __post_id INT;
-  __community VARCHAR;
-  __category_id INT;
-BEGIN
-
-  __post_id     = find_comment_id( _author, _permlink, True );
-  __community   = ( SELECT substring(_tag from '^hive-') );
-
-  __category_id = ( SELECT id FROM hive_category_data WHERE category = _tag );
-
-  RETURN QUERY SELECT
-      hp.id,
-      hp.author,
-      hp.parent_author,
-      hp.author_rep,
-      hp.root_title,
-      hp.beneficiaries,
-      hp.max_accepted_payout,
-      hp.percent_hbd,
-      hp.url,
-      hp.permlink,
-      hp.parent_permlink_or_category,
-      hp.title,
-      hp.body,
-      hp.category,
-      hp.depth,
-      hp.promoted,
-      hp.payout,
-      hp.pending_payout,
-      hp.payout_at,
-      hp.is_paidout,
-      hp.children,
-      hp.votes,
-      hp.created_at,
-      hp.updated_at,
-      hp.rshares,
-      hp.abs_rshares,
-      hp.json,
-      hp.is_hidden,
-      hp.is_grayed,
-      hp.total_votes,
-      hp.sc_trend,
-      hp.role_title,
-      hp.community_title,
-      hp.role_id,
-      hp.is_pinned,
-      hp.curator_payout_value
-    FROM hive_posts_view hp
-    WHERE ( __post_id = 0 OR hp.id < __post_id )
-          AND ( ( __community IS NULL ) OR ( ( __community IS NOT NULL ) AND ( __category_id = 0 OR hp.category_id = __category_id ) ) )
-    ORDER BY hp.id DESC LIMIT _limit;
-END
-$function$
-language plpgsql STABLE;
diff --git a/hive/db/sql_scripts/condenser_get_discussions_by_hot.sql b/hive/db/sql_scripts/condenser_get_discussions_by_hot.sql
deleted file mode 100644
index 981ff24fdb70f8bed492f210231f2340ccfc90dd..0000000000000000000000000000000000000000
--- a/hive/db/sql_scripts/condenser_get_discussions_by_hot.sql
+++ /dev/null
@@ -1,126 +0,0 @@
-DROP FUNCTION IF EXISTS condenser_get_discussions_by_hot;
-
-CREATE FUNCTION condenser_get_discussions_by_hot( in _tag VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
-RETURNS SETOF bridge_api_post
-AS
-$function$
-DECLARE
-  __post_id INT;
-  __community VARCHAR;
-  __tag_id INT;
-  __category_id INT;
-BEGIN
-
-  __post_id     = find_comment_id( _author, _permlink, True );
-  __community   = ( SELECT substring(_tag from '^hive-') );
-
-  __tag_id      = ( SELECT id FROM hive_tag_data WHERE tag = _tag );
-  __category_id = ( SELECT id FROM hive_category_data WHERE category = _tag );
-
-  RETURN QUERY SELECT
-      hp.id,
-      hp.author,
-      hp.parent_author,
-      hp.author_rep,
-      hp.root_title,
-      hp.beneficiaries,
-      hp.max_accepted_payout,
-      hp.percent_hbd,
-      hp.url,
-      hp.permlink,
-      hp.parent_permlink_or_category,
-      hp.title,
-      hp.body,
-      hp.category,
-      hp.depth,
-      hp.promoted,
-      hp.payout,
-      hp.pending_payout,
-      hp.payout_at,
-      hp.is_paidout,
-      hp.children,
-      hp.votes,
-      hp.created_at,
-      hp.updated_at,
-      hp.rshares,
-      hp.abs_rshares,
-      hp.json,
-      hp.is_hidden,
-      hp.is_grayed,
-      hp.total_votes,
-      hp.sc_trend,
-      hp.role_title,
-      hp.community_title,
-      hp.role_id,
-      hp.is_pinned,
-      hp.curator_payout_value
-    FROM hive_posts_view hp
-    INNER JOIN hive_post_tags hpt ON hpt.post_id = hp.id
-    WHERE hp.is_paidout = '0' AND ( __tag_id = 0 OR hpt.tag_id = __tag_id ) AND ( __post_id = 0 OR hp.id < __post_id )
-          AND ( ( __community IS NULL ) OR ( ( __community IS NOT NULL ) AND ( ( __category_id = 0 OR hp.category_id = __category_id ) AND hp.depth = 0 ) ) )
-    ORDER BY hp.sc_hot DESC, hp.id DESC LIMIT _limit;
-END
-$function$
-language plpgsql STABLE;
-
-DROP FUNCTION IF EXISTS condenser_get_discussions_by_hot_with_empty_tag;
-
-CREATE FUNCTION condenser_get_discussions_by_hot_with_empty_tag( in _tag VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
-RETURNS SETOF bridge_api_post
-AS
-$function$
-DECLARE
-  __post_id INT;
-  __community VARCHAR;
-  __category_id INT;
-BEGIN
-
-  __post_id     = find_comment_id( _author, _permlink, True );
-  __community   = ( SELECT substring(_tag from '^hive-') );
-
-  __category_id = ( SELECT id FROM hive_category_data WHERE category = _tag );
-
-  RETURN QUERY SELECT
-      hp.id,
-      hp.author,
-      hp.parent_author,
-      hp.author_rep,
-      hp.root_title,
-      hp.beneficiaries,
-      hp.max_accepted_payout,
-      hp.percent_hbd,
-      hp.url,
-      hp.permlink,
-      hp.parent_permlink_or_category,
-      hp.title,
-      hp.body,
-      hp.category,
-      hp.depth,
-      hp.promoted,
-      hp.payout,
-      hp.pending_payout,
-      hp.payout_at,
-      hp.is_paidout,
-      hp.children,
-      hp.votes,
-      hp.created_at,
-      hp.updated_at,
-      hp.rshares,
-      hp.abs_rshares,
-      hp.json,
-      hp.is_hidden,
-      hp.is_grayed,
-      hp.total_votes,
-      hp.sc_trend,
-      hp.role_title,
-      hp.community_title,
-      hp.role_id,
-      hp.is_pinned,
-      hp.curator_payout_value
-    FROM hive_posts_view hp
-    WHERE hp.is_paidout = '0' AND ( __post_id = 0 OR hp.id < __post_id )
-          AND ( ( __community IS NULL ) OR ( ( __community IS NOT NULL ) AND ( ( __category_id = 0 OR hp.category_id = __category_id ) AND hp.depth = 0 ) ) )
-    ORDER BY hp.sc_hot DESC, hp.id DESC LIMIT _limit;
-END
-$function$
-language plpgsql STABLE;
diff --git a/hive/db/sql_scripts/condenser_get_discussions_by_promoted.sql b/hive/db/sql_scripts/condenser_get_discussions_by_promoted.sql
deleted file mode 100644
index d9afedb60212cfeab6ee78d59427bc2a2ae9db22..0000000000000000000000000000000000000000
--- a/hive/db/sql_scripts/condenser_get_discussions_by_promoted.sql
+++ /dev/null
@@ -1,127 +0,0 @@
-DROP FUNCTION IF EXISTS condenser_get_discussions_by_promoted;
-
-CREATE FUNCTION condenser_get_discussions_by_promoted( in _tag VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
-RETURNS SETOF bridge_api_post
-AS
-$function$
-DECLARE
-  __post_id INT;
-  __community VARCHAR;
-  __tag_id INT;
-  __category_id INT;
-BEGIN
-
-  __post_id     = find_comment_id( _author, _permlink, True );
-  __community   = ( SELECT substring(_tag from '^hive-') );
-
-  __tag_id      = ( SELECT id FROM hive_tag_data WHERE tag = _tag );
-  __category_id = ( SELECT id FROM hive_category_data WHERE category = _tag );
-
-  RETURN QUERY SELECT
-      hp.id,
-      hp.author,
-      hp.parent_author,
-      hp.author_rep,
-      hp.root_title,
-      hp.beneficiaries,
-      hp.max_accepted_payout,
-      hp.percent_hbd,
-      hp.url,
-      hp.permlink,
-      hp.parent_permlink_or_category,
-      hp.title,
-      hp.body,
-      hp.category,
-      hp.depth,
-      hp.promoted,
-      hp.payout,
-      hp.pending_payout,
-      hp.payout_at,
-      hp.is_paidout,
-      hp.children,
-      hp.votes,
-      hp.created_at,
-      hp.updated_at,
-      hp.rshares,
-      hp.abs_rshares,
-      hp.json,
-      hp.is_hidden,
-      hp.is_grayed,
-      hp.total_votes,
-      hp.sc_trend,
-      hp.role_title,
-      hp.community_title,
-      hp.role_id,
-      hp.is_pinned,
-      hp.curator_payout_value
-    FROM hive_posts_view hp
-    INNER JOIN hive_post_tags hpt ON hpt.post_id = hp.id
-    WHERE hp.promoted > 0 AND ( __tag_id = 0 OR hpt.tag_id = __tag_id ) AND ( __post_id = 0 OR hp.id < __post_id )
-          AND ( ( __community IS NULL ) OR ( ( __community IS NOT NULL ) AND ( __category_id = 0 OR hp.category_id = __category_id ) ) )
-    ORDER BY hp.promoted DESC, hp.id DESC LIMIT _limit;
-END
-$function$
-language plpgsql STABLE;
-
-DROP FUNCTION IF EXISTS condenser_get_discussions_by_promoted_with_empty_tag;
-
-CREATE FUNCTION condenser_get_discussions_by_promoted_with_empty_tag( in _tag VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
-RETURNS SETOF bridge_api_post
-AS
-$function$
-DECLARE
-  __post_id INT;
-  __community VARCHAR;
-  __category_id INT;
-BEGIN
-
-  __post_id     = find_comment_id( _author, _permlink, True );
-  __community   = ( SELECT substring(_tag from '^hive-') );
-
-  __category_id = ( SELECT id FROM hive_category_data WHERE category = _tag );
-
-  RETURN QUERY SELECT
-      hp.id,
-      hp.author,
-      hp.parent_author,
-      hp.author_rep,
-      hp.root_title,
-      hp.beneficiaries,
-      hp.max_accepted_payout,
-      hp.percent_hbd,
-      hp.url,
-      hp.permlink,
-      hp.parent_permlink_or_category,
-      hp.title,
-      hp.body,
-      hp.category,
-      hp.depth,
-      hp.promoted,
-      hp.payout,
-      hp.pending_payout,
-      hp.payout_at,
-      hp.is_paidout,
-      hp.children,
-      hp.votes,
-      hp.created_at,
-      hp.updated_at,
-      hp.rshares,
-      hp.abs_rshares,
-      hp.json,
-      hp.is_hidden,
-      hp.is_grayed,
-      hp.total_votes,
-      hp.sc_trend,
-      hp.role_title,
-      hp.community_title,
-      hp.role_id,
-      hp.is_pinned,
-      hp.curator_payout_value
-    FROM hive_posts_view hp
-    INNER JOIN hive_post_tags hpt ON hpt.post_id = hp.id
-    WHERE hp.promoted > 0 AND ( __post_id = 0 OR hp.id < __post_id )
-          AND ( ( __community IS NULL ) OR ( ( __community IS NOT NULL ) AND ( __category_id = 0 OR hp.category_id = __category_id ) ) )
-    ORDER BY hp.promoted DESC, hp.id DESC LIMIT _limit;
-END
-$function$
-language plpgsql STABLE;
diff --git a/hive/db/sql_scripts/condenser_get_discussions_by_trending.sql b/hive/db/sql_scripts/condenser_get_discussions_by_trending.sql
deleted file mode 100644
index 277e845083349a2c11365d2937fba8880e10aa33..0000000000000000000000000000000000000000
--- a/hive/db/sql_scripts/condenser_get_discussions_by_trending.sql
+++ /dev/null
@@ -1,126 +0,0 @@
-DROP FUNCTION IF EXISTS condenser_get_discussions_by_trending;
-
-CREATE FUNCTION condenser_get_discussions_by_trending( in _tag VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
-RETURNS SETOF bridge_api_post
-AS
-$function$
-DECLARE
-  __post_id INT;
-  __community VARCHAR;
-  __tag_id INT;
-  __category_id INT;
-BEGIN
-
-  __post_id     = find_comment_id( _author, _permlink, True );
-  __community   = ( SELECT substring(_tag from '^hive-') );
-
-  __tag_id      = ( SELECT id FROM hive_tag_data WHERE tag = _tag );
-  __category_id = ( SELECT id FROM hive_category_data WHERE category = _tag );
-
-  RETURN QUERY SELECT
-      hp.id,
-      hp.author,
-      hp.parent_author,
-      hp.author_rep,
-      hp.root_title,
-      hp.beneficiaries,
-      hp.max_accepted_payout,
-      hp.percent_hbd,
-      hp.url,
-      hp.permlink,
-      hp.parent_permlink_or_category,
-      hp.title,
-      hp.body,
-      hp.category,
-      hp.depth,
-      hp.promoted,
-      hp.payout,
-      hp.pending_payout,
-      hp.payout_at,
-      hp.is_paidout,
-      hp.children,
-      hp.votes,
-      hp.created_at,
-      hp.updated_at,
-      hp.rshares,
-      hp.abs_rshares,
-      hp.json,
-      hp.is_hidden,
-      hp.is_grayed,
-      hp.total_votes,
-      hp.sc_trend,
-      hp.role_title,
-      hp.community_title,
-      hp.role_id,
-      hp.is_pinned,
-      hp.curator_payout_value
-    FROM hive_posts_view hp
-    INNER JOIN hive_post_tags hpt ON hpt.post_id = hp.id
-    WHERE hp.is_paidout = '0' AND ( __tag_id = 0 OR hpt.tag_id = __tag_id ) AND ( __post_id = 0 OR hp.id < __post_id )
-          AND ( ( __community IS NULL ) OR ( ( __community IS NOT NULL ) AND ( ( __category_id = 0 OR hp.category_id = __category_id ) AND hp.depth = 0 ) ) )
-    ORDER BY hp.sc_trend DESC, hp.id DESC LIMIT _limit;
-END
-$function$
-language plpgsql STABLE;
-
-DROP FUNCTION IF EXISTS condenser_get_discussions_by_trending_with_empty_tag;
-
-CREATE FUNCTION condenser_get_discussions_by_trending_with_empty_tag( in _tag VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
-RETURNS SETOF bridge_api_post
-AS
-$function$
-DECLARE
-  __post_id INT;
-  __community VARCHAR;
-  __category_id INT;
-BEGIN
-
-  __post_id     = find_comment_id( _author, _permlink, True );
-  __community   = ( SELECT substring(_tag from '^hive-') );
-
-  __category_id = ( SELECT id FROM hive_category_data WHERE category = _tag );
-
-  RETURN QUERY SELECT
-      hp.id,
-      hp.author,
-      hp.parent_author,
-      hp.author_rep,
-      hp.root_title,
-      hp.beneficiaries,
-      hp.max_accepted_payout,
-      hp.percent_hbd,
-      hp.url,
-      hp.permlink,
-      hp.parent_permlink_or_category,
-      hp.title,
-      hp.body,
-      hp.category,
-      hp.depth,
-      hp.promoted,
-      hp.payout,
-      hp.pending_payout,
-      hp.payout_at,
-      hp.is_paidout,
-      hp.children,
-      hp.votes,
-      hp.created_at,
-      hp.updated_at,
-      hp.rshares,
-      hp.abs_rshares,
-      hp.json,
-      hp.is_hidden,
-      hp.is_grayed,
-      hp.total_votes,
-      hp.sc_trend,
-      hp.role_title,
-      hp.community_title,
-      hp.role_id,
-      hp.is_pinned,
-      hp.curator_payout_value
-    FROM hive_posts_view hp
-    WHERE hp.is_paidout = '0' AND ( __post_id = 0 OR hp.id < __post_id )
-          AND ( ( __community IS NULL ) OR ( ( __community IS NOT NULL ) AND ( ( __category_id = 0 OR hp.category_id = __category_id ) AND hp.depth = 0 ) ) )
-    ORDER BY hp.sc_trend DESC, hp.id DESC LIMIT _limit;
-END
-$function$
-language plpgsql STABLE;
diff --git a/hive/db/sql_scripts/condenser_get_follow_counts.sql b/hive/db/sql_scripts/condenser_get_follow_counts.sql
deleted file mode 100644
index b6db581b5842c595ff8b04cc5070e12c838b4b63..0000000000000000000000000000000000000000
--- a/hive/db/sql_scripts/condenser_get_follow_counts.sql
+++ /dev/null
@@ -1,20 +0,0 @@
-DROP FUNCTION IF EXISTS condenser_get_follow_counts;
-
-CREATE FUNCTION condenser_get_follow_counts( in _account VARCHAR )
-RETURNS TABLE(
-    following hive_accounts.following%TYPE,
-    followers hive_accounts.followers%TYPE
-)
-AS
-$function$
-DECLARE
-BEGIN
-
-  RETURN QUERY SELECT
-    ha.following, ha.followers
-    FROM hive_accounts ha
-    WHERE ha.name = _account;
-
-END
-$function$
-language plpgsql STABLE;
diff --git a/hive/db/sql_scripts/condenser_get_names_by_followers.sql b/hive/db/sql_scripts/condenser_get_names_by_followers.sql
deleted file mode 100644
index 530a06c5c6550ceb22d43cca608a969c8587002c..0000000000000000000000000000000000000000
--- a/hive/db/sql_scripts/condenser_get_names_by_followers.sql
+++ /dev/null
@@ -1,38 +0,0 @@
-DROP FUNCTION IF EXISTS condenser_get_names_by_followers;
-
-CREATE FUNCTION condenser_get_names_by_followers( in _account VARCHAR, in _start_account VARCHAR, in _state SMALLINT, _limit SMALLINT )
-RETURNS TABLE(
-    names hive_accounts.name%TYPE
-)
-AS
-$function$
-DECLARE
-  __account_id INT := find_account_id( _account, True );
-  __start_account_id INT := 0;
-  __created_at TIMESTAMP;
-BEGIN
-
-  IF _start_account <> '' THEN
-    __start_account_id = find_account_id( _start_account, True );
-  END IF;
-
-  IF __start_account_id <> 0 THEN
-    SELECT hf.created_at
-    INTO __created_at
-    FROM hive_follows hf
-    WHERE hf.following = __account_id AND hf.follower = __start_account_id;
-  END IF;
-
-  RETURN QUERY SELECT
-    name
-  FROM hive_follows hf
-  LEFT JOIN hive_accounts ha ON hf.follower = ha.id
-  WHERE hf.following = __account_id
-  AND state = _state
-  AND ( __start_account_id = 0 OR hf.created_at <= __created_at )
-  ORDER BY hf.created_at DESC
-  LIMIT _limit;
-
-END
-$function$
-language plpgsql STABLE;
diff --git a/hive/db/sql_scripts/condenser_get_names_by_following.sql b/hive/db/sql_scripts/condenser_get_names_by_following.sql
deleted file mode 100644
index 1b7f48a00e6f6532ce016185899622938060162c..0000000000000000000000000000000000000000
--- a/hive/db/sql_scripts/condenser_get_names_by_following.sql
+++ /dev/null
@@ -1,38 +0,0 @@
-DROP FUNCTION IF EXISTS condenser_get_names_by_following;
-
-CREATE FUNCTION condenser_get_names_by_following( in _account VARCHAR, in _start_account VARCHAR, in _state SMALLINT, _limit SMALLINT )
-RETURNS TABLE(
-    names hive_accounts.name%TYPE
-)
-AS
-$function$
-DECLARE
-  __account_id INT := find_account_id( _account, True );
-  __start_account_id INT := 0;
-  __created_at TIMESTAMP;
-BEGIN
-
-  IF _start_account <> '' THEN
-    __start_account_id = find_account_id( _start_account, True );
-  END IF;
-
-  IF __start_account_id <> 0 THEN
-    SELECT hf.created_at
-    INTO __created_at
-    FROM hive_follows hf
-    WHERE hf.follower = __account_id AND hf.following = __start_account_id;
-  END IF;
-
-  RETURN QUERY SELECT
-    name
-  FROM hive_follows hf
-  LEFT JOIN hive_accounts ha ON hf.following = ha.id
-  WHERE hf.follower = __account_id
-  AND state = _state
-  AND ( __start_account_id = 0 OR hf.created_at <= __created_at )
-  ORDER BY hf.created_at DESC
-  LIMIT _limit;
-
-END
-$function$
-language plpgsql STABLE;
diff --git a/hive/db/sql_scripts/condenser_get_post_discussions_by_payout.sql b/hive/db/sql_scripts/condenser_get_post_discussions_by_payout.sql
deleted file mode 100644
index 4996aaa81a78874cede434a1db0a0f0e2d80af62..0000000000000000000000000000000000000000
--- a/hive/db/sql_scripts/condenser_get_post_discussions_by_payout.sql
+++ /dev/null
@@ -1,55 +0,0 @@
-DROP FUNCTION IF EXISTS condenser_get_post_discussions_by_payout;
-
-CREATE FUNCTION condenser_get_post_discussions_by_payout( in _tag VARCHAR, in _author VARCHAR, in _permlink VARCHAR, in _limit SMALLINT )
-RETURNS SETOF bridge_api_post
-AS
-$function$
-DECLARE
-  __post_id INT;
-  __category_id INT;
-BEGIN
-  __post_id = find_comment_id( _author, _permlink, True );
-  __category_id = ( SELECT id FROM hive_category_data WHERE category = _tag );
-  RETURN QUERY SELECT
-      hp.id,
-      hp.author,
-      hp.parent_author,
-      hp.author_rep,
-      hp.root_title,
-      hp.beneficiaries,
-      hp.max_accepted_payout,
-      hp.percent_hbd,
-      hp.url,
-      hp.permlink,
-      hp.parent_permlink_or_category,
-      hp.title,
-      hp.body,
-      hp.category,
-      hp.depth,
-      hp.promoted,
-      hp.payout,
-      hp.pending_payout,
-      hp.payout_at,
-      hp.is_paidout,
-      hp.children,
-      hp.votes,
-      hp.created_at,
-      hp.updated_at,
-      hp.rshares,
-      hp.abs_rshares,
-      hp.json,
-      hp.is_hidden,
-      hp.is_grayed,
-      hp.total_votes,
-      hp.sc_trend,
-      hp.role_title,
-      hp.community_title,
-      hp.role_id,
-      hp.is_pinned,
-      hp.curator_payout_value
-    FROM hive_posts_view hp
-    WHERE hp.is_paidout = '0' AND hp.depth = 0 AND ( __category_id = 0 OR hp.category_id = __category_id )
-    ORDER BY (hp.payout+hp.pending_payout) DESC, hp.id DESC LIMIT _limit;
-END
-$function$
-language plpgsql STABLE;
diff --git a/hive/db/sql_scripts/condenser_tags.sql b/hive/db/sql_scripts/condenser_tags.sql
new file mode 100644
index 0000000000000000000000000000000000000000..0812832b4b04ef8eccef4f6aed217d057a893915
--- /dev/null
+++ b/hive/db/sql_scripts/condenser_tags.sql
@@ -0,0 +1,50 @@
+DROP FUNCTION IF EXISTS condenser_get_top_trending_tags_summary;
+CREATE FUNCTION condenser_get_top_trending_tags_summary( in _limit INT )
+RETURNS SETOF VARCHAR
+AS
+$function$
+BEGIN
+  RETURN QUERY SELECT
+      hcd.category
+  FROM
+      hive_category_data hcd
+      JOIN hive_posts hp ON hp.category_id = hcd.id
+  WHERE hp.counter_deleted = 0 AND NOT hp.is_paidout
+  GROUP BY hcd.category
+  ORDER BY SUM(hp.payout + hp.pending_payout) DESC
+  LIMIT _limit;
+END
+$function$
+language plpgsql STABLE;
+
+DROP FUNCTION IF EXISTS condenser_get_trending_tags;
+CREATE FUNCTION condenser_get_trending_tags( in _category VARCHAR, in _limit INT )
+RETURNS TABLE( category VARCHAR, total_posts BIGINT, top_posts BIGINT, total_payouts hive_posts.payout%TYPE )
+AS
+$function$
+DECLARE
+  __category_id INT;
+  __payout_limit hive_posts.payout%TYPE;
+BEGIN
+  __category_id = find_category_id( _category, _category <> '' );
+  IF __category_id <> 0 THEN
+      SELECT SUM(hp.payout + hp.pending_payout) INTO __payout_limit
+      FROM hive_posts hp
+      WHERE hp.category_id = __category_id AND hp.counter_deleted = 0 AND NOT hp.is_paidout;
+  END IF;
+  RETURN QUERY SELECT
+      hcd.category,
+      COUNT(*) AS total_posts,
+      SUM(CASE WHEN hp.depth = 0 THEN 1 ELSE 0 END) AS top_posts,
+      SUM(hp.payout + hp.pending_payout) AS total_payouts
+  FROM
+      hive_posts hp
+      JOIN hive_category_data hcd ON hcd.id = hp.category_id
+  WHERE NOT hp.is_paidout AND hp.counter_deleted = 0
+  GROUP BY hcd.category
+  HAVING __category_id = 0 OR SUM(hp.payout + hp.pending_payout) < __payout_limit OR ( SUM(hp.payout + hp.pending_payout) = __payout_limit AND hcd.category > _category )
+  ORDER BY SUM(hp.payout + hp.pending_payout) DESC, hcd.category ASC
+  LIMIT _limit;
+END
+$function$
+language plpgsql STABLE;
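+
+-- Illustrative usage only (the category name is an assumption, not part of this change):
+-- SELECT * FROM condenser_get_top_trending_tags_summary( 10 );
+-- SELECT * FROM condenser_get_trending_tags( 'photography', 10 );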
diff --git a/hive/db/sql_scripts/db_upgrade.sh b/hive/db/sql_scripts/db_upgrade.sh
index 0bb9b175945e852fda8d4bfa5c59d717bd113689..f88a769c622f83d501424d99230b333955cb194a 100755
--- a/hive/db/sql_scripts/db_upgrade.sh
+++ b/hive/db/sql_scripts/db_upgrade.sh
@@ -41,14 +41,10 @@ for sql in postgres_handle_view_changes.sql \
           condenser_api_post_ex_type.sql \
           condenser_get_blog.sql \
           condenser_get_content.sql \
-          condenser_get_discussions_by_created.sql \
           condenser_get_discussions_by_blog.sql \
+          condenser_tags.sql \
+          condenser_follows.sql \
           hot_and_trends.sql \
-          condenser_get_discussions_by_trending.sql \
-          condenser_get_discussions_by_hot.sql \
-          condenser_get_discussions_by_promoted.sql \
-          condenser_get_post_discussions_by_payout.sql \
-          condenser_get_comment_discussions_by_payout.sql \
           update_hive_posts_children_count.sql \
           update_hive_posts_api_helper.sql \
           database_api_list_comments.sql \
@@ -61,14 +57,13 @@ for sql in postgres_handle_view_changes.sql \
           condenser_get_by_feed_with_reblog.sql \
           condenser_get_by_blog.sql \
           bridge_get_account_posts_by_blog.sql \
-          condenser_get_follow_counts.sql \
-          condenser_get_names_by_followers.sql \
-          condenser_get_names_by_following.sql \
-          condenser_get_names_by_reblogged.sql
+          condenser_get_names_by_reblogged.sql \
+          condenser_get_discussions_by_comments.sql \
+          condenser_get_account_reputations.sql
 
 do
-	echo Executing psql -U $1 -d $2 -f $sql
-	time psql -1 -v "ON_ERROR_STOP=1" -U $1 -d $2  -c '\timing' -f $sql 2>&1 | tee -a -i upgrade.log
+    echo Executing psql -U $1 -d $2 -f $sql
+    time psql -1 -v "ON_ERROR_STOP=1" -U $1 -d $2  -c '\timing' -f $sql 2>&1 | tee -a -i upgrade.log
   echo $?
 done
 
diff --git a/hive/server/bridge_api/methods.py b/hive/server/bridge_api/methods.py
index 839720f3c5ea33b3f1f3e0f001555a3c7bb8380c..ea8bd756708d6b6d6a48abfe0715b5cd078a170b 100644
--- a/hive/server/bridge_api/methods.py
+++ b/hive/server/bridge_api/methods.py
@@ -117,7 +117,7 @@ async def _get_ranked_posts_for_communities( db, sort:str, community, start_auth
 
     if sort == 'trending':
         result_with_pinned_posts = []
-        sql = "SELECT * FROM bridge_get_ranked_post_by_trends_for_community( (:community)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
+        sql = "SELECT * FROM bridge_get_ranked_post_by_trends_for_community( (:community)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT, True )"
         result_with_pinned_posts = await execute_community_query(db, pinned_sql, limit)
         limit -= len(result_with_pinned_posts)
         if limit > 0:
@@ -129,7 +129,7 @@ async def _get_ranked_posts_for_communities( db, sort:str, community, start_auth
         return await execute_community_query(db, sql, limit)
 
     if sort == 'created':
-        sql = "SELECT * FROM bridge_get_ranked_post_by_created_for_community( (:community)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
+        sql = "SELECT * FROM bridge_get_ranked_post_by_created_for_community( (:community)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT, True )"
         result_with_pinned_posts = await execute_community_query(db, pinned_sql, limit)
         limit -= len(result_with_pinned_posts)
         if limit > 0:
@@ -165,7 +165,7 @@ async def _get_ranked_posts_for_tag( db, sort:str, tag, start_author:str, start_
         return await execute_tags_query(db, sql, limit)
 
     if sort == 'payout':
-        sql = "SELECT * FROM bridge_get_ranked_post_by_payout_for_category( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
+        sql = "SELECT * FROM bridge_get_ranked_post_by_payout_for_category( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT, True )"
         return await execute_tags_query(db, sql, limit)
 
     if sort == 'payout_comments':
@@ -208,7 +208,7 @@ async def _get_ranked_posts_for_all( db, sort:str, start_author:str, start_perml
         return await execute_query(db, sql, limit)
 
     if sort == 'payout':
-        sql = "SELECT * FROM bridge_get_ranked_post_by_payout( (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
+        sql = "SELECT * FROM bridge_get_ranked_post_by_payout( (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT, True )"
         return await execute_query(db, sql, limit)
 
     if sort == 'payout_comments':
diff --git a/hive/server/common/helpers.py b/hive/server/common/helpers.py
index 14feebb189fd21c95e7a4fd0ef654265862cf3dc..973246279726048958c56ef038243dd994bc685b 100644
--- a/hive/server/common/helpers.py
+++ b/hive/server/common/helpers.py
@@ -155,8 +155,10 @@ def valid_offset(offset, ubound=None):
 
 def valid_follow_type(follow_type: str):
     """Ensure follow type is valid steemd type."""
-    assert follow_type in ['blog', 'ignore'], 'invalid follow_type `%s`' % follow_type
-    return follow_type
+    # ABW: should be extended with blacklists etc. (and those should be implemented as next 'state' values)
+    supported_follow_types = dict(blog=1, ignore=2)
+    assert follow_type in supported_follow_types, "Unsupported follow type, valid types: {}".format(", ".join(supported_follow_types.keys()))
+    return supported_follow_types[follow_type]
 
 def valid_date(date, allow_empty=False):
     """ Ensure that date is in correct format """
diff --git a/hive/server/condenser_api/cursor.py b/hive/server/condenser_api/cursor.py
index 585c74bc268522d5023c22209d7b5d2da279a39d..763ddd17b35104c56b40f0686925c68d47d3be57 100644
--- a/hive/server/condenser_api/cursor.py
+++ b/hive/server/condenser_api/cursor.py
@@ -1,70 +1,62 @@
-"""Cursor-based pagination queries, mostly supporting condenser_api."""
-
-from hive.server.common.helpers import last_month
-
-from hive.server.condenser_api.objects import _condenser_post_object
-from hive.server.database_api.methods import find_votes_impl, VotesPresentation
-
-# pylint: disable=too-many-lines
-
-async def get_followers(db, account: str, start: str, follow_type: str, limit: int):
-    """Get a list of accounts following by a given account."""
-    state = 2 if follow_type == 'ignore' else 1
-    sql = " SELECT * FROM condenser_get_names_by_followers( '{}', '{}', {}::SMALLINT, {}::SMALLINT ) ".format( account, start, state, limit )
-
-    return await db.query_col(sql)
-
-async def get_following(db, account: str, start: str, follow_type: str, limit: int):
-    """Get a list of accounts followed by a given account."""
-    state = 2 if follow_type == 'ignore' else 1
-    sql = " SELECT * FROM condenser_get_names_by_following( '{}', '{}', {}::SMALLINT, {}::SMALLINT ) ".format( account, start, state, limit )
-
-    return await db.query_col(sql)
-
-async def get_follow_counts(db, account: str):
-    """Return following/followers count for `account`."""
-    sql = "SELECT * FROM condenser_get_follow_counts( '{}' )".format( account )
-    return dict (await db.query_row(sql) )
-
-async def get_reblogged_by(db, author: str, permlink: str):
-    """Return all rebloggers of a post."""
-
-    sql = "SELECT * FROM condenser_get_names_by_reblogged( '{}', '{}' )".format( author, permlink )
-    names = await db.query_col(sql)
-
-    if author in names:
-        names.remove(author)
-    return names
-
-async def get_data(db, sql:str, truncate_body: int = 0):
-    result = await db.query_all(sql); 
-
-    posts = []
-    for row in result:
-        row = dict(row)
-        post = _condenser_post_object(row, truncate_body=truncate_body)
-
-        post['active_votes'] = await find_votes_impl(db, row['author'], row['permlink'], VotesPresentation.CondenserApi)
-        posts.append(post)
-
-    return posts
-
-async def get_by_blog_without_reblog(db, account: str, start_permlink: str = '', limit: int = 20, truncate_body: int = 0):
-  """Get a list of posts for an author's blog without reblogs."""
-  sql = " SELECT * FROM condenser_get_by_blog_without_reblog( '{}', '{}', {} ) ".format( account, start_permlink, limit )
-  return await get_data(db, sql, truncate_body )
-
-async def get_by_account_comments(db, account: str, start_permlink: str = '', limit: int = 20, truncate_body: int = 0):
-  """Get a list of posts representing comments by an author."""
-  sql = " SELECT * FROM condenser_get_by_account_comments( '{}', '{}', {} ) ".format( account, start_permlink, limit )
-  return await get_data(db, sql, truncate_body )
-
-async def get_by_replies_to_account(db, start_author: str, start_permlink: str = '', limit: int = 20, truncate_body: int = 0):
-  """Get a list of posts representing replies to an author."""
-  sql = " SELECT * FROM condenser_get_by_replies_to_account( '{}', '{}', {} ) ".format( start_author, start_permlink, limit )
-  return await get_data(db, sql, truncate_body )
-
-async def get_by_blog(db, account: str = '', start_author: str = '', start_permlink: str = '', limit: int = 20):
-  """Get a list of posts for an author's blog."""
-  sql = " SELECT * FROM condenser_get_by_blog( '{}', '{}', '{}', {} ) ".format( account, start_author, start_permlink, limit )
-  return await get_data(db, sql )
+"""Cursor-based pagination queries, mostly supporting condenser_api."""
+
+from hive.server.common.helpers import last_month
+
+from hive.server.condenser_api.objects import _condenser_post_object
+from hive.server.database_api.methods import find_votes_impl, VotesPresentation
+
+# pylint: disable=too-many-lines
+
+async def get_followers(db, account: str, start: str, state: int, limit: int):
+    """Get a list of accounts following given account."""
+    sql = "SELECT * FROM condenser_get_followers( (:account)::VARCHAR, (:start)::VARCHAR, :type, :limit )"
+    return await db.query_col(sql, account=account, start=start, type=state, limit=limit)
+
+async def get_following(db, account: str, start: str, state: int, limit: int):
+    """Get a list of accounts followed by a given account."""
+    sql = "SELECT * FROM condenser_get_following( (:account)::VARCHAR, (:start)::VARCHAR, :type, :limit )"
+    return await db.query_col(sql, account=account, start=start, type=state, limit=limit)
+
+
+async def get_reblogged_by(db, author: str, permlink: str):
+    """Return all rebloggers of a post."""
+
+    sql = "SELECT * FROM condenser_get_names_by_reblogged( '{}', '{}' )".format( author, permlink )
+    names = await db.query_col(sql)
+
+    if author in names:
+        names.remove(author)
+    return names
+
+async def get_data(db, sql:str, truncate_body: int = 0):
+    result = await db.query_all(sql)
+
+    posts = []
+    for row in result:
+        row = dict(row)
+        post = _condenser_post_object(row, truncate_body=truncate_body)
+
+        post['active_votes'] = await find_votes_impl(db, row['author'], row['permlink'], VotesPresentation.CondenserApi)
+        posts.append(post)
+
+    return posts
+
+async def get_by_blog_without_reblog(db, account: str, start_permlink: str = '', limit: int = 20, truncate_body: int = 0):
+    """Get a list of posts for an author's blog without reblogs."""
+    sql = "SELECT * FROM condenser_get_by_blog_without_reblog( '{}', '{}', {} )".format(account, start_permlink, limit)
+    return await get_data(db, sql, truncate_body)
+
+async def get_by_account_comments(db, account: str, start_permlink: str = '', limit: int = 20, truncate_body: int = 0):
+    """Get a list of posts representing comments by an author."""
+    sql = "SELECT * FROM condenser_get_by_account_comments( '{}', '{}', {} )".format(account, start_permlink, limit)
+    return await get_data(db, sql, truncate_body)
+
+async def get_by_replies_to_account(db, start_author: str, start_permlink: str = '', limit: int = 20, truncate_body: int = 0):
+    """Get a list of posts representing replies to an author."""
+    sql = "SELECT * FROM condenser_get_by_replies_to_account( '{}', '{}', {} )".format(start_author, start_permlink, limit)
+    return await get_data(db, sql, truncate_body)
+
+async def get_by_blog(db, account: str = '', start_author: str = '', start_permlink: str = '', limit: int = 20):
+    """Get a list of posts for an author's blog."""
+    sql = "SELECT * FROM condenser_get_by_blog( '{}', '{}', '{}', {} )".format(account, start_author, start_permlink, limit)
+    return await get_data(db, sql)
diff --git a/hive/server/condenser_api/methods.py b/hive/server/condenser_api/methods.py
index ebde88edb669fa6f155962e0dd77d3bea13d39af..4f9667f0aad4e9155d224162a6328ce76ea7de9b 100644
--- a/hive/server/condenser_api/methods.py
+++ b/hive/server/condenser_api/methods.py
@@ -1,463 +1,482 @@
-"""Steemd/condenser_api compatibility layer API methods."""
-from functools import wraps
-
-import hive.server.condenser_api.cursor as cursor
-from hive.server.condenser_api.objects import _mute_votes, _condenser_post_object
-from hive.server.common.helpers import (
-    ApiError,
-    return_error_info,
-    json_date,
-    valid_account,
-    valid_permlink,
-    valid_tag,
-    valid_offset,
-    valid_limit,
-    valid_follow_type)
-from hive.server.common.mutes import Mutes
-from hive.server.database_api.methods import find_votes_impl, VotesPresentation
-
-from hive.server.hive_api.public import get_by_feed_with_reblog_impl
-
-# pylint: disable=too-many-arguments,line-too-long,too-many-lines
-
-@return_error_info
-async def get_account_votes(context, account):
-    """Return an info message about get_acccount_votes being unsupported."""
-    # pylint: disable=unused-argument
-    assert False, "get_account_votes is no longer supported, for details see https://hive.blog/steemit/@steemitdev/additional-public-api-change"
-
-
-# Follows Queries
-
-def _legacy_follower(follower, following, follow_type):
-    return dict(follower=follower, following=following, what=[follow_type])
-
-@return_error_info
-async def get_followers(context, account: str, start: str, follow_type: str = None,
-                        limit: int = None, **kwargs):
-    """Get all accounts following `account`. (EOL)"""
-    # `type` reserved word workaround
-    if not follow_type and 'type' in kwargs:
-        follow_type = kwargs['type']
-    if not follow_type:
-        follow_type = 'blog'
-    followers = await cursor.get_followers(
-        context['db'],
-        valid_account(account),
-        valid_account(start, allow_empty=True),
-        valid_follow_type(follow_type),
-        valid_limit(limit, 1000, None))
-    return [_legacy_follower(name, account, follow_type) for name in followers]
-
-@return_error_info
-async def get_following(context, account: str, start: str, follow_type: str = None,
-                        limit: int = None, **kwargs):
-    """Get all accounts `account` follows. (EOL)"""
-    # `type` reserved word workaround
-    if not follow_type and 'type' in kwargs:
-        follow_type = kwargs['type']
-    if not follow_type:
-        follow_type = 'blog'
-    following = await cursor.get_following(
-        context['db'],
-        valid_account(account),
-        valid_account(start, allow_empty=True),
-        valid_follow_type(follow_type),
-        valid_limit(limit, 1000, None))
-    return [_legacy_follower(account, name, follow_type) for name in following]
-
-@return_error_info
-async def get_follow_count(context, account: str):
-    """Get follow count stats. (EOL)"""
-    count = await cursor.get_follow_counts(
-        context['db'],
-        valid_account(account))
-    return dict(account=account,
-                following_count=count['following'],
-                follower_count=count['followers'])
-
-@return_error_info
-async def get_reblogged_by(context, author: str, permlink: str):
-    """Get all rebloggers of a post."""
-    return await cursor.get_reblogged_by(
-        context['db'],
-        valid_account(author),
-        valid_permlink(permlink))
-
-@return_error_info
-async def get_account_reputations(context, account_lower_bound: str = None, limit: int = None):
-    db = context['db']
-    return await _get_account_reputations_impl(db, True, account_lower_bound, limit)
-
-async def _get_account_reputations_impl(db, fat_node_style, account_lower_bound, limit):
-    """Enumerate account reputations."""
-    limit = valid_limit(limit, 1000, None)
-
-    sql = "SELECT * FROM condenser_get_account_reputations( '{}', {}, {} )".format( account_lower_bound, account_lower_bound is None, limit )
-    rows = await db.query_all(sql, start=account_lower_bound, limit=limit)
-    if fat_node_style:
-        return [dict(account=r[0], reputation=r[1]) for r in rows]
-    else:
-        return {'reputations': [dict(name=r[0], reputation=r[1]) for r in rows]}
-
-# Content Primitives
-
-@return_error_info
-async def get_content(context, author: str, permlink: str, observer=None):
-    db = context['db']
-    return await _get_content_impl(db, True, author, permlink, observer)
-
-@return_error_info
-async def _get_content_impl(db, fat_node_style, author: str, permlink: str, observer=None):
-    """Get a single post object."""
-    valid_account(author)
-    valid_permlink(permlink)
-
-    sql = "SELECT * FROM condenser_get_content(:author, :permlink)"
-
-    post = None
-    result = await db.query_all(sql, author=author, permlink=permlink)
-    if result:
-        result = dict(result[0])
-        post = _condenser_post_object(result, 0, fat_node_style)
-        post['active_votes'] = await find_votes_impl(db, author, permlink, VotesPresentation.ActiveVotes if fat_node_style else VotesPresentation.CondenserApi)
-        if not observer:
-            post['active_votes'] = _mute_votes(post['active_votes'], Mutes.all())
-        else:
-            blacklists_for_user = await Mutes.get_blacklists_for_observer(observer, {'db':db})
-            post['active_votes'] = _mute_votes(post['active_votes'], blacklists_for_user.keys())
-
-    return post
-
-@return_error_info
-async def get_content_replies(context, author: str, permlink: str):
-    db = context['db']
-    return await _get_content_replies_impl(db, True, author, permlink)
-
-@return_error_info
-async def _get_content_replies_impl(db, fat_node_style, author: str, permlink: str):
-    """Get a list of post objects based on parent."""
-    valid_account(author)
-    valid_permlink(permlink)
-
-    sql = "SELECT * FROM condenser_get_content_replies(:author, :permlink)"
-    result = await db.query_all(sql, author=author, permlink=permlink)
-
-    muted_accounts = Mutes.all()
-
-    posts = []
-    for row in result:
-        row = dict(row)
-        post = _condenser_post_object(row, get_content_additions=fat_node_style)
-        post['active_votes'] = await find_votes_impl(db, row['author'], row['permlink'], VotesPresentation.ActiveVotes if fat_node_style else VotesPresentation.CondenserApi)
-        post['active_votes'] = _mute_votes(post['active_votes'], muted_accounts)
-        posts.append(post)
-
-    return posts
-
-# Discussion Queries
-
-def nested_query_compat(function):
-    """Unpack strange format used by some clients, accepted by steemd.
-
-    Sometimes a discussion query object is nested inside a list[1]. Eg:
-
-        {... "method":"condenser_api.get_discussions_by_hot",
-             "params":[{"tag":"steem","limit":1}]}
-
-    In these cases jsonrpcserver dispatch just shoves it into the first
-    arg. This decorator checks for this specific condition and unpacks
-    the query to be passed as kwargs.
-    """
-    @wraps(function)
-    def wrapper(*args, **kwargs):
-        """Checks for specific condition signature and unpacks query"""
-        if args and not kwargs and len(args) == 2 and isinstance(args[1], dict):
-            return function(args[0], **args[1])
-        return function(*args, **kwargs)
-    return wrapper
-
-@return_error_info
-@nested_query_compat
-async def get_posts_by_given_sort(context, sort: str, start_author: str = '', start_permlink: str = '',
-                                     limit: int = 20, tag: str = None,
-                                     truncate_body: int = 0, filter_tags: list = None):
-    """Query posts, sorted by creation date."""
-    assert not filter_tags, 'filter_tags not supported'
-
-    db = context['db']
-
-    start_author    = valid_account(start_author, allow_empty=True),
-    start_permlink  = valid_permlink(start_permlink, allow_empty=True),
-    limit           = valid_limit(limit, 100, 20),
-    tag             = valid_tag(tag, allow_empty=True)
-
-    posts = []
-   
-    if sort == 'created':
-      if tag == '':
-        sql = "SELECT * FROM condenser_get_discussions_by_created_with_empty_tag( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
-      else:
-        sql = "SELECT * FROM condenser_get_discussions_by_created( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
-    elif sort == 'trending':
-      if tag == '':
-        sql = "SELECT * FROM condenser_get_discussions_by_trending_with_empty_tag( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
-      else:
-        sql = "SELECT * FROM condenser_get_discussions_by_trending( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
-    elif sort == 'hot':
-      if tag == '':
-        sql = "SELECT * FROM condenser_get_discussions_by_hot_with_empty_tag( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
-      else:
-        sql = "SELECT * FROM condenser_get_discussions_by_hot( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
-    elif sort == 'promoted':
-      if tag == '':
-        sql = "SELECT * FROM condenser_get_discussions_by_promoted_with_empty_tag( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
-      else:
-        sql = "SELECT * FROM condenser_get_discussions_by_promoted( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
-    elif sort == 'post_by_payout':
-      sql = "SELECT * FROM condenser_get_post_discussions_by_payout( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
-    elif sort == 'comment_by_payout':
-      sql = "SELECT * FROM condenser_get_comment_discussions_by_payout( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
-    else:
-      return posts
-
-    sql_result = await db.query_all(sql, tag=tag, author=start_author, permlink=start_permlink, limit=limit )
-
-    for row in sql_result:
-        post = _condenser_post_object(row, truncate_body)
-        post['active_votes'] = await find_votes_impl(db, row['author'], row['permlink'], VotesPresentation.CondenserApi)
-        posts.append(post)
-    return posts
-
-@return_error_info
-@nested_query_compat
-async def get_discussions_by_created(context, start_author: str = '', start_permlink: str = '',
-                                     limit: int = 20, tag: str = None,
-                                     truncate_body: int = 0, filter_tags: list = None):
-  return await get_posts_by_given_sort(context, 'created', start_author, start_permlink, limit, tag, truncate_body, filter_tags)
-
-@return_error_info
-@nested_query_compat
-async def get_discussions_by_trending(context, start_author: str = '', start_permlink: str = '',
-                                      limit: int = 20, tag: str = None,
-                                      truncate_body: int = 0, filter_tags: list = None):
-  return await get_posts_by_given_sort(context, 'trending', start_author, start_permlink, limit, tag, truncate_body, filter_tags)
-
-@return_error_info
-@nested_query_compat
-async def get_discussions_by_hot(context, start_author: str = '', start_permlink: str = '',
-                                 limit: int = 20, tag: str = None,
-                                 truncate_body: int = 0, filter_tags: list = None):
-  return await get_posts_by_given_sort(context, 'hot', start_author, start_permlink, limit, tag, truncate_body, filter_tags)
-
-@return_error_info
-@nested_query_compat
-async def get_discussions_by_promoted(context, start_author: str = '', start_permlink: str = '',
-                                      limit: int = 20, tag: str = None,
-                                      truncate_body: int = 0, filter_tags: list = None):
-  return await get_posts_by_given_sort(context, 'promoted', start_author, start_permlink, limit, tag, truncate_body, filter_tags)
-
-@return_error_info
-@nested_query_compat
-async def get_post_discussions_by_payout(context, start_author: str = '', start_permlink: str = '',
-                                         limit: int = 20, tag: str = None,
-                                         truncate_body: int = 0):
-  return await get_posts_by_given_sort(context, 'post_by_payout', start_author, start_permlink, limit, tag, truncate_body, [])
-
-@return_error_info
-@nested_query_compat
-async def get_comment_discussions_by_payout(context, start_author: str = '', start_permlink: str = '',
-                                            limit: int = 20, tag: str = None,
-                                            truncate_body: int = 0):
-  return await get_posts_by_given_sort(context, 'comment_by_payout', start_author, start_permlink, limit, tag, truncate_body, [])
-
-@return_error_info
-@nested_query_compat
-async def get_discussions_by_blog(context, tag: str = None, start_author: str = '',
-                                  start_permlink: str = '', limit: int = 20,
-                                  truncate_body: int = 0, filter_tags: list = None):
-    """Retrieve account's blog posts, including reblogs."""
-    assert tag, '`tag` cannot be blank'
-    assert not filter_tags, 'filter_tags not supported'
-    valid_account(tag)
-    valid_account(start_author, allow_empty=True)
-    valid_permlink(start_permlink, allow_empty=True)
-    valid_limit(limit, 100, 20)
-
-    sql = """
-        SELECT * FROM get_discussions_by_blog(:author, :start_author, :start_permlink, :limit)
-    """
-
-    db = context['db']
-    result = await db.query_all(sql, author=tag, start_author=start_author, start_permlink=start_permlink, limit=limit)
-    posts_by_id = []
-
-    for row in result:
-        row = dict(row)
-        post = _condenser_post_object(row, truncate_body=truncate_body)
-        post['active_votes'] = await find_votes_impl(db, post['author'], post['permlink'], VotesPresentation.CondenserApi)
-        post['active_votes'] = _mute_votes(post['active_votes'], Mutes.all())
-        #posts_by_id[row['post_id']] = post
-        posts_by_id.append(post)
-
-    return posts_by_id
-
-@return_error_info
-@nested_query_compat
-async def get_discussions_by_feed(context, tag: str = None, start_author: str = '',
-                                  start_permlink: str = '', limit: int = 20,
-                                  truncate_body: int = 0, filter_tags: list = None):
-    """Retrieve account's personalized feed."""
-    assert tag, '`tag` cannot be blank'
-    assert not filter_tags, 'filter_tags not supported'
-    return await get_by_feed_with_reblog_impl(
-        context['db'],
-        valid_account(tag),
-        valid_account(start_author, allow_empty=True),
-        valid_permlink(start_permlink, allow_empty=True),
-        valid_limit(limit, 100, 20),
-        truncate_body)
-
-@return_error_info
-@nested_query_compat
-async def get_discussions_by_comments(context, start_author: str = None, start_permlink: str = '',
-                                      limit: int = 20, truncate_body: int = 0,
-                                      filter_tags: list = None):
-    """Get comments by made by author."""
-    assert start_author, '`start_author` cannot be blank'
-    assert not filter_tags, 'filter_tags not supported'
-    valid_account(start_author)
-    valid_permlink(start_permlink, allow_empty=True)
-    valid_limit(limit, 100, 20)
-
-    posts = []
-    db = context['db']
-
-    sql = " SELECT * FROM condenser_get_discussions_by_comments( '{}', '{}', {} ) ".format( start_author, start_permlink, limit )
-    result = await db.query_all(sql)
-
-    for row in result:
-        row = dict(row)
-        post = _condenser_post_object(row, truncate_body=truncate_body)
-        post['active_votes'] = await find_votes_impl(db, post['author'], post['permlink'], VotesPresentation.CondenserApi)
-        post['active_votes'] = _mute_votes(post['active_votes'], Mutes.all())
-        posts.append(post)
-
-    return posts
-
-@return_error_info
-@nested_query_compat
-async def get_replies_by_last_update(context, start_author: str = None, start_permlink: str = '',
-                                     limit: int = 20, truncate_body: int = 0):
-    """Get all replies made to any of author's posts."""
-    assert start_author, '`start_author` cannot be blank'
-
-    return await cursor.get_by_replies_to_account(
-        context['db'],
-        valid_account(start_author),
-        valid_permlink(start_permlink, allow_empty=True),
-        valid_limit(limit, 100, 20),
-        truncate_body)
-
-@return_error_info
-@nested_query_compat
-async def get_discussions_by_author_before_date(context, author: str = None, start_permlink: str = '',
-                                                before_date: str = '', limit: int = 10):
-    """Retrieve account's blog posts, without reblogs.
-
-    NOTE: before_date is completely ignored, and it appears to be broken and/or
-    completely ignored in steemd as well. This call is similar to
-    get_discussions_by_blog but does NOT serve reblogs.
-    """
-    # pylint: disable=invalid-name,unused-argument
-    assert author, '`author` cannot be blank'
-    return await cursor.get_by_blog_without_reblog(
-        context['db'],
-        valid_account(author),
-        valid_permlink(start_permlink, allow_empty=True),
-        valid_limit(limit, 100, 10))
-
-@return_error_info
-@nested_query_compat
-async def get_blog(context, account: str, start_entry_id: int = 0, limit: int = None):
-    """Get posts for an author's blog (w/ reblogs), paged by index/limit.
-
-    Equivalent to get_discussions_by_blog, but uses offset-based pagination.
-
-    Examples: (ABW: old description and examples were misleading as in many cases code worked differently, also now more cases actually work that gave error earlier)
-    (acct, -1, limit) for limit 1..500 - returns latest (no more than) limit posts
-    (acct, 0) - returns latest single post (ABW: this is a bug but I left it here because I'm afraid it was actively used - it should return oldest post)
-    (acct, 0, limit) for limit 1..500 - same as (acct, -1, limit) - see above
-    (acct, last_idx) for positive last_idx - returns last_idx oldest posts, or posts in range [last_idx..last_idx-500) when last_idx >= 500
-    (acct, last_idx, limit) for positive last_idx and limit 1..500 - returns posts in range [last_idx..last_idx-limit)
-    """
-    db = context['db']
-
-    account = valid_account(account)
-    if not start_entry_id:
-        start_entry_id = -1
-    start_entry_id = valid_offset(start_entry_id)
-    if not limit:
-        limit = max(start_entry_id + 1, 1)
-        limit = min(limit, 500)
-    limit = valid_limit(limit, 500, None)
-
-    sql = "SELECT * FROM condenser_get_blog(:account, :last, :limit)"
-    result = await db.query_all(sql, account=account, last=start_entry_id, limit=limit)
-
-    muted_accounts = Mutes.all()
-    out = []
-    for row in result:
-        row = dict(row)
-        post = _condenser_post_object(row)
-
-        post['active_votes'] = await find_votes_impl(db, row['author'], row['permlink'], VotesPresentation.CondenserApi)
-        post['active_votes'] = _mute_votes(post['active_votes'], muted_accounts)
-
-        out.append({"blog": account,
-                    "entry_id": row['entry_id'],
-                    "comment": post,
-                    "reblogged_on": json_date(row['reblogged_at'])})
-
-    return list(reversed(out))
-
-@return_error_info
-@nested_query_compat
-async def get_blog_entries(context, account: str, start_entry_id: int = 0, limit: int = None):
-    """Get 'entries' for an author's blog (w/ reblogs), paged by index/limit.
-
-    Interface identical to get_blog, but returns minimalistic post references.
-    """
-    db = context['db']
-
-    account = valid_account(account)
-    if not start_entry_id:
-        start_entry_id = -1
-    start_entry_id = valid_offset(start_entry_id)
-    if not limit:
-        limit = max(start_entry_id + 1, 1)
-        limit = min(limit, 500)
-    limit = valid_limit(limit, 500, None)
-
-    sql = "SELECT * FROM condenser_get_blog_entries(:account, :last, :limit)"
-    result = await db.query_all(sql, account=account, last=start_entry_id, limit=limit)
-
-    out = []
-    for row in result:
-        row = dict(row)
-        out.append({"blog": account,
-                    "entry_id": row['entry_id'],
-                    "author": row['author'],
-                    "permlink": row['permlink'],
-                    "reblogged_on": json_date(row['reblogged_at'])})
-
-    return list(reversed(out))
-
-@return_error_info
-async def get_active_votes(context, author: str, permlink: str):
-    """ Returns all votes for the given post. """
-    valid_account(author)
-    valid_permlink(permlink)
-    db = context['db']
-
-    return await find_votes_impl( db, author, permlink, VotesPresentation.ActiveVotes  )
+"""Steemd/condenser_api compatibility layer API methods."""
+from functools import wraps
+
+import hive.server.condenser_api.cursor as cursor
+from hive.server.condenser_api.objects import _mute_votes, _condenser_post_object
+from hive.server.common.helpers import (
+    ApiError,
+    return_error_info,
+    json_date,
+    valid_account,
+    valid_permlink,
+    valid_tag,
+    valid_offset,
+    valid_limit,
+    valid_follow_type)
+from hive.server.common.mutes import Mutes
+from hive.server.database_api.methods import find_votes_impl, VotesPresentation
+
+from hive.server.hive_api.public import get_by_feed_with_reblog_impl
+
+# pylint: disable=too-many-arguments,line-too-long,too-many-lines
+
+@return_error_info
+async def get_account_votes(context, account):
+    """Return an info message about get_acccount_votes being unsupported."""
+    # pylint: disable=unused-argument
+    assert False, "get_account_votes is no longer supported, for details see https://hive.blog/steemit/@steemitdev/additional-public-api-change"
+
+
+# Follows Queries
+
+def _legacy_follower(follower, following, follow_type):
+    return dict(follower=follower, following=following, what=[follow_type])
+
+@return_error_info
+async def get_followers(context, account: str, start: str = '', follow_type: str = None,
+                        limit: int = 1000, **kwargs):
+    """Get all accounts following `account`. (EOL)"""
+    # `type` reserved word workaround
+    if not follow_type and 'type' in kwargs:
+        follow_type = kwargs['type']
+    if not follow_type:
+        follow_type = 'blog'
+    followers = await cursor.get_followers(
+        context['db'],
+        valid_account(account),
+        valid_account(start, allow_empty=True),
+        valid_follow_type(follow_type),
+        valid_limit(limit, 1000, 1000))
+    return [_legacy_follower(name, account, follow_type) for name in followers]
+
+@return_error_info
+async def get_following(context, account: str, start: str = '', follow_type: str = None,
+                        limit: int = 1000, **kwargs):
+    """Get all accounts `account` follows. (EOL)"""
+    # `type` reserved word workaround
+    if not follow_type and 'type' in kwargs:
+        follow_type = kwargs['type']
+    if not follow_type:
+        follow_type = 'blog'
+    following = await cursor.get_following(
+        context['db'],
+        valid_account(account),
+        valid_account(start, allow_empty=True),
+        valid_follow_type(follow_type),
+        valid_limit(limit, 1000, 1000))
+    return [_legacy_follower(account, name, follow_type) for name in following]
+
+@return_error_info
+async def get_follow_count(context, account: str):
+    """Get follow count stats. (EOL)"""
+    db = context['db']
+    account = valid_account(account)
+    sql = "SELECT * FROM condenser_get_follow_count( (:account)::VARCHAR )"
+    counters = await db.query_row(sql, account=account)
+    return dict(account=account,
+                following_count=counters[0],
+                follower_count=counters[1])
+
+@return_error_info
+async def get_reblogged_by(context, author: str, permlink: str):
+    """Get all rebloggers of a post."""
+    return await cursor.get_reblogged_by(
+        context['db'],
+        valid_account(author),
+        valid_permlink(permlink))
+
+@return_error_info
+async def get_account_reputations(context, account_lower_bound: str = None, limit: int = None):
+    db = context['db']
+    return await _get_account_reputations_impl(db, True, account_lower_bound, limit)
+
+async def _get_account_reputations_impl(db, fat_node_style, account_lower_bound, limit):
+    """Enumerate account reputations."""
+    assert isinstance(account_lower_bound, str), "invalid account_lower_bound type"
+    limit = valid_limit(limit, 1000, 1000)
+
+    sql = "SELECT * FROM condenser_get_account_reputations( '{}', {}, {} )".format( account_lower_bound, account_lower_bound is None, limit )
+    rows = await db.query_all(sql)
+    if fat_node_style:
+        return [dict(account=r[0], reputation=r[1]) for r in rows]
+    else:
+        return {'reputations': [dict(name=r[0], reputation=r[1]) for r in rows]}
+
+# Content Primitives
+
+@return_error_info
+async def get_content(context, author: str, permlink: str, observer=None):
+    db = context['db']
+    return await _get_content_impl(db, True, author, permlink, observer)
+
+@return_error_info
+async def _get_content_impl(db, fat_node_style, author: str, permlink: str, observer=None):
+    """Get a single post object."""
+    valid_account(author)
+    valid_permlink(permlink)
+
+    sql = "SELECT * FROM condenser_get_content(:author, :permlink)"
+
+    post = None
+    result = await db.query_all(sql, author=author, permlink=permlink)
+    if result:
+        result = dict(result[0])
+        post = _condenser_post_object(result, 0, fat_node_style)
+        post['active_votes'] = await find_votes_impl(db, author, permlink, VotesPresentation.ActiveVotes if fat_node_style else VotesPresentation.CondenserApi)
+        if not observer:
+            post['active_votes'] = _mute_votes(post['active_votes'], Mutes.all())
+        else:
+            blacklists_for_user = await Mutes.get_blacklists_for_observer(observer, {'db':db})
+            post['active_votes'] = _mute_votes(post['active_votes'], blacklists_for_user.keys())
+
+    return post
+
+@return_error_info
+async def get_content_replies(context, author: str, permlink: str):
+    db = context['db']
+    return await _get_content_replies_impl(db, True, author, permlink)
+
+@return_error_info
+async def _get_content_replies_impl(db, fat_node_style, author: str, permlink: str):
+    """Get a list of post objects based on parent."""
+    valid_account(author)
+    valid_permlink(permlink)
+
+    sql = "SELECT * FROM condenser_get_content_replies(:author, :permlink)"
+    result = await db.query_all(sql, author=author, permlink=permlink)
+
+    muted_accounts = Mutes.all()
+
+    posts = []
+    for row in result:
+        row = dict(row)
+        post = _condenser_post_object(row, get_content_additions=fat_node_style)
+        post['active_votes'] = await find_votes_impl(db, row['author'], row['permlink'], VotesPresentation.ActiveVotes if fat_node_style else VotesPresentation.CondenserApi)
+        post['active_votes'] = _mute_votes(post['active_votes'], muted_accounts)
+        posts.append(post)
+
+    return posts
+
+# Discussion Queries
+
+def nested_query_compat(function):
+    """Unpack strange format used by some clients, accepted by steemd.
+
+    Some clients nest the discussion query object inside a single-element list, e.g.:
+
+        {... "method":"condenser_api.get_discussions_by_hot",
+             "params":[{"tag":"steem","limit":1}]}
+
+    In that case jsonrpcserver dispatch passes the dict as the first positional
+    argument. This decorator detects that shape and unpacks the query into kwargs.
+    """
+    @wraps(function)
+    def wrapper(*args, **kwargs):
+        """Checks for specific condition signature and unpacks query"""
+        if args and not kwargs and len(args) == 2 and isinstance(args[1], dict):
+            return function(args[0], **args[1])
+        return function(*args, **kwargs)
+    return wrapper
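+
+# Illustrative only: with the nested format above, a dispatched call like
+#   get_discussions_by_hot(context, {"tag": "steem", "limit": 1})
+# is unpacked by the decorator into
+#   get_discussions_by_hot(context, tag="steem", limit=1)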
+
+@return_error_info
+@nested_query_compat
+async def get_posts_by_given_sort(context, sort: str, start_author: str = '', start_permlink: str = '',
+                                     limit: int = 20, tag: str = None,
+                                     truncate_body: int = 0, filter_tags: list = None):
+    """Query posts, sorted by creation date."""
+    assert not filter_tags, 'filter_tags not supported'
+
+    db = context['db']
+
+    start_author    = valid_account(start_author, allow_empty=True)
+    start_permlink  = valid_permlink(start_permlink, allow_empty=True)
+    limit           = valid_limit(limit, 100, 20)
+    tag             = valid_tag(tag, allow_empty=True)
+
+    posts = []
+    is_community = tag[:5] == 'hive-'
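+
+    # The SQL function is chosen by naming convention:
+    # bridge_get_ranked_post_by_<sort>[_for_community | _for_tag | _for_category],
+    # falling back to the bare variant when no tag is given.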
+    if sort == 'created':
+        if is_community:
+            sql = "SELECT * FROM bridge_get_ranked_post_by_created_for_community( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT, False )"
+        elif tag == '':
+            sql = "SELECT * FROM bridge_get_ranked_post_by_created( (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
+        else:
+            sql = "SELECT * FROM bridge_get_ranked_post_by_created_for_tag( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
+    elif sort == 'trending':
+        if is_community:
+            sql = "SELECT * FROM bridge_get_ranked_post_by_trends_for_community( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT, False )"
+        elif tag == '':
+            sql = "SELECT * FROM bridge_get_ranked_post_by_trends( (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
+        else:
+            sql = "SELECT * FROM bridge_get_ranked_post_by_trends_for_tag( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
+    elif sort == 'hot':
+        if is_community:
+            sql = "SELECT * FROM bridge_get_ranked_post_by_hot_for_community( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
+        elif tag == '':
+            sql = "SELECT * FROM bridge_get_ranked_post_by_hot( (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
+        else:
+            sql = "SELECT * FROM bridge_get_ranked_post_by_hot_for_tag( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
+    elif sort == 'promoted':
+        if is_community:
+            sql = "SELECT * FROM bridge_get_ranked_post_by_promoted_for_community( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
+        elif tag == '':
+            sql = "SELECT * FROM bridge_get_ranked_post_by_promoted( (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
+        else:
+            sql = "SELECT * FROM bridge_get_ranked_post_by_promoted_for_tag( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
+    elif sort == 'post_by_payout':
+        if tag == '':
+            sql = "SELECT * FROM bridge_get_ranked_post_by_payout( (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT, False )"
+        else:
+            sql = "SELECT * FROM bridge_get_ranked_post_by_payout_for_category( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT, False )"
+    elif sort == 'comment_by_payout':
+        if tag == '':
+            sql = "SELECT * FROM bridge_get_ranked_post_by_payout_comments( (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
+        else:
+            sql = "SELECT * FROM bridge_get_ranked_post_by_payout_comments_for_category( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )"
+    else:
+        return posts
+
+    sql_result = await db.query_all(sql, tag=tag, author=start_author, permlink=start_permlink, limit=limit )
+
+    muted_accounts = Mutes.all()
+    for row in sql_result:
+        post = _condenser_post_object(row, truncate_body)
+        post['active_votes'] = await find_votes_impl(db, row['author'], row['permlink'], VotesPresentation.CondenserApi)
+        post['active_votes'] = _mute_votes(post['active_votes'], muted_accounts)
+        posts.append(post)
+    return posts
+
+@return_error_info
+@nested_query_compat
+async def get_discussions_by_created(context, start_author: str = '', start_permlink: str = '',
+                                     limit: int = 20, tag: str = None,
+                                     truncate_body: int = 0, filter_tags: list = None):
+    """Query posts, sorted by creation date."""
+    return await get_posts_by_given_sort(context, 'created', start_author, start_permlink, limit, tag, truncate_body, filter_tags)
+
+@return_error_info
+@nested_query_compat
+async def get_discussions_by_trending(context, start_author: str = '', start_permlink: str = '',
+                                      limit: int = 20, tag: str = None,
+                                      truncate_body: int = 0, filter_tags: list = None):
+    """Query posts, sorted by trending score."""
+    return await get_posts_by_given_sort(context, 'trending', start_author, start_permlink, limit, tag, truncate_body, filter_tags)
+
+@return_error_info
+@nested_query_compat
+async def get_discussions_by_hot(context, start_author: str = '', start_permlink: str = '',
+                                 limit: int = 20, tag: str = None,
+                                 truncate_body: int = 0, filter_tags: list = None):
+    """Query posts, sorted by hotness."""
+    return await get_posts_by_given_sort(context, 'hot', start_author, start_permlink, limit, tag, truncate_body, filter_tags)
+
+@return_error_info
+@nested_query_compat
+async def get_discussions_by_promoted(context, start_author: str = '', start_permlink: str = '',
+                                      limit: int = 20, tag: str = None,
+                                      truncate_body: int = 0, filter_tags: list = None):
+    """Query posts, sorted by amount promoted."""
+    return await get_posts_by_given_sort(context, 'promoted', start_author, start_permlink, limit, tag, truncate_body, filter_tags)
+
+@return_error_info
+@nested_query_compat
+async def get_post_discussions_by_payout(context, start_author: str = '', start_permlink: str = '',
+                                         limit: int = 20, tag: str = None,
+                                         truncate_body: int = 0):
+    """Query top-level posts, sorted by pending payout."""
+    return await get_posts_by_given_sort(context, 'post_by_payout', start_author, start_permlink, limit, tag, truncate_body, [])
+
+@return_error_info
+@nested_query_compat
+async def get_comment_discussions_by_payout(context, start_author: str = '', start_permlink: str = '',
+                                            limit: int = 20, tag: str = None,
+                                            truncate_body: int = 0):
+    """Query comments, sorted by pending payout."""
+    return await get_posts_by_given_sort(context, 'comment_by_payout', start_author, start_permlink, limit, tag, truncate_body, [])
+
+@return_error_info
+@nested_query_compat
+async def get_discussions_by_blog(context, tag: str = None, start_author: str = '',
+                                  start_permlink: str = '', limit: int = 20,
+                                  truncate_body: int = 0, filter_tags: list = None):
+    """Retrieve account's blog posts, including reblogs."""
+    assert tag, '`tag` cannot be blank'
+    assert not filter_tags, 'filter_tags not supported'
+    valid_account(tag)
+    valid_account(start_author, allow_empty=True)
+    valid_permlink(start_permlink, allow_empty=True)
+    valid_limit(limit, 100, 20)
+
+    sql = """
+        SELECT * FROM get_discussions_by_blog(:author, :start_author, :start_permlink, :limit)
+    """
+
+    db = context['db']
+    result = await db.query_all(sql, author=tag, start_author=start_author, start_permlink=start_permlink, limit=limit)
+    posts = []
+
+    for row in result:
+        row = dict(row)
+        post = _condenser_post_object(row, truncate_body=truncate_body)
+        post['active_votes'] = await find_votes_impl(db, post['author'], post['permlink'], VotesPresentation.CondenserApi)
+        post['active_votes'] = _mute_votes(post['active_votes'], Mutes.all())
+        posts.append(post)
+
+    return posts
+
+@return_error_info
+@nested_query_compat
+async def get_discussions_by_feed(context, tag: str = None, start_author: str = '',
+                                  start_permlink: str = '', limit: int = 20,
+                                  truncate_body: int = 0, filter_tags: list = None):
+    """Retrieve account's personalized feed."""
+    assert tag, '`tag` cannot be blank'
+    assert not filter_tags, 'filter_tags not supported'
+    return await get_by_feed_with_reblog_impl(
+        context['db'],
+        valid_account(tag),
+        valid_account(start_author, allow_empty=True),
+        valid_permlink(start_permlink, allow_empty=True),
+        valid_limit(limit, 100, 20),
+        truncate_body)
+
+@return_error_info
+@nested_query_compat
+async def get_discussions_by_comments(context, start_author: str = None, start_permlink: str = '',
+                                      limit: int = 20, truncate_body: int = 0,
+                                      filter_tags: list = None):
+    """Get comments by made by author."""
+    assert start_author, '`start_author` cannot be blank'
+    assert not filter_tags, 'filter_tags not supported'
+    valid_account(start_author)
+    valid_permlink(start_permlink, allow_empty=True)
+    valid_limit(limit, 100, 20)
+
+    posts = []
+    db = context['db']
+
+    sql = " SELECT * FROM condenser_get_discussions_by_comments( '{}', '{}', {} ) ".format( start_author, start_permlink, limit )
+    result = await db.query_all(sql)
+
+    for row in result:
+        row = dict(row)
+        post = _condenser_post_object(row, truncate_body=truncate_body)
+        post['active_votes'] = await find_votes_impl(db, post['author'], post['permlink'], VotesPresentation.CondenserApi)
+        post['active_votes'] = _mute_votes(post['active_votes'], Mutes.all())
+        posts.append(post)
+
+    return posts
+
+@return_error_info
+@nested_query_compat
+async def get_replies_by_last_update(context, start_author: str = None, start_permlink: str = '',
+                                     limit: int = 20, truncate_body: int = 0):
+    """Get all replies made to any of author's posts."""
+    assert start_author, '`start_author` cannot be blank'
+
+    return await cursor.get_by_replies_to_account(
+        context['db'],
+        valid_account(start_author),
+        valid_permlink(start_permlink, allow_empty=True),
+        valid_limit(limit, 100, 20),
+        truncate_body)
+
+@return_error_info
+@nested_query_compat
+async def get_discussions_by_author_before_date(context, author: str = None, start_permlink: str = '',
+                                                before_date: str = '', limit: int = 10):
+    """Retrieve account's blog posts, without reblogs.
+
+    NOTE: before_date is ignored here; it appears to be broken (or simply ignored)
+    in steemd as well. This call is similar to get_discussions_by_blog but does
+    NOT serve reblogs.
+    """
+    # pylint: disable=invalid-name,unused-argument
+    assert author, '`author` cannot be blank'
+    return await cursor.get_by_blog_without_reblog(
+        context['db'],
+        valid_account(author),
+        valid_permlink(start_permlink, allow_empty=True),
+        valid_limit(limit, 100, 10))
+
+@return_error_info
+@nested_query_compat
+async def get_blog(context, account: str, start_entry_id: int = 0, limit: int = None):
+    """Get posts for an author's blog (w/ reblogs), paged by index/limit.
+
+    Equivalent to get_discussions_by_blog, but uses offset-based pagination.
+
+    Examples (ABW: the old description and examples were misleading, since the code often
+    behaved differently; more cases also work now that previously raised errors):
+    (acct, -1, limit) for limit 1..500 - returns the latest (at most) `limit` posts
+    (acct, 0) - returns the latest single post (ABW: this is a bug, kept because it was likely in active use - it should return the oldest post)
+    (acct, 0, limit) for limit 1..500 - same as (acct, -1, limit) - see above
+    (acct, last_idx) for positive last_idx - returns the last_idx oldest posts, or posts in range [last_idx..last_idx-500) when last_idx >= 500
+    (acct, last_idx, limit) for positive last_idx and limit 1..500 - returns posts in range [last_idx..last_idx-limit)
+    """
+    db = context['db']
+
+    account = valid_account(account)
+    if not start_entry_id:
+        start_entry_id = -1
+    start_entry_id = valid_offset(start_entry_id)
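+    # No explicit limit: default to everything from start_entry_id down to the
+    # oldest entry (start_entry_id + 1 items), capped at 500, so a start of -1/0
+    # yields a single (latest) post as in the examples above.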
+    if not limit:
+        limit = max(start_entry_id + 1, 1)
+        limit = min(limit, 500)
+    limit = valid_limit(limit, 500, None)
+
+    sql = "SELECT * FROM condenser_get_blog(:account, :last, :limit)"
+    result = await db.query_all(sql, account=account, last=start_entry_id, limit=limit)
+
+    muted_accounts = Mutes.all()
+    out = []
+    for row in result:
+        row = dict(row)
+        post = _condenser_post_object(row)
+
+        post['active_votes'] = await find_votes_impl(db, row['author'], row['permlink'], VotesPresentation.CondenserApi)
+        post['active_votes'] = _mute_votes(post['active_votes'], muted_accounts)
+
+        out.append({"blog": account,
+                    "entry_id": row['entry_id'],
+                    "comment": post,
+                    "reblogged_on": json_date(row['reblogged_at'])})
+
+    return list(reversed(out))
+
+@return_error_info
+@nested_query_compat
+async def get_blog_entries(context, account: str, start_entry_id: int = 0, limit: int = None):
+    """Get 'entries' for an author's blog (w/ reblogs), paged by index/limit.
+
+    Interface identical to get_blog, but returns minimalistic post references.
+    """
+    db = context['db']
+
+    account = valid_account(account)
+    if not start_entry_id:
+        start_entry_id = -1
+    start_entry_id = valid_offset(start_entry_id)
+    if not limit:
+        limit = max(start_entry_id + 1, 1)
+        limit = min(limit, 500)
+    limit = valid_limit(limit, 500, None)
+
+    sql = "SELECT * FROM condenser_get_blog_entries(:account, :last, :limit)"
+    result = await db.query_all(sql, account=account, last=start_entry_id, limit=limit)
+
+    out = []
+    for row in result:
+        row = dict(row)
+        out.append({"blog": account,
+                    "entry_id": row['entry_id'],
+                    "author": row['author'],
+                    "permlink": row['permlink'],
+                    "reblogged_on": json_date(row['reblogged_at'])})
+
+    return list(reversed(out))
+
+@return_error_info
+async def get_active_votes(context, author: str, permlink: str):
+    """ Returns all votes for the given post. """
+    valid_account(author)
+    valid_permlink(permlink)
+    db = context['db']
+
+    return await find_votes_impl(db, author, permlink, VotesPresentation.ActiveVotes)
diff --git a/hive/server/condenser_api/tags.py b/hive/server/condenser_api/tags.py
index 0e6535888dfb9bc37d41dbb1117aa61c34e96e99..d6f05ca2b8d58a55ebf3a0367f6226deb016a039 100644
--- a/hive/server/condenser_api/tags.py
+++ b/hive/server/condenser_api/tags.py
@@ -7,16 +7,7 @@ from hive.server.common.helpers import (return_error_info, valid_tag, valid_limi
 @cached(ttl=7200, timeout=1200)
 async def get_top_trending_tags_summary(context):
     """Get top 50 trending tags among pending posts."""
-    # Same results, more overhead:
-    #return [tag['name'] for tag in await get_trending_tags('', 50)]
-    sql = """
-        SELECT (SELECT category FROM hive_category_data WHERE id = category_id) as category
-          FROM hive_posts
-         WHERE counter_deleted = 0 AND NOT is_paidout
-      GROUP BY category
-      ORDER BY SUM(payout + pending_payout) DESC
-         LIMIT 50
-    """
+    sql = "SELECT condenser_get_top_trending_tags_summary(50)"
     return await context['db'].query_col(sql)
 
 @return_error_info
@@ -27,32 +18,10 @@ async def get_trending_tags(context, start_tag: str = '', limit: int = 250):
     limit = valid_limit(limit, 250, 250)
     start_tag = valid_tag(start_tag or '', allow_empty=True)
 
-    if start_tag:
-        seek = """
-          HAVING SUM(payout + pending_payout) <= (
-            SELECT SUM(payout + pending_payout)
-              FROM hive_posts hp
-              JOIN hive_category_data hcd ON hcd.id = hp.category_id
-             WHERE NOT is_paidout AND counter_deleted = 0 AND hcd.category = :start_tag)
-        """
-    else:
-        seek = ''
-
-    sql = """
-      SELECT hcd.category,
-             COUNT(*) AS total_posts,
-             SUM(CASE WHEN hp.depth = 0 THEN 1 ELSE 0 END) AS top_posts,
-             SUM(hp.payout + hp.pending_payout) AS total_payouts
-        FROM hive_posts hp
-        JOIN hive_category_data hcd ON hcd.id = hp.category_id
-       WHERE NOT hp.is_paidout AND counter_deleted = 0
-    GROUP BY hcd.category %s
-    ORDER BY SUM(hp.payout + hp.pending_payout) DESC, hcd.category ASC
-       LIMIT :limit
-    """ % seek
+    sql = "SELECT * FROM condenser_get_trending_tags( (:tag)::VARCHAR, :limit )"
 
     out = []
-    for row in await context['db'].query_all(sql, limit=limit, start_tag=start_tag):
+    for row in await context['db'].query_all(sql, limit=limit, tag=start_tag):
         out.append({
             'name': row['category'],
             'comments': row['total_posts'] - row['top_posts'],
diff --git a/hive/server/hive_api/public.py b/hive/server/hive_api/public.py
index f6415604c1f6b8ee3cc63565d9852b203ecf9bd5..8119a99c963ae61e60a08759aba28b635955b424 100644
--- a/hive/server/hive_api/public.py
+++ b/hive/server/hive_api/public.py
@@ -44,7 +44,8 @@ async def list_followers(context, account:str, start:str='', limit:int=50, obser
         context['db'],
         valid_account(account),
         valid_account(start, allow_empty=True),
-        'blog', valid_limit(limit, 100, 50))
+        1, # blog
+        valid_limit(limit, 100, 50))
     return await accounts_by_name(context['db'], followers, observer, lite=True)
 
 async def list_following(context, account:str, start:str='', limit:int=50, observer:str=None):
@@ -53,7 +54,8 @@ async def list_following(context, account:str, start:str='', limit:int=50, obser
         context['db'],
         valid_account(account),
         valid_account(start, allow_empty=True),
-        'blog', valid_limit(limit, 100, 50))
+        1, # blog
+        valid_limit(limit, 100, 50))
     return await accounts_by_name(context['db'], following, observer, lite=True)
 
 async def list_all_muted(context, account):
diff --git a/tests/tests_api b/tests/tests_api
index 1ae2a4b1367139e7954b31596eaa4bf65e6aa68e..f38ef7c99adbab494a384941f0f71427d8ff9eee 160000
--- a/tests/tests_api
+++ b/tests/tests_api
@@ -1 +1 @@
-Subproject commit 1ae2a4b1367139e7954b31596eaa4bf65e6aa68e
+Subproject commit f38ef7c99adbab494a384941f0f71427d8ff9eee