diff --git a/hive/db/schema.py b/hive/db/schema.py index 282a96f0d6c0f3d901480b62d3ff2d5ab4d3335f..08718053185e5aa3c4db9912fca57d0a518fed8e 100644 --- a/hive/db/schema.py +++ b/hive/db/schema.py @@ -1,700 +1,700 @@ -"""Db schema definitions and setup routines.""" - -import sqlalchemy as sa -from sqlalchemy.sql import text as sql_text -from sqlalchemy.types import SMALLINT -from sqlalchemy.types import CHAR -from sqlalchemy.types import VARCHAR -from sqlalchemy.types import TEXT -from sqlalchemy.types import BOOLEAN - -import logging -log = logging.getLogger(__name__) - -#pylint: disable=line-too-long, too-many-lines, bad-whitespace - -# [DK] we changed and removed some tables so i upgraded DB_VERSION to 18 -DB_VERSION = 18 - -def build_metadata(): - """Build schema def with SqlAlchemy""" - metadata = sa.MetaData() - - sa.Table( - 'hive_blocks', metadata, - sa.Column('num', sa.Integer, primary_key=True, autoincrement=False), - sa.Column('hash', CHAR(40), nullable=False), - sa.Column('prev', CHAR(40)), - sa.Column('txs', SMALLINT, server_default='0', nullable=False), - sa.Column('ops', SMALLINT, server_default='0', nullable=False), - sa.Column('created_at', sa.DateTime, nullable=False), - - sa.UniqueConstraint('hash', name='hive_blocks_ux1'), - sa.ForeignKeyConstraint(['prev'], ['hive_blocks.hash'], name='hive_blocks_fk1'), - sa.Index('hive_blocks_created_at_idx', 'created_at') - ) - - sa.Table( - 'hive_accounts', metadata, - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('name', VARCHAR(16, collation='C'), nullable=False), - sa.Column('created_at', sa.DateTime, nullable=False), - #sa.Column('block_num', sa.Integer, nullable=False), - sa.Column('reputation', sa.BigInteger, nullable=False, server_default='0'), - sa.Column('is_implicit', sa.Boolean, nullable=False, server_default='1'), - sa.Column('followers', sa.Integer, nullable=False, server_default='0'), - sa.Column('following', sa.Integer, nullable=False, server_default='0'), - - sa.Column('rank', sa.Integer, nullable=False, server_default='0'), - - sa.Column('lastread_at', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'), - sa.Column('posting_json_metadata', sa.Text), - sa.Column('json_metadata', sa.Text), - - sa.UniqueConstraint('name', name='hive_accounts_ux1'), - sa.Index('hive_accounts_ix6', 'reputation') - ) - - sa.Table( - 'hive_reputation_data', metadata, - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('author_id', sa.Integer, nullable=False), - sa.Column('voter_id', sa.Integer, nullable=False), - sa.Column('permlink', sa.String(255, collation='C'), nullable=False), - sa.Column('rshares', sa.BigInteger, nullable=False), - sa.Column('block_num', sa.Integer, nullable=False), - - sa.Index('hive_reputation_data_author_permlink_voter_idx', 'author_id', 'permlink', 'voter_id'), - sa.Index('hive_reputation_data_block_num_idx', 'block_num') - ) - - sa.Table( - 'hive_posts', metadata, - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('root_id', sa.Integer, nullable=False), # records having initially set 0 will be updated to their id - sa.Column('parent_id', sa.Integer, nullable=False), - sa.Column('author_id', sa.Integer, nullable=False), - sa.Column('permlink_id', sa.Integer, nullable=False), - sa.Column('category_id', sa.Integer, nullable=False), - sa.Column('community_id', sa.Integer, nullable=True), - sa.Column('created_at', sa.DateTime, nullable=False), - sa.Column('depth', SMALLINT, nullable=False), - sa.Column('counter_deleted', sa.Integer, nullable=False, server_default='0'), - 
sa.Column('is_pinned', BOOLEAN, nullable=False, server_default='0'), - sa.Column('is_muted', BOOLEAN, nullable=False, server_default='0'), - sa.Column('is_valid', BOOLEAN, nullable=False, server_default='1'), - sa.Column('promoted', sa.types.DECIMAL(10, 3), nullable=False, server_default='0'), - - sa.Column('children', sa.Integer, nullable=False, server_default='0'), - - # core stats/indexes - sa.Column('payout', sa.types.DECIMAL(10, 3), nullable=False, server_default='0'), - sa.Column('pending_payout', sa.types.DECIMAL(10, 3), nullable=False, server_default='0'), - sa.Column('payout_at', sa.DateTime, nullable=False, server_default='1970-01-01'), - sa.Column('last_payout_at', sa.DateTime, nullable=False, server_default='1970-01-01'), - sa.Column('updated_at', sa.DateTime, nullable=False, server_default='1970-01-01'), - sa.Column('is_paidout', BOOLEAN, nullable=False, server_default='0'), - - # ui flags/filters - sa.Column('is_nsfw', BOOLEAN, nullable=False, server_default='0'), - sa.Column('is_declined', BOOLEAN, nullable=False, server_default='0'), - sa.Column('is_full_power', BOOLEAN, nullable=False, server_default='0'), - sa.Column('is_hidden', BOOLEAN, nullable=False, server_default='0'), - - # important indexes - sa.Column('sc_trend', sa.Float(precision=6), nullable=False, server_default='0'), - sa.Column('sc_hot', sa.Float(precision=6), nullable=False, server_default='0'), - - sa.Column('total_payout_value', sa.String(30), nullable=False, server_default='0.000 HBD'), - sa.Column('author_rewards', sa.BigInteger, nullable=False, server_default='0'), - - sa.Column('author_rewards_hive', sa.BigInteger, nullable=False, server_default='0'), - sa.Column('author_rewards_hbd', sa.BigInteger, nullable=False, server_default='0'), - sa.Column('author_rewards_vests', sa.BigInteger, nullable=False, server_default='0'), - - sa.Column('abs_rshares', sa.Numeric, nullable=False, server_default='0'), - sa.Column('vote_rshares', sa.Numeric, nullable=False, server_default='0'), - sa.Column('total_vote_weight', sa.Numeric, nullable=False, server_default='0'), - sa.Column('active', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'), - sa.Column('cashout_time', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'), - sa.Column('percent_hbd', sa.Integer, nullable=False, server_default='10000'), - - sa.Column('curator_payout_value', sa.String(30), nullable=False, server_default='0.000 HBD'), - sa.Column('max_accepted_payout', sa.String(30), nullable=False, server_default='1000000.000 HBD'), - sa.Column('allow_votes', BOOLEAN, nullable=False, server_default='1'), - sa.Column('allow_curation_rewards', BOOLEAN, nullable=False, server_default='1'), - sa.Column('beneficiaries', sa.JSON, nullable=False, server_default='[]'), - sa.Column('block_num', sa.Integer, nullable=False ), - sa.Column('block_num_created', sa.Integer, nullable=False ), - - sa.ForeignKeyConstraint(['author_id'], ['hive_accounts.id'], name='hive_posts_fk1'), - sa.ForeignKeyConstraint(['root_id'], ['hive_posts.id'], name='hive_posts_fk2'), - sa.ForeignKeyConstraint(['parent_id'], ['hive_posts.id'], name='hive_posts_fk3'), - sa.UniqueConstraint('author_id', 'permlink_id', 'counter_deleted', name='hive_posts_ux1'), - - sa.Index('hive_posts_depth_idx', 'depth'), - - sa.Index('hive_posts_root_id_id_idx', 'root_id','id'), - - sa.Index('hive_posts_parent_id_idx', 'parent_id'), - sa.Index('hive_posts_community_id_idx', 'community_id'), - - sa.Index('hive_posts_category_id_idx', 'category_id'), - 
sa.Index('hive_posts_payout_at_idx', 'payout_at'), - sa.Index('hive_posts_payout_idx', 'payout'), - sa.Index('hive_posts_promoted_idx', 'promoted'), - sa.Index('hive_posts_sc_trend_id_is_paidout_idx', 'sc_trend', 'id', 'is_paidout'), - sa.Index('hive_posts_sc_hot_id_is_paidout_idx', 'sc_hot', 'id', 'is_paidout'), - sa.Index('hive_posts_author_id_created_at_idx', sa.text('author_id DESC, created_at DESC')), - sa.Index('hive_posts_block_num_idx', 'block_num'), - sa.Index('hive_posts_block_num_created_idx', 'block_num_created'), - sa.Index('hive_posts_cashout_time_id_idx', 'cashout_time', 'id'), - sa.Index('hive_posts_updated_at_idx', sa.text('updated_at DESC')), - sa.Index('hive_posts_payout_plus_pending_payout_id_is_paidout_idx', sa.text('(payout+pending_payout), id, is_paidout')) - ) - - sa.Table( - 'hive_post_data', metadata, - sa.Column('id', sa.Integer, primary_key=True, autoincrement=False), - sa.Column('title', VARCHAR(512), nullable=False, server_default=''), - sa.Column('preview', VARCHAR(1024), nullable=False, server_default=''), # first 1k of 'body' - sa.Column('img_url', VARCHAR(1024), nullable=False, server_default=''), # first 'image' from 'json' - sa.Column('body', TEXT, nullable=False, server_default=''), - sa.Column('json', TEXT, nullable=False, server_default='') - ) - - sa.Table( - 'hive_permlink_data', metadata, - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('permlink', sa.String(255, collation='C'), nullable=False), - sa.UniqueConstraint('permlink', name='hive_permlink_data_permlink') - ) - - sa.Table( - 'hive_category_data', metadata, - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('category', sa.String(255, collation='C'), nullable=False), - sa.UniqueConstraint('category', name='hive_category_data_category') - ) - - sa.Table( - 'hive_votes', metadata, - sa.Column('id', sa.BigInteger, primary_key=True), - sa.Column('post_id', sa.Integer, nullable=False), - sa.Column('voter_id', sa.Integer, nullable=False), - sa.Column('author_id', sa.Integer, nullable=False), - sa.Column('permlink_id', sa.Integer, nullable=False), - sa.Column('weight', sa.Numeric, nullable=False, server_default='0'), - sa.Column('rshares', sa.BigInteger, nullable=False, server_default='0'), - sa.Column('vote_percent', sa.Integer, server_default='0'), - sa.Column('last_update', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'), - sa.Column('num_changes', sa.Integer, server_default='0'), - sa.Column('block_num', sa.Integer, nullable=False ), - sa.Column('is_effective', BOOLEAN, nullable=False, server_default='0'), - - sa.UniqueConstraint('voter_id', 'author_id', 'permlink_id', name='hive_votes_voter_id_author_id_permlink_id_uk'), - - sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_votes_fk1'), - sa.ForeignKeyConstraint(['voter_id'], ['hive_accounts.id'], name='hive_votes_fk2'), - sa.ForeignKeyConstraint(['author_id'], ['hive_accounts.id'], name='hive_votes_fk3'), - sa.ForeignKeyConstraint(['permlink_id'], ['hive_permlink_data.id'], name='hive_votes_fk4'), - sa.ForeignKeyConstraint(['block_num'], ['hive_blocks.num'], name='hive_votes_fk5'), - - sa.Index('hive_votes_voter_id_post_id_idx', 'voter_id', 'post_id'), # probably this index is redundant to hive_votes_voter_id_last_update_idx because of starting voter_id. 
- sa.Index('hive_votes_voter_id_last_update_idx', 'voter_id', 'last_update'), # this index is critical for hive_accounts_info_view performance - sa.Index('hive_votes_post_id_voter_id_idx', 'post_id', 'voter_id'), - sa.Index('hive_votes_block_num_idx', 'block_num') # this is also important for hive_accounts_info_view - ) - - sa.Table( - 'hive_tag_data', metadata, - sa.Column('id', sa.Integer, nullable=False, primary_key=True), - sa.Column('tag', VARCHAR(64, collation='C'), nullable=False, server_default=''), - sa.UniqueConstraint('tag', name='hive_tag_data_ux1') - ) - - sa.Table( - 'hive_post_tags', metadata, - sa.Column('post_id', sa.Integer, nullable=False), - sa.Column('tag_id', sa.Integer, nullable=False), - sa.PrimaryKeyConstraint('post_id', 'tag_id', name='hive_post_tags_pk1'), - - sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_post_tags_fk1'), - sa.ForeignKeyConstraint(['tag_id'], ['hive_tag_data.id'], name='hive_post_tags_fk2'), - - sa.Index('hive_post_tags_tag_id_idx', 'tag_id') - ) - - sa.Table( - 'hive_follows', metadata, - sa.Column('id', sa.Integer, primary_key=True ), - sa.Column('follower', sa.Integer, nullable=False), - sa.Column('following', sa.Integer, nullable=False), - sa.Column('state', SMALLINT, nullable=False, server_default='1'), - sa.Column('created_at', sa.DateTime, nullable=False), - sa.Column('blacklisted', sa.Boolean, nullable=False, server_default='0'), - sa.Column('follow_blacklists', sa.Boolean, nullable=False, server_default='0'), - sa.Column('follow_muted', BOOLEAN, nullable=False, server_default='0'), - sa.Column('block_num', sa.Integer, nullable=False ), - - sa.UniqueConstraint('following', 'follower', name='hive_follows_ux1'), # core - sa.ForeignKeyConstraint(['block_num'], ['hive_blocks.num'], name='hive_follows_fk1'), - sa.Index('hive_follows_ix5a', 'following', 'state', 'created_at', 'follower'), - sa.Index('hive_follows_ix5b', 'follower', 'state', 'created_at', 'following'), - sa.Index('hive_follows_block_num_idx', 'block_num'), - sa.Index('hive_follows_created_at_idx', 'created_at'), - ) - - sa.Table( - 'hive_reblogs', metadata, - sa.Column('id', sa.Integer, primary_key=True ), - sa.Column('blogger_id', sa.Integer, nullable=False), - sa.Column('post_id', sa.Integer, nullable=False), - sa.Column('created_at', sa.DateTime, nullable=False), - sa.Column('block_num', sa.Integer, nullable=False ), - - sa.ForeignKeyConstraint(['blogger_id'], ['hive_accounts.id'], name='hive_reblogs_fk1'), - sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_reblogs_fk2'), - sa.ForeignKeyConstraint(['block_num'], ['hive_blocks.num'], name='hive_reblogs_fk3'), - sa.UniqueConstraint('blogger_id', 'post_id', name='hive_reblogs_ux1'), # core - sa.Index('hive_reblogs_post_id', 'post_id'), - sa.Index('hive_reblogs_block_num_idx', 'block_num'), - sa.Index('hive_reblogs_created_at_idx', 'created_at') - ) - - sa.Table( - 'hive_payments', metadata, - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('block_num', sa.Integer, nullable=False), - sa.Column('tx_idx', SMALLINT, nullable=False), - sa.Column('post_id', sa.Integer, nullable=False), - sa.Column('from_account', sa.Integer, nullable=False), - sa.Column('to_account', sa.Integer, nullable=False), - sa.Column('amount', sa.types.DECIMAL(10, 3), nullable=False), - sa.Column('token', VARCHAR(5), nullable=False), - - sa.ForeignKeyConstraint(['from_account'], ['hive_accounts.id'], name='hive_payments_fk1'), - sa.ForeignKeyConstraint(['to_account'], ['hive_accounts.id'], 
name='hive_payments_fk2'), - sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_payments_fk3'), - sa.Index('hive_payments_from', 'from_account'), - sa.Index('hive_payments_to', 'to_account'), - sa.Index('hive_payments_post_id', 'post_id'), - ) - - sa.Table( - 'hive_feed_cache', metadata, - sa.Column('post_id', sa.Integer, nullable=False), - sa.Column('account_id', sa.Integer, nullable=False), - sa.Column('created_at', sa.DateTime, nullable=False), - sa.Column('block_num', sa.Integer, nullable=False), - sa.PrimaryKeyConstraint('account_id', 'post_id', name='hive_feed_cache_pk'), - sa.ForeignKeyConstraint(['block_num'], ['hive_blocks.num'], name='hive_feed_cache_fk1'), - - sa.Index('hive_feed_cache_block_num_idx', 'block_num'), - sa.Index('hive_feed_cache_created_at_idx', 'created_at') - ) - - sa.Table( - 'hive_state', metadata, - sa.Column('block_num', sa.Integer, primary_key=True, autoincrement=False), - sa.Column('db_version', sa.Integer, nullable=False), - sa.Column('steem_per_mvest', sa.types.DECIMAL(14, 6), nullable=False), - sa.Column('usd_per_steem', sa.types.DECIMAL(14, 6), nullable=False), - sa.Column('sbd_per_steem', sa.types.DECIMAL(14, 6), nullable=False), - sa.Column('dgpo', sa.Text, nullable=False), - ) - - sa.Table( - 'hive_posts_api_helper', metadata, - sa.Column('id', sa.Integer, primary_key=True, autoincrement = False), - sa.Column('author_s_permlink', VARCHAR(275, collation='C'), nullable=False), # concatenation of author '/' permlink - sa.Index('hive_posts_api_helper_author_s_permlink_idx', 'author_s_permlink') - ) - - sa.Table( - 'hive_mentions', metadata, - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('post_id', sa.Integer, nullable=False), - sa.Column('account_id', sa.Integer, nullable=False), - sa.Column('block_num', sa.Integer, nullable=False), - - sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_mentions_fk1'), - sa.ForeignKeyConstraint(['account_id'], ['hive_accounts.id'], name='hive_mentions_fk2'), - - sa.Index('hive_mentions_account_id_idx', 'account_id'), - sa.UniqueConstraint('post_id', 'account_id', 'block_num', name='hive_mentions_ux1') - ) - - metadata = build_metadata_community(metadata) - - return metadata - -def build_metadata_community(metadata=None): - """Build community schema defs""" - if not metadata: - metadata = sa.MetaData() - - sa.Table( - 'hive_communities', metadata, - sa.Column('id', sa.Integer, primary_key=True, autoincrement=False), - sa.Column('type_id', SMALLINT, nullable=False), - sa.Column('lang', CHAR(2), nullable=False, server_default='en'), - sa.Column('name', VARCHAR(16, collation='C'), nullable=False), - sa.Column('title', sa.String(32), nullable=False, server_default=''), - sa.Column('created_at', sa.DateTime, nullable=False), - sa.Column('sum_pending', sa.Integer, nullable=False, server_default='0'), - sa.Column('num_pending', sa.Integer, nullable=False, server_default='0'), - sa.Column('num_authors', sa.Integer, nullable=False, server_default='0'), - sa.Column('rank', sa.Integer, nullable=False, server_default='0'), - sa.Column('subscribers', sa.Integer, nullable=False, server_default='0'), - sa.Column('is_nsfw', BOOLEAN, nullable=False, server_default='0'), - sa.Column('about', sa.String(120), nullable=False, server_default=''), - sa.Column('primary_tag', sa.String(32), nullable=False, server_default=''), - sa.Column('category', sa.String(32), nullable=False, server_default=''), - sa.Column('avatar_url', sa.String(1024), nullable=False, server_default=''), - 
sa.Column('description', sa.String(5000), nullable=False, server_default=''), - sa.Column('flag_text', sa.String(5000), nullable=False, server_default=''), - sa.Column('settings', TEXT, nullable=False, server_default='{}'), - sa.Column('block_num', sa.Integer, nullable=False ), - - sa.UniqueConstraint('name', name='hive_communities_ux1'), - sa.Index('hive_communities_ix1', 'rank', 'id'), - sa.Index('hive_communities_block_num_idx', 'block_num') - ) - - sa.Table( - 'hive_roles', metadata, - sa.Column('account_id', sa.Integer, nullable=False), - sa.Column('community_id', sa.Integer, nullable=False), - sa.Column('created_at', sa.DateTime, nullable=False), - sa.Column('role_id', SMALLINT, nullable=False, server_default='0'), - sa.Column('title', sa.String(140), nullable=False, server_default=''), - - sa.PrimaryKeyConstraint('account_id', 'community_id', name='hive_roles_pk'), - sa.Index('hive_roles_ix1', 'community_id', 'account_id', 'role_id'), - ) - - sa.Table( - 'hive_subscriptions', metadata, - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('account_id', sa.Integer, nullable=False), - sa.Column('community_id', sa.Integer, nullable=False), - sa.Column('created_at', sa.DateTime, nullable=False), - sa.Column('block_num', sa.Integer, nullable=False ), - - sa.UniqueConstraint('account_id', 'community_id', name='hive_subscriptions_ux1'), - sa.Index('hive_subscriptions_community_idx', 'community_id'), - sa.Index('hive_subscriptions_block_num_idx', 'block_num') - ) - - sa.Table( - 'hive_notifs', metadata, - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('block_num', sa.Integer, nullable=False), - sa.Column('type_id', SMALLINT, nullable=False), - sa.Column('score', SMALLINT, nullable=False), - sa.Column('created_at', sa.DateTime, nullable=False), - sa.Column('src_id', sa.Integer, nullable=True), - sa.Column('dst_id', sa.Integer, nullable=True), - sa.Column('post_id', sa.Integer, nullable=True), - sa.Column('community_id', sa.Integer, nullable=True), - sa.Column('block_num', sa.Integer, nullable=False), - sa.Column('payload', sa.Text, nullable=True), - - sa.Index('hive_notifs_ix1', 'dst_id', 'id', postgresql_where=sql_text("dst_id IS NOT NULL")), - sa.Index('hive_notifs_ix2', 'community_id', 'id', postgresql_where=sql_text("community_id IS NOT NULL")), - sa.Index('hive_notifs_ix3', 'community_id', 'type_id', 'id', postgresql_where=sql_text("community_id IS NOT NULL")), - sa.Index('hive_notifs_ix4', 'community_id', 'post_id', 'type_id', 'id', postgresql_where=sql_text("community_id IS NOT NULL AND post_id IS NOT NULL")), - sa.Index('hive_notifs_ix5', 'post_id', 'type_id', 'dst_id', 'src_id', postgresql_where=sql_text("post_id IS NOT NULL AND type_id IN (16,17)")), # filter: dedupe - sa.Index('hive_notifs_ix6', 'dst_id', 'created_at', 'score', 'id', postgresql_where=sql_text("dst_id IS NOT NULL")), # unread - ) - - sa.Table('hive_notification_cache', metadata, - sa.Column('id', sa.BigInteger, primary_key=True), - sa.Column('block_num', sa.Integer, nullable = False), - sa.Column('type_id', sa.Integer, nullable = False), - sa.Column('dst', sa.Integer, nullable=True), # dst account id except persistent notifs from hive_notifs - sa.Column('src', sa.Integer, nullable=True), # src account id - sa.Column('dst_post_id', sa.Integer, nullable=True), # destination post id - sa.Column('post_id', sa.Integer, nullable=True), - sa.Column('created_at', sa.DateTime, nullable=False), # notification creation time - sa.Column('score', sa.Integer, nullable=False), - sa.Column('community_title', 
sa.String(32), nullable=True), - sa.Column('community', sa.String(16), nullable=True), - sa.Column('payload', sa.String, nullable=True), - - sa.Index('hive_notification_cache_block_num_idx', 'block_num'), - sa.Index('hive_notification_cache_dst_score_idx', 'dst', 'score', postgresql_where=sql_text("dst IS NOT NULL")) - - ) - - return metadata - - -def teardown(db): - """Drop all tables""" - build_metadata().drop_all(db.engine()) - -def drop_fk(db): - db.query_no_return("START TRANSACTION") - for table in build_metadata().sorted_tables: - for fk in table.foreign_keys: - sql = """ALTER TABLE {} DROP CONSTRAINT IF EXISTS {}""".format(table.name, fk.name) - db.query_no_return(sql) - db.query_no_return("COMMIT") - -def create_fk(db): - from sqlalchemy.schema import AddConstraint - from sqlalchemy import text - connection = db.engine().connect() - connection.execute(text("START TRANSACTION")) - for table in build_metadata().sorted_tables: - for fk in table.foreign_keys: - connection.execute(AddConstraint(fk.constraint)) - connection.execute(text("COMMIT")) - -def setup(db): - """Creates all tables and seed data""" - # initialize schema - build_metadata().create_all(db.engine()) - - # tune auto vacuum/analyze - reset_autovac(db) - - # sets FILLFACTOR: - set_fillfactor(db) - - # default rows - sqls = [ - "INSERT INTO hive_state (block_num, db_version, steem_per_mvest, usd_per_steem, sbd_per_steem, dgpo) VALUES (0, %d, 0, 0, 0, '')" % DB_VERSION, - "INSERT INTO hive_blocks (num, hash, created_at) VALUES (0, '0000000000000000000000000000000000000000', '2016-03-24 16:04:57')", - - "INSERT INTO hive_permlink_data (id, permlink) VALUES (0, '')", - "INSERT INTO hive_category_data (id, category) VALUES (0, '')", - "INSERT INTO hive_tag_data (id, tag) VALUES (0, '')", - "INSERT INTO hive_accounts (id, name, created_at) VALUES (0, '', '1970-01-01T00:00:00')", - - "INSERT INTO hive_accounts (name, created_at) VALUES ('miners', '2016-03-24 16:05:00')", - "INSERT INTO hive_accounts (name, created_at) VALUES ('null', '2016-03-24 16:05:00')", - "INSERT INTO hive_accounts (name, created_at) VALUES ('temp', '2016-03-24 16:05:00')", - "INSERT INTO hive_accounts (name, created_at) VALUES ('initminer', '2016-03-24 16:05:00')", - - """ - INSERT INTO - public.hive_posts(id, root_id, parent_id, author_id, permlink_id, category_id, - community_id, created_at, depth, block_num, block_num_created - ) - VALUES - (0, 0, 0, 0, 0, 0, 0, now(), 0, 0, 0); - """] - for sql in sqls: - db.query(sql) - - sql = "CREATE INDEX hive_communities_ft1 ON hive_communities USING GIN (to_tsvector('english', title || ' ' || about))" - db.query(sql) - - # find_comment_id definition moved to utility_functions.sql - # find_account_id definition moved to utility_functions.sql - - # process_hive_post_operation definition moved to hive_post_operations.sql - # delete_hive_post moved to hive_post_operations.sql - - # In original hivemind, a value of 'active_at' was calculated from - # max - # { - # created ( account_create_operation ), - # last_account_update ( account_update_operation/account_update2_operation ), - # last_post ( comment_operation - only creation ) - # last_root_post ( comment_operation - only creation + only ROOT ), - # last_vote_time ( vote_operation ) - # } - # In order to simplify calculations, `last_account_update` is not taken into consideration, because this updating accounts is very rare - # and posting/voting after an account updating, fixes `active_at` value immediately. 
- - # hive_accounts_view definition moved to hive_accounts_view.sql - - # hive_posts_view definition moved to hive_posts_view.sql - - # update_hive_posts_root_id moved to update_hive_posts_root_id.sql - - # hive_votes_view definition moved into hive_votes_view.sql - - # database_api_vote, find_votes, list_votes_by_voter_comment, list_votes_by_comment_voter moved into database_api_list_votes.sql - - sql = """ - DO $$ - BEGIN - EXECUTE 'ALTER DATABASE '||current_database()||' SET join_collapse_limit TO 16'; - EXECUTE 'ALTER DATABASE '||current_database()||' SET from_collapse_limit TO 16'; - END - $$; - """ - db.query_no_return(sql) - - sql = """ - CREATE TABLE IF NOT EXISTS hive_db_patch_level - ( - level SERIAL NOT NULL PRIMARY KEY, - patch_date timestamp without time zone NOT NULL, - patched_to_revision TEXT - ); - """ - db.query_no_return(sql) - sql = """ - INSERT INTO hive_db_patch_level - (patch_date, patched_to_revision) - values - (now(), '{}'); - """ - - from hive.version import GIT_REVISION - db.query_no_return(sql.format(GIT_REVISION)) - - # max_time_stamp definition moved into utility_functions.sql - - # get_discussion definition moved to bridge_get_discussion.sql - - sql_scripts = [ - "utility_functions.sql", - "hive_accounts_view.sql", - "hive_accounts_info_view.sql", - "hive_posts_base_view.sql", - "hive_posts_view.sql", - "hive_votes_view.sql", - "hive_post_operations.sql", - "head_block_time.sql", - "update_feed_cache.sql", - "payout_stats_view.sql", - "update_hive_posts_mentions.sql", - "find_tag_id.sql", - "bridge_get_ranked_post_type.sql", - "bridge_get_ranked_post_for_communities.sql", - "bridge_get_ranked_post_for_observer_communities.sql", - "bridge_get_ranked_post_for_tag.sql", - "bridge_get_ranked_post_for_all.sql", - "calculate_account_reputations.sql", - "update_communities_rank.sql", - "delete_hive_posts_mentions.sql", - "notifications_view.sql", - "notifications_api.sql", - "bridge_get_account_posts_by_comments.sql", - "bridge_get_account_posts_by_payout.sql", - "bridge_get_account_posts_by_posts.sql", - "bridge_get_account_posts_by_replies.sql", - "bridge_get_relationship_between_accounts.sql", - "bridge_get_post.sql", - "bridge_get_discussion.sql", - "condenser_api_post_type.sql", - "condenser_api_post_ex_type.sql", - "condenser_get_blog.sql", - "condenser_get_content.sql", - "condenser_get_discussions_by_blog.sql", - "condenser_tags.sql", - "condenser_follows.sql", - "hot_and_trends.sql", - "update_hive_posts_children_count.sql", - "update_hive_posts_api_helper.sql", - "database_api_list_comments.sql", - "database_api_list_votes.sql", - "update_posts_rshares.sql", - "update_hive_post_root_id.sql", - "condenser_get_by_replies_to_account.sql", - "condenser_get_by_account_comments.sql", - "condenser_get_by_blog_without_reblog.sql", - "condenser_get_by_feed_with_reblog.sql", - "condenser_get_by_blog.sql", - "bridge_get_account_posts_by_blog.sql", - "condenser_get_names_by_reblogged.sql", - "condenser_get_discussions_by_comments.sql", - "condenser_get_account_reputations.sql" - - ] - from os.path import dirname, realpath - dir_path = dirname(realpath(__file__)) - for script in sql_scripts: - execute_sql_script(db.query_no_return, "{}/sql_scripts/{}".format(dir_path, script)) - - - - - -def reset_autovac(db): - """Initializes/resets per-table autovacuum/autoanalyze params. 
- - We use a scale factor of 0 and specify exact threshold tuple counts, - per-table, in the format (autovacuum_threshold, autoanalyze_threshold).""" - - autovac_config = { # vacuum analyze - 'hive_accounts': (50000, 100000), - 'hive_posts': (2500, 10000), - 'hive_post_tags': (5000, 10000), - 'hive_follows': (5000, 5000), - 'hive_feed_cache': (5000, 5000), - 'hive_blocks': (5000, 25000), - 'hive_reblogs': (5000, 5000), - 'hive_payments': (5000, 5000), - } - - for table, (n_vacuum, n_analyze) in autovac_config.items(): - sql = """ALTER TABLE %s SET (autovacuum_vacuum_scale_factor = 0, - autovacuum_vacuum_threshold = %s, - autovacuum_analyze_scale_factor = 0, - autovacuum_analyze_threshold = %s)""" - db.query(sql % (table, n_vacuum, n_analyze)) - - -def set_fillfactor(db): - """Initializes/resets FILLFACTOR for tables which are intesively updated""" - - fillfactor_config = { - 'hive_posts': 70, - 'hive_post_data': 70, - 'hive_votes': 70, - 'hive_reputation_data': 50 - } - - for table, fillfactor in fillfactor_config.items(): - sql = """ALTER TABLE {} SET (FILLFACTOR = {})""" - db.query(sql.format(table, fillfactor)) - -def set_logged_table_attribute(db, logged): - """Initializes/resets LOGGED/UNLOGGED attribute for tables which are intesively updated""" - - logged_config = [ - 'hive_accounts', - 'hive_permlink_data', - 'hive_post_tags', - 'hive_posts', - 'hive_post_data', - 'hive_votes', - 'hive_reputation_data' - ] - - for table in logged_config: - log.info("Setting {} attribute on a table: {}".format('LOGGED' if logged else 'UNLOGGED', table)) - sql = """ALTER TABLE {} SET {}""" - db.query_no_return(sql.format(table, 'LOGGED' if logged else 'UNLOGGED')) - -def execute_sql_script(query_executor, path_to_script): - """ Load and execute sql script from file - Params: - query_executor - callable to execute query with - path_to_script - path to script - Returns: - depending on query_executor - - Example: - print(execute_sql_script(db.query_row, "./test.sql")) - where test_sql: SELECT * FROM hive_state WHERE block_num = 0; - will return something like: (0, 18, Decimal('0.000000'), Decimal('0.000000'), Decimal('0.000000'), '') - """ - try: - sql_script = None - with open(path_to_script, 'r') as sql_script_file: - sql_script = sql_script_file.read() - if sql_script is not None: - return query_executor(sql_script) - except Exception as ex: - log.exception("Error running sql script: {}".format(ex)) - raise ex - return None +"""Db schema definitions and setup routines.""" + +import sqlalchemy as sa +from sqlalchemy.sql import text as sql_text +from sqlalchemy.types import SMALLINT +from sqlalchemy.types import CHAR +from sqlalchemy.types import VARCHAR +from sqlalchemy.types import TEXT +from sqlalchemy.types import BOOLEAN + +import logging +log = logging.getLogger(__name__) + +#pylint: disable=line-too-long, too-many-lines, bad-whitespace + +# [DK] we changed and removed some tables so i upgraded DB_VERSION to 18 +DB_VERSION = 18 + +def build_metadata(): + """Build schema def with SqlAlchemy""" + metadata = sa.MetaData() + + sa.Table( + 'hive_blocks', metadata, + sa.Column('num', sa.Integer, primary_key=True, autoincrement=False), + sa.Column('hash', CHAR(40), nullable=False), + sa.Column('prev', CHAR(40)), + sa.Column('txs', SMALLINT, server_default='0', nullable=False), + sa.Column('ops', SMALLINT, server_default='0', nullable=False), + sa.Column('created_at', sa.DateTime, nullable=False), + + sa.UniqueConstraint('hash', name='hive_blocks_ux1'), + sa.ForeignKeyConstraint(['prev'], 
['hive_blocks.hash'], name='hive_blocks_fk1'), + sa.Index('hive_blocks_created_at_idx', 'created_at') + ) + + sa.Table( + 'hive_accounts', metadata, + sa.Column('id', sa.Integer, primary_key=True), + sa.Column('name', VARCHAR(16, collation='C'), nullable=False), + sa.Column('created_at', sa.DateTime, nullable=False), + #sa.Column('block_num', sa.Integer, nullable=False), + sa.Column('reputation', sa.BigInteger, nullable=False, server_default='0'), + sa.Column('is_implicit', sa.Boolean, nullable=False, server_default='1'), + sa.Column('followers', sa.Integer, nullable=False, server_default='0'), + sa.Column('following', sa.Integer, nullable=False, server_default='0'), + + sa.Column('rank', sa.Integer, nullable=False, server_default='0'), + + sa.Column('lastread_at', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'), + sa.Column('posting_json_metadata', sa.Text), + sa.Column('json_metadata', sa.Text), + + sa.UniqueConstraint('name', name='hive_accounts_ux1'), + sa.Index('hive_accounts_ix6', 'reputation') + ) + + sa.Table( + 'hive_reputation_data', metadata, + sa.Column('id', sa.Integer, primary_key=True), + sa.Column('author_id', sa.Integer, nullable=False), + sa.Column('voter_id', sa.Integer, nullable=False), + sa.Column('permlink', sa.String(255, collation='C'), nullable=False), + sa.Column('rshares', sa.BigInteger, nullable=False), + sa.Column('block_num', sa.Integer, nullable=False), + + sa.Index('hive_reputation_data_author_permlink_voter_idx', 'author_id', 'permlink', 'voter_id'), + sa.Index('hive_reputation_data_block_num_idx', 'block_num') + ) + + sa.Table( + 'hive_posts', metadata, + sa.Column('id', sa.Integer, primary_key=True), + sa.Column('root_id', sa.Integer, nullable=False), # records having initially set 0 will be updated to their id + sa.Column('parent_id', sa.Integer, nullable=False), + sa.Column('author_id', sa.Integer, nullable=False), + sa.Column('permlink_id', sa.Integer, nullable=False), + sa.Column('category_id', sa.Integer, nullable=False), + sa.Column('community_id', sa.Integer, nullable=True), + sa.Column('created_at', sa.DateTime, nullable=False), + sa.Column('depth', SMALLINT, nullable=False), + sa.Column('counter_deleted', sa.Integer, nullable=False, server_default='0'), + sa.Column('is_pinned', BOOLEAN, nullable=False, server_default='0'), + sa.Column('is_muted', BOOLEAN, nullable=False, server_default='0'), + sa.Column('is_valid', BOOLEAN, nullable=False, server_default='1'), + sa.Column('promoted', sa.types.DECIMAL(10, 3), nullable=False, server_default='0'), + + sa.Column('children', sa.Integer, nullable=False, server_default='0'), + + # core stats/indexes + sa.Column('payout', sa.types.DECIMAL(10, 3), nullable=False, server_default='0'), + sa.Column('pending_payout', sa.types.DECIMAL(10, 3), nullable=False, server_default='0'), + sa.Column('payout_at', sa.DateTime, nullable=False, server_default='1970-01-01'), + sa.Column('last_payout_at', sa.DateTime, nullable=False, server_default='1970-01-01'), + sa.Column('updated_at', sa.DateTime, nullable=False, server_default='1970-01-01'), + sa.Column('is_paidout', BOOLEAN, nullable=False, server_default='0'), + + # ui flags/filters + sa.Column('is_nsfw', BOOLEAN, nullable=False, server_default='0'), + sa.Column('is_declined', BOOLEAN, nullable=False, server_default='0'), + sa.Column('is_full_power', BOOLEAN, nullable=False, server_default='0'), + sa.Column('is_hidden', BOOLEAN, nullable=False, server_default='0'), + + # important indexes + sa.Column('sc_trend', sa.Float(precision=6), 
nullable=False, server_default='0'), + sa.Column('sc_hot', sa.Float(precision=6), nullable=False, server_default='0'), + + sa.Column('total_payout_value', sa.String(30), nullable=False, server_default='0.000 HBD'), + sa.Column('author_rewards', sa.BigInteger, nullable=False, server_default='0'), + + sa.Column('author_rewards_hive', sa.BigInteger, nullable=False, server_default='0'), + sa.Column('author_rewards_hbd', sa.BigInteger, nullable=False, server_default='0'), + sa.Column('author_rewards_vests', sa.BigInteger, nullable=False, server_default='0'), + + sa.Column('abs_rshares', sa.Numeric, nullable=False, server_default='0'), + sa.Column('vote_rshares', sa.Numeric, nullable=False, server_default='0'), + sa.Column('total_vote_weight', sa.Numeric, nullable=False, server_default='0'), + sa.Column('active', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'), + sa.Column('cashout_time', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'), + sa.Column('percent_hbd', sa.Integer, nullable=False, server_default='10000'), + + sa.Column('curator_payout_value', sa.String(30), nullable=False, server_default='0.000 HBD'), + sa.Column('max_accepted_payout', sa.String(30), nullable=False, server_default='1000000.000 HBD'), + sa.Column('allow_votes', BOOLEAN, nullable=False, server_default='1'), + sa.Column('allow_curation_rewards', BOOLEAN, nullable=False, server_default='1'), + sa.Column('beneficiaries', sa.JSON, nullable=False, server_default='[]'), + sa.Column('block_num', sa.Integer, nullable=False ), + sa.Column('block_num_created', sa.Integer, nullable=False ), + + sa.ForeignKeyConstraint(['author_id'], ['hive_accounts.id'], name='hive_posts_fk1'), + sa.ForeignKeyConstraint(['root_id'], ['hive_posts.id'], name='hive_posts_fk2'), + sa.ForeignKeyConstraint(['parent_id'], ['hive_posts.id'], name='hive_posts_fk3'), + sa.UniqueConstraint('author_id', 'permlink_id', 'counter_deleted', name='hive_posts_ux1'), + + sa.Index('hive_posts_depth_idx', 'depth'), + + sa.Index('hive_posts_root_id_id_idx', 'root_id','id'), + + sa.Index('hive_posts_parent_id_idx', 'parent_id'), + sa.Index('hive_posts_community_id_idx', 'community_id'), + + sa.Index('hive_posts_category_id_idx', 'category_id'), + sa.Index('hive_posts_payout_at_idx', 'payout_at'), + sa.Index('hive_posts_payout_idx', 'payout'), + sa.Index('hive_posts_promoted_idx', 'promoted'), + sa.Index('hive_posts_sc_trend_id_is_paidout_idx', 'sc_trend', 'id', 'is_paidout'), + sa.Index('hive_posts_sc_hot_id_is_paidout_idx', 'sc_hot', 'id', 'is_paidout'), + sa.Index('hive_posts_author_id_created_at_idx', sa.text('author_id DESC, created_at DESC')), + sa.Index('hive_posts_block_num_idx', 'block_num'), + sa.Index('hive_posts_block_num_created_idx', 'block_num_created'), + sa.Index('hive_posts_cashout_time_id_idx', 'cashout_time', 'id'), + sa.Index('hive_posts_updated_at_idx', sa.text('updated_at DESC')), + sa.Index('hive_posts_payout_plus_pending_payout_id_is_paidout_idx', sa.text('(payout+pending_payout), id, is_paidout')) + ) + + sa.Table( + 'hive_post_data', metadata, + sa.Column('id', sa.Integer, primary_key=True, autoincrement=False), + sa.Column('title', VARCHAR(512), nullable=False, server_default=''), + sa.Column('preview', VARCHAR(1024), nullable=False, server_default=''), # first 1k of 'body' + sa.Column('img_url', VARCHAR(1024), nullable=False, server_default=''), # first 'image' from 'json' + sa.Column('body', TEXT, nullable=False, server_default=''), + sa.Column('json', TEXT, nullable=False, server_default='') + ) + + 
sa.Table( + 'hive_permlink_data', metadata, + sa.Column('id', sa.Integer, primary_key=True), + sa.Column('permlink', sa.String(255, collation='C'), nullable=False), + sa.UniqueConstraint('permlink', name='hive_permlink_data_permlink') + ) + + sa.Table( + 'hive_category_data', metadata, + sa.Column('id', sa.Integer, primary_key=True), + sa.Column('category', sa.String(255, collation='C'), nullable=False), + sa.UniqueConstraint('category', name='hive_category_data_category') + ) + + sa.Table( + 'hive_votes', metadata, + sa.Column('id', sa.BigInteger, primary_key=True), + sa.Column('post_id', sa.Integer, nullable=False), + sa.Column('voter_id', sa.Integer, nullable=False), + sa.Column('author_id', sa.Integer, nullable=False), + sa.Column('permlink_id', sa.Integer, nullable=False), + sa.Column('weight', sa.Numeric, nullable=False, server_default='0'), + sa.Column('rshares', sa.BigInteger, nullable=False, server_default='0'), + sa.Column('vote_percent', sa.Integer, server_default='0'), + sa.Column('last_update', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'), + sa.Column('num_changes', sa.Integer, server_default='0'), + sa.Column('block_num', sa.Integer, nullable=False ), + sa.Column('is_effective', BOOLEAN, nullable=False, server_default='0'), + + sa.UniqueConstraint('voter_id', 'author_id', 'permlink_id', name='hive_votes_voter_id_author_id_permlink_id_uk'), + + sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_votes_fk1'), + sa.ForeignKeyConstraint(['voter_id'], ['hive_accounts.id'], name='hive_votes_fk2'), + sa.ForeignKeyConstraint(['author_id'], ['hive_accounts.id'], name='hive_votes_fk3'), + sa.ForeignKeyConstraint(['permlink_id'], ['hive_permlink_data.id'], name='hive_votes_fk4'), + sa.ForeignKeyConstraint(['block_num'], ['hive_blocks.num'], name='hive_votes_fk5'), + + sa.Index('hive_votes_voter_id_post_id_idx', 'voter_id', 'post_id'), # probably this index is redundant to hive_votes_voter_id_last_update_idx because of starting voter_id. 
+ sa.Index('hive_votes_voter_id_last_update_idx', 'voter_id', 'last_update'), # this index is critical for hive_accounts_info_view performance + sa.Index('hive_votes_post_id_voter_id_idx', 'post_id', 'voter_id'), + sa.Index('hive_votes_block_num_idx', 'block_num') # this is also important for hive_accounts_info_view + ) + + sa.Table( + 'hive_tag_data', metadata, + sa.Column('id', sa.Integer, nullable=False, primary_key=True), + sa.Column('tag', VARCHAR(64, collation='C'), nullable=False, server_default=''), + sa.UniqueConstraint('tag', name='hive_tag_data_ux1') + ) + + sa.Table( + 'hive_post_tags', metadata, + sa.Column('post_id', sa.Integer, nullable=False), + sa.Column('tag_id', sa.Integer, nullable=False), + sa.PrimaryKeyConstraint('post_id', 'tag_id', name='hive_post_tags_pk1'), + + sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_post_tags_fk1'), + sa.ForeignKeyConstraint(['tag_id'], ['hive_tag_data.id'], name='hive_post_tags_fk2'), + + sa.Index('hive_post_tags_tag_id_idx', 'tag_id') + ) + + sa.Table( + 'hive_follows', metadata, + sa.Column('id', sa.Integer, primary_key=True ), + sa.Column('follower', sa.Integer, nullable=False), + sa.Column('following', sa.Integer, nullable=False), + sa.Column('state', SMALLINT, nullable=False, server_default='1'), + sa.Column('created_at', sa.DateTime, nullable=False), + sa.Column('blacklisted', sa.Boolean, nullable=False, server_default='0'), + sa.Column('follow_blacklists', sa.Boolean, nullable=False, server_default='0'), + sa.Column('follow_muted', BOOLEAN, nullable=False, server_default='0'), + sa.Column('block_num', sa.Integer, nullable=False ), + + sa.UniqueConstraint('following', 'follower', name='hive_follows_ux1'), # core + sa.ForeignKeyConstraint(['block_num'], ['hive_blocks.num'], name='hive_follows_fk1'), + sa.Index('hive_follows_ix5a', 'following', 'state', 'created_at', 'follower'), + sa.Index('hive_follows_ix5b', 'follower', 'state', 'created_at', 'following'), + sa.Index('hive_follows_block_num_idx', 'block_num'), + sa.Index('hive_follows_created_at_idx', 'created_at'), + ) + + sa.Table( + 'hive_reblogs', metadata, + sa.Column('id', sa.Integer, primary_key=True ), + sa.Column('blogger_id', sa.Integer, nullable=False), + sa.Column('post_id', sa.Integer, nullable=False), + sa.Column('created_at', sa.DateTime, nullable=False), + sa.Column('block_num', sa.Integer, nullable=False ), + + sa.ForeignKeyConstraint(['blogger_id'], ['hive_accounts.id'], name='hive_reblogs_fk1'), + sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_reblogs_fk2'), + sa.ForeignKeyConstraint(['block_num'], ['hive_blocks.num'], name='hive_reblogs_fk3'), + sa.UniqueConstraint('blogger_id', 'post_id', name='hive_reblogs_ux1'), # core + sa.Index('hive_reblogs_post_id', 'post_id'), + sa.Index('hive_reblogs_block_num_idx', 'block_num'), + sa.Index('hive_reblogs_created_at_idx', 'created_at') + ) + + sa.Table( + 'hive_payments', metadata, + sa.Column('id', sa.Integer, primary_key=True), + sa.Column('block_num', sa.Integer, nullable=False), + sa.Column('tx_idx', SMALLINT, nullable=False), + sa.Column('post_id', sa.Integer, nullable=False), + sa.Column('from_account', sa.Integer, nullable=False), + sa.Column('to_account', sa.Integer, nullable=False), + sa.Column('amount', sa.types.DECIMAL(10, 3), nullable=False), + sa.Column('token', VARCHAR(5), nullable=False), + + sa.ForeignKeyConstraint(['from_account'], ['hive_accounts.id'], name='hive_payments_fk1'), + sa.ForeignKeyConstraint(['to_account'], ['hive_accounts.id'], 
name='hive_payments_fk2'), + sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_payments_fk3'), + sa.Index('hive_payments_from', 'from_account'), + sa.Index('hive_payments_to', 'to_account'), + sa.Index('hive_payments_post_id', 'post_id'), + ) + + sa.Table( + 'hive_feed_cache', metadata, + sa.Column('post_id', sa.Integer, nullable=False), + sa.Column('account_id', sa.Integer, nullable=False), + sa.Column('created_at', sa.DateTime, nullable=False), + sa.Column('block_num', sa.Integer, nullable=False), + sa.PrimaryKeyConstraint('account_id', 'post_id', name='hive_feed_cache_pk'), + sa.ForeignKeyConstraint(['block_num'], ['hive_blocks.num'], name='hive_feed_cache_fk1'), + + sa.Index('hive_feed_cache_block_num_idx', 'block_num'), + sa.Index('hive_feed_cache_created_at_idx', 'created_at') + ) + + sa.Table( + 'hive_state', metadata, + sa.Column('block_num', sa.Integer, primary_key=True, autoincrement=False), + sa.Column('db_version', sa.Integer, nullable=False), + sa.Column('steem_per_mvest', sa.types.DECIMAL(14, 6), nullable=False), + sa.Column('usd_per_steem', sa.types.DECIMAL(14, 6), nullable=False), + sa.Column('sbd_per_steem', sa.types.DECIMAL(14, 6), nullable=False), + sa.Column('dgpo', sa.Text, nullable=False), + ) + + sa.Table( + 'hive_posts_api_helper', metadata, + sa.Column('id', sa.Integer, primary_key=True, autoincrement = False), + sa.Column('author_s_permlink', VARCHAR(275, collation='C'), nullable=False), # concatenation of author '/' permlink + sa.Index('hive_posts_api_helper_author_s_permlink_idx', 'author_s_permlink') + ) + + sa.Table( + 'hive_mentions', metadata, + sa.Column('id', sa.Integer, primary_key=True), + sa.Column('post_id', sa.Integer, nullable=False), + sa.Column('account_id', sa.Integer, nullable=False), + sa.Column('block_num', sa.Integer, nullable=False), + + sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_mentions_fk1'), + sa.ForeignKeyConstraint(['account_id'], ['hive_accounts.id'], name='hive_mentions_fk2'), + + sa.Index('hive_mentions_account_id_idx', 'account_id'), + sa.UniqueConstraint('post_id', 'account_id', 'block_num', name='hive_mentions_ux1') + ) + + metadata = build_metadata_community(metadata) + + return metadata + +def build_metadata_community(metadata=None): + """Build community schema defs""" + if not metadata: + metadata = sa.MetaData() + + sa.Table( + 'hive_communities', metadata, + sa.Column('id', sa.Integer, primary_key=True, autoincrement=False), + sa.Column('type_id', SMALLINT, nullable=False), + sa.Column('lang', CHAR(2), nullable=False, server_default='en'), + sa.Column('name', VARCHAR(16, collation='C'), nullable=False), + sa.Column('title', sa.String(32), nullable=False, server_default=''), + sa.Column('created_at', sa.DateTime, nullable=False), + sa.Column('sum_pending', sa.Integer, nullable=False, server_default='0'), + sa.Column('num_pending', sa.Integer, nullable=False, server_default='0'), + sa.Column('num_authors', sa.Integer, nullable=False, server_default='0'), + sa.Column('rank', sa.Integer, nullable=False, server_default='0'), + sa.Column('subscribers', sa.Integer, nullable=False, server_default='0'), + sa.Column('is_nsfw', BOOLEAN, nullable=False, server_default='0'), + sa.Column('about', sa.String(120), nullable=False, server_default=''), + sa.Column('primary_tag', sa.String(32), nullable=False, server_default=''), + sa.Column('category', sa.String(32), nullable=False, server_default=''), + sa.Column('avatar_url', sa.String(1024), nullable=False, server_default=''), + 
sa.Column('description', sa.String(5000), nullable=False, server_default=''), + sa.Column('flag_text', sa.String(5000), nullable=False, server_default=''), + sa.Column('settings', TEXT, nullable=False, server_default='{}'), + sa.Column('block_num', sa.Integer, nullable=False ), + + sa.UniqueConstraint('name', name='hive_communities_ux1'), + sa.Index('hive_communities_ix1', 'rank', 'id'), + sa.Index('hive_communities_block_num_idx', 'block_num') + ) + + sa.Table( + 'hive_roles', metadata, + sa.Column('account_id', sa.Integer, nullable=False), + sa.Column('community_id', sa.Integer, nullable=False), + sa.Column('created_at', sa.DateTime, nullable=False), + sa.Column('role_id', SMALLINT, nullable=False, server_default='0'), + sa.Column('title', sa.String(140), nullable=False, server_default=''), + + sa.PrimaryKeyConstraint('account_id', 'community_id', name='hive_roles_pk'), + sa.Index('hive_roles_ix1', 'community_id', 'account_id', 'role_id'), + ) + + sa.Table( + 'hive_subscriptions', metadata, + sa.Column('id', sa.Integer, primary_key=True), + sa.Column('account_id', sa.Integer, nullable=False), + sa.Column('community_id', sa.Integer, nullable=False), + sa.Column('created_at', sa.DateTime, nullable=False), + sa.Column('block_num', sa.Integer, nullable=False ), + + sa.UniqueConstraint('account_id', 'community_id', name='hive_subscriptions_ux1'), + sa.Index('hive_subscriptions_community_idx', 'community_id'), + sa.Index('hive_subscriptions_block_num_idx', 'block_num') + ) + + sa.Table( + 'hive_notifs', metadata, + sa.Column('id', sa.Integer, primary_key=True), + sa.Column('block_num', sa.Integer, nullable=False), + sa.Column('type_id', SMALLINT, nullable=False), + sa.Column('score', SMALLINT, nullable=False), + sa.Column('created_at', sa.DateTime, nullable=False), + sa.Column('src_id', sa.Integer, nullable=True), + sa.Column('dst_id', sa.Integer, nullable=True), + sa.Column('post_id', sa.Integer, nullable=True), + sa.Column('community_id', sa.Integer, nullable=True), + sa.Column('block_num', sa.Integer, nullable=False), + sa.Column('payload', sa.Text, nullable=True), + + sa.Index('hive_notifs_ix1', 'dst_id', 'id', postgresql_where=sql_text("dst_id IS NOT NULL")), + sa.Index('hive_notifs_ix2', 'community_id', 'id', postgresql_where=sql_text("community_id IS NOT NULL")), + sa.Index('hive_notifs_ix3', 'community_id', 'type_id', 'id', postgresql_where=sql_text("community_id IS NOT NULL")), + sa.Index('hive_notifs_ix4', 'community_id', 'post_id', 'type_id', 'id', postgresql_where=sql_text("community_id IS NOT NULL AND post_id IS NOT NULL")), + sa.Index('hive_notifs_ix5', 'post_id', 'type_id', 'dst_id', 'src_id', postgresql_where=sql_text("post_id IS NOT NULL AND type_id IN (16,17)")), # filter: dedupe + sa.Index('hive_notifs_ix6', 'dst_id', 'created_at', 'score', 'id', postgresql_where=sql_text("dst_id IS NOT NULL")), # unread + ) + + sa.Table('hive_notification_cache', metadata, + sa.Column('id', sa.BigInteger, primary_key=True), + sa.Column('block_num', sa.Integer, nullable = False), + sa.Column('type_id', sa.Integer, nullable = False), + sa.Column('dst', sa.Integer, nullable=True), # dst account id except persistent notifs from hive_notifs + sa.Column('src', sa.Integer, nullable=True), # src account id + sa.Column('dst_post_id', sa.Integer, nullable=True), # destination post id + sa.Column('post_id', sa.Integer, nullable=True), + sa.Column('created_at', sa.DateTime, nullable=False), # notification creation time + sa.Column('score', sa.Integer, nullable=False), + sa.Column('community_title', 
sa.String(32), nullable=True), + sa.Column('community', sa.String(16), nullable=True), + sa.Column('payload', sa.String, nullable=True), + + sa.Index('hive_notification_cache_block_num_idx', 'block_num'), + sa.Index('hive_notification_cache_dst_score_idx', 'dst', 'score', postgresql_where=sql_text("dst IS NOT NULL")) + + ) + + return metadata + + +def teardown(db): + """Drop all tables""" + build_metadata().drop_all(db.engine()) + +def drop_fk(db): + db.query_no_return("START TRANSACTION") + for table in build_metadata().sorted_tables: + for fk in table.foreign_keys: + sql = """ALTER TABLE {} DROP CONSTRAINT IF EXISTS {}""".format(table.name, fk.name) + db.query_no_return(sql) + db.query_no_return("COMMIT") + +def create_fk(db): + from sqlalchemy.schema import AddConstraint + from sqlalchemy import text + connection = db.engine().connect() + connection.execute(text("START TRANSACTION")) + for table in build_metadata().sorted_tables: + for fk in table.foreign_keys: + connection.execute(AddConstraint(fk.constraint)) + connection.execute(text("COMMIT")) + +def setup(db): + """Creates all tables and seed data""" + # initialize schema + build_metadata().create_all(db.engine()) + + # tune auto vacuum/analyze + reset_autovac(db) + + # sets FILLFACTOR: + set_fillfactor(db) + + # default rows + sqls = [ + "INSERT INTO hive_state (block_num, db_version, steem_per_mvest, usd_per_steem, sbd_per_steem, dgpo) VALUES (0, %d, 0, 0, 0, '')" % DB_VERSION, + "INSERT INTO hive_blocks (num, hash, created_at) VALUES (0, '0000000000000000000000000000000000000000', '2016-03-24 16:04:57')", + + "INSERT INTO hive_permlink_data (id, permlink) VALUES (0, '')", + "INSERT INTO hive_category_data (id, category) VALUES (0, '')", + "INSERT INTO hive_tag_data (id, tag) VALUES (0, '')", + "INSERT INTO hive_accounts (id, name, created_at) VALUES (0, '', '1970-01-01T00:00:00')", + + "INSERT INTO hive_accounts (name, created_at) VALUES ('miners', '2016-03-24 16:05:00')", + "INSERT INTO hive_accounts (name, created_at) VALUES ('null', '2016-03-24 16:05:00')", + "INSERT INTO hive_accounts (name, created_at) VALUES ('temp', '2016-03-24 16:05:00')", + "INSERT INTO hive_accounts (name, created_at) VALUES ('initminer', '2016-03-24 16:05:00')", + + """ + INSERT INTO + public.hive_posts(id, root_id, parent_id, author_id, permlink_id, category_id, + community_id, created_at, depth, block_num, block_num_created + ) + VALUES + (0, 0, 0, 0, 0, 0, 0, now(), 0, 0, 0); + """] + for sql in sqls: + db.query(sql) + + sql = "CREATE INDEX hive_communities_ft1 ON hive_communities USING GIN (to_tsvector('english', title || ' ' || about))" + db.query(sql) + + # find_comment_id definition moved to utility_functions.sql + # find_account_id definition moved to utility_functions.sql + + # process_hive_post_operation definition moved to hive_post_operations.sql + # delete_hive_post moved to hive_post_operations.sql + + # In original hivemind, a value of 'active_at' was calculated from + # max + # { + # created ( account_create_operation ), + # last_account_update ( account_update_operation/account_update2_operation ), + # last_post ( comment_operation - only creation ) + # last_root_post ( comment_operation - only creation + only ROOT ), + # last_vote_time ( vote_operation ) + # } + # In order to simplify calculations, `last_account_update` is not taken into consideration, because this updating accounts is very rare + # and posting/voting after an account updating, fixes `active_at` value immediately. 
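
For readers unfamiliar with the rule described in the comment above, the simplified `active_at` amounts to taking the greatest of the remaining per-account timestamps. Below is a minimal sketch of that rule, kept outside the diff itself; the column and relation names are assumptions taken from the wording of the comment, not definitions from this schema:

    # Sketch only: the simplified 'active_at' rule expressed as a SQL string,
    # in the same style the file uses for embedded SQL.
    # 'created_at', 'last_post', 'last_root_post' and 'last_vote_time' are assumed
    # column names mirroring the comment above; 'last_account_update' is deliberately
    # omitted, for the reason given there.
    ACTIVE_AT_SKETCH_SQL = """
    SELECT GREATEST(created_at, last_post, last_root_post, last_vote_time) AS active_at
      FROM some_per_account_timestamps -- hypothetical source relation
    """
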
+ + # hive_accounts_view definition moved to hive_accounts_view.sql + + # hive_posts_view definition moved to hive_posts_view.sql + + # update_hive_posts_root_id moved to update_hive_posts_root_id.sql + + # hive_votes_view definition moved into hive_votes_view.sql + + # database_api_vote, find_votes, list_votes_by_voter_comment, list_votes_by_comment_voter moved into database_api_list_votes.sql + + sql = """ + DO $$ + BEGIN + EXECUTE 'ALTER DATABASE '||current_database()||' SET join_collapse_limit TO 16'; + EXECUTE 'ALTER DATABASE '||current_database()||' SET from_collapse_limit TO 16'; + END + $$; + """ + db.query_no_return(sql) + + sql = """ + CREATE TABLE IF NOT EXISTS hive_db_patch_level + ( + level SERIAL NOT NULL PRIMARY KEY, + patch_date timestamp without time zone NOT NULL, + patched_to_revision TEXT + ); + """ + db.query_no_return(sql) + sql = """ + INSERT INTO hive_db_patch_level + (patch_date, patched_to_revision) + values + (now(), '{}'); + """ + + from hive.version import GIT_REVISION + db.query_no_return(sql.format(GIT_REVISION)) + + # max_time_stamp definition moved into utility_functions.sql + + # get_discussion definition moved to bridge_get_discussion.sql + + sql_scripts = [ + "utility_functions.sql", + "hive_accounts_view.sql", + "hive_accounts_info_view.sql", + "hive_posts_base_view.sql", + "hive_posts_view.sql", + "hive_votes_view.sql", + "hive_post_operations.sql", + "head_block_time.sql", + "update_feed_cache.sql", + "payout_stats_view.sql", + "update_hive_posts_mentions.sql", + "find_tag_id.sql", + "bridge_get_ranked_post_type.sql", + "bridge_get_ranked_post_for_communities.sql", + "bridge_get_ranked_post_for_observer_communities.sql", + "bridge_get_ranked_post_for_tag.sql", + "bridge_get_ranked_post_for_all.sql", + "calculate_account_reputations.sql", + "update_communities_rank.sql", + "delete_hive_posts_mentions.sql", + "notifications_view.sql", + "notifications_api.sql", + "bridge_get_account_posts_by_comments.sql", + "bridge_get_account_posts_by_payout.sql", + "bridge_get_account_posts_by_posts.sql", + "bridge_get_account_posts_by_replies.sql", + "bridge_get_relationship_between_accounts.sql", + "bridge_get_post.sql", + "bridge_get_discussion.sql", + "condenser_api_post_type.sql", + "condenser_api_post_ex_type.sql", + "condenser_get_blog.sql", + "condenser_get_content.sql", + "condenser_get_discussions_by_blog.sql", + "condenser_tags.sql", + "condenser_follows.sql", + "hot_and_trends.sql", + "update_hive_posts_children_count.sql", + "update_hive_posts_api_helper.sql", + "database_api_list_comments.sql", + "database_api_list_votes.sql", + "update_posts_rshares.sql", + "update_hive_post_root_id.sql", + "condenser_get_by_replies_to_account.sql", + "condenser_get_by_account_comments.sql", + "condenser_get_by_blog_without_reblog.sql", + "condenser_get_by_feed_with_reblog.sql", + "condenser_get_by_blog.sql", + "bridge_get_account_posts_by_blog.sql", + "condenser_get_names_by_reblogged.sql", + "condenser_get_discussions_by_comments.sql", + "condenser_get_account_reputations.sql" + + ] + from os.path import dirname, realpath + dir_path = dirname(realpath(__file__)) + for script in sql_scripts: + execute_sql_script(db.query_no_return, "{}/sql_scripts/{}".format(dir_path, script)) + + + + + +def reset_autovac(db): + """Initializes/resets per-table autovacuum/autoanalyze params. 
+
+    We use a scale factor of 0 and specify exact threshold tuple counts,
+    per-table, in the format (autovacuum_threshold, autoanalyze_threshold)."""
+
+    autovac_config = { # vacuum  analyze
+        'hive_accounts':       (50000, 100000),
+        'hive_posts':          (2500, 10000),
+        'hive_post_tags':      (5000, 10000),
+        'hive_follows':        (5000, 5000),
+        'hive_feed_cache':     (5000, 5000),
+        'hive_blocks':         (5000, 25000),
+        'hive_reblogs':        (5000, 5000),
+        'hive_payments':       (5000, 5000),
+    }
+
+    for table, (n_vacuum, n_analyze) in autovac_config.items():
+        sql = """ALTER TABLE %s SET (autovacuum_vacuum_scale_factor = 0,
+                                     autovacuum_vacuum_threshold = %s,
+                                     autovacuum_analyze_scale_factor = 0,
+                                     autovacuum_analyze_threshold = %s)"""
+        db.query(sql % (table, n_vacuum, n_analyze))
+
+
+def set_fillfactor(db):
+    """Initializes/resets FILLFACTOR for tables which are intensively updated"""
+
+    fillfactor_config = {
+        'hive_posts': 70,
+        'hive_post_data': 70,
+        'hive_votes': 70,
+        'hive_reputation_data': 50
+    }
+
+    for table, fillfactor in fillfactor_config.items():
+        sql = """ALTER TABLE {} SET (FILLFACTOR = {})"""
+        db.query(sql.format(table, fillfactor))
+
+def set_logged_table_attribute(db, logged):
+    """Initializes/resets LOGGED/UNLOGGED attribute for tables which are intensively updated"""
+
+    logged_config = [
+        'hive_accounts',
+        'hive_permlink_data',
+        'hive_post_tags',
+        'hive_posts',
+        'hive_post_data',
+        'hive_votes',
+        'hive_reputation_data'
+    ]
+
+    for table in logged_config:
+        log.info("Setting {} attribute on a table: {}".format('LOGGED' if logged else 'UNLOGGED', table))
+        sql = """ALTER TABLE {} SET {}"""
+        db.query_no_return(sql.format(table, 'LOGGED' if logged else 'UNLOGGED'))
+
+def execute_sql_script(query_executor, path_to_script):
+    """ Load and execute sql script from file
+    Params:
+      query_executor - callable to execute query with
+      path_to_script - path to script
+    Returns:
+      depending on query_executor
+
+    Example:
+      print(execute_sql_script(db.query_row, "./test.sql"))
+      where test.sql contains: SELECT * FROM hive_state WHERE block_num = 0;
+      will return something like: (0, 18, Decimal('0.000000'), Decimal('0.000000'), Decimal('0.000000'), '')
+    """
+    try:
+        sql_script = None
+        with open(path_to_script, 'r') as sql_script_file:
+            sql_script = sql_script_file.read()
+        if sql_script is not None:
+            return query_executor(sql_script)
+    except Exception as ex:
+        log.exception("Error running sql script: {}".format(ex))
+        raise ex
+    return None
diff --git a/hive/server/condenser_api/cursor.py b/hive/server/condenser_api/cursor.py
index 763ddd17b35104c56b40f0686925c68d47d3be57..f93c46d9401f83bd3bbab324651efd37a508de53 100644
--- a/hive/server/condenser_api/cursor.py
+++ b/hive/server/condenser_api/cursor.py
@@ -1,62 +1,62 @@
-"""Cursor-based pagination queries, mostly supporting condenser_api."""
-
-from hive.server.common.helpers import last_month
-
-from hive.server.condenser_api.objects import _condenser_post_object
-from hive.server.database_api.methods import find_votes_impl, VotesPresentation
-
-# pylint: disable=too-many-lines
-
-async def get_followers(db, account: str, start: str, state: int, limit: int):
-    """Get a list of accounts following given account."""
-    sql = "SELECT * FROM condenser_get_followers( (:account)::VARCHAR, (:start)::VARCHAR, :type, :limit )"
-    return await db.query_col(sql, account=account, start=start, type=state, limit=limit)
-
-async def get_following(db, account: str, start: str, state: int, limit: int):
-    """Get a list of accounts followed by a given account."""
-    sql = "SELECT * FROM 
condenser_get_following( (:account)::VARCHAR, (:start)::VARCHAR, :type, :limit )" - return await db.query_col(sql, account=account, start=start, type=state, limit=limit) - - -async def get_reblogged_by(db, author: str, permlink: str): - """Return all rebloggers of a post.""" - - sql = "SELECT * FROM condenser_get_names_by_reblogged( '{}', '{}' )".format( author, permlink ) - names = await db.query_col(sql) - - if author in names: - names.remove(author) - return names - -async def get_data(db, sql:str, truncate_body: int = 0): - result = await db.query_all(sql); - - posts = [] - for row in result: - row = dict(row) - post = _condenser_post_object(row, truncate_body=truncate_body) - - post['active_votes'] = await find_votes_impl(db, row['author'], row['permlink'], VotesPresentation.CondenserApi) - posts.append(post) - - return posts - -async def get_by_blog_without_reblog(db, account: str, start_permlink: str = '', limit: int = 20, truncate_body: int = 0): - """Get a list of posts for an author's blog without reblogs.""" - sql = " SELECT * FROM condenser_get_by_blog_without_reblog( '{}', '{}', {} ) ".format( account, start_permlink, limit ) - return await get_data(db, sql, truncate_body ) - -async def get_by_account_comments(db, account: str, start_permlink: str = '', limit: int = 20, truncate_body: int = 0): - """Get a list of posts representing comments by an author.""" - sql = " SELECT * FROM condenser_get_by_account_comments( '{}', '{}', {} ) ".format( account, start_permlink, limit ) - return await get_data(db, sql, truncate_body ) - -async def get_by_replies_to_account(db, start_author: str, start_permlink: str = '', limit: int = 20, truncate_body: int = 0): - """Get a list of posts representing replies to an author.""" - sql = " SELECT * FROM condenser_get_by_replies_to_account( '{}', '{}', {} ) ".format( start_author, start_permlink, limit ) - return await get_data(db, sql, truncate_body ) - -async def get_by_blog(db, account: str = '', start_author: str = '', start_permlink: str = '', limit: int = 20): - """Get a list of posts for an author's blog.""" - sql = " SELECT * FROM condenser_get_by_blog( '{}', '{}', '{}', {} ) ".format( account, start_author, start_permlink, limit ) - return await get_data(db, sql ) +"""Cursor-based pagination queries, mostly supporting condenser_api.""" + +from hive.server.common.helpers import last_month + +from hive.server.condenser_api.objects import _condenser_post_object +from hive.server.database_api.methods import find_votes_impl, VotesPresentation + +# pylint: disable=too-many-lines + +async def get_followers(db, account: str, start: str, state: int, limit: int): + """Get a list of accounts following given account.""" + sql = "SELECT * FROM condenser_get_followers( (:account)::VARCHAR, (:start)::VARCHAR, :type, :limit )" + return await db.query_col(sql, account=account, start=start, type=state, limit=limit) + +async def get_following(db, account: str, start: str, state: int, limit: int): + """Get a list of accounts followed by a given account.""" + sql = "SELECT * FROM condenser_get_following( (:account)::VARCHAR, (:start)::VARCHAR, :type, :limit )" + return await db.query_col(sql, account=account, start=start, type=state, limit=limit) + + +async def get_reblogged_by(db, author: str, permlink: str): + """Return all rebloggers of a post.""" + + sql = "SELECT * FROM condenser_get_names_by_reblogged( '{}', '{}' )".format( author, permlink ) + names = await db.query_col(sql) + + if author in names: + names.remove(author) + return names + +async def 
get_data(db, sql:str, truncate_body: int = 0): + result = await db.query_all(sql); + + posts = [] + for row in result: + row = dict(row) + post = _condenser_post_object(row, truncate_body=truncate_body) + + post['active_votes'] = await find_votes_impl(db, row['author'], row['permlink'], VotesPresentation.CondenserApi) + posts.append(post) + + return posts + +async def get_by_blog_without_reblog(db, account: str, start_permlink: str = '', limit: int = 20, truncate_body: int = 0): + """Get a list of posts for an author's blog without reblogs.""" + sql = " SELECT * FROM condenser_get_by_blog_without_reblog( '{}', '{}', {} ) ".format( account, start_permlink, limit ) + return await get_data(db, sql, truncate_body ) + +async def get_by_account_comments(db, account: str, start_permlink: str = '', limit: int = 20, truncate_body: int = 0): + """Get a list of posts representing comments by an author.""" + sql = " SELECT * FROM condenser_get_by_account_comments( '{}', '{}', {} ) ".format( account, start_permlink, limit ) + return await get_data(db, sql, truncate_body ) + +async def get_by_replies_to_account(db, start_author: str, start_permlink: str = '', limit: int = 20, truncate_body: int = 0): + """Get a list of posts representing replies to an author.""" + sql = " SELECT * FROM condenser_get_by_replies_to_account( '{}', '{}', {} ) ".format( start_author, start_permlink, limit ) + return await get_data(db, sql, truncate_body ) + +async def get_by_blog(db, account: str = '', start_author: str = '', start_permlink: str = '', limit: int = 20): + """Get a list of posts for an author's blog.""" + sql = " SELECT * FROM condenser_get_by_blog( '{}', '{}', '{}', {} ) ".format( account, start_author, start_permlink, limit ) + return await get_data(db, sql ) diff --git a/hive/server/condenser_api/methods.py b/hive/server/condenser_api/methods.py index 4f9667f0aad4e9155d224162a6328ce76ea7de9b..b79a8206603151bb017cdf6806ba4c120d9c8eb4 100644 --- a/hive/server/condenser_api/methods.py +++ b/hive/server/condenser_api/methods.py @@ -1,482 +1,482 @@ -"""Steemd/condenser_api compatibility layer API methods.""" -from functools import wraps - -import hive.server.condenser_api.cursor as cursor -from hive.server.condenser_api.objects import _mute_votes, _condenser_post_object -from hive.server.common.helpers import ( - ApiError, - return_error_info, - json_date, - valid_account, - valid_permlink, - valid_tag, - valid_offset, - valid_limit, - valid_follow_type) -from hive.server.common.mutes import Mutes -from hive.server.database_api.methods import find_votes_impl, VotesPresentation - -from hive.server.hive_api.public import get_by_feed_with_reblog_impl - -# pylint: disable=too-many-arguments,line-too-long,too-many-lines - -@return_error_info -async def get_account_votes(context, account): - """Return an info message about get_acccount_votes being unsupported.""" - # pylint: disable=unused-argument - assert False, "get_account_votes is no longer supported, for details see https://hive.blog/steemit/@steemitdev/additional-public-api-change" - - -# Follows Queries - -def _legacy_follower(follower, following, follow_type): - return dict(follower=follower, following=following, what=[follow_type]) - -@return_error_info -async def get_followers(context, account: str, start: str = '', follow_type: str = None, - limit: int = 1000, **kwargs): - """Get all accounts following `account`. 
(EOL)""" - # `type` reserved word workaround - if not follow_type and 'type' in kwargs: - follow_type = kwargs['type'] - if not follow_type: - follow_type = 'blog' - followers = await cursor.get_followers( - context['db'], - valid_account(account), - valid_account(start, allow_empty=True), - valid_follow_type(follow_type), - valid_limit(limit, 1000, 1000)) - return [_legacy_follower(name, account, follow_type) for name in followers] - -@return_error_info -async def get_following(context, account: str, start: str = '', follow_type: str = None, - limit: int = 1000, **kwargs): - """Get all accounts `account` follows. (EOL)""" - # `type` reserved word workaround - if not follow_type and 'type' in kwargs: - follow_type = kwargs['type'] - if not follow_type: - follow_type = 'blog' - following = await cursor.get_following( - context['db'], - valid_account(account), - valid_account(start, allow_empty=True), - valid_follow_type(follow_type), - valid_limit(limit, 1000, 1000)) - return [_legacy_follower(account, name, follow_type) for name in following] - -@return_error_info -async def get_follow_count(context, account: str): - """Get follow count stats. (EOL)""" - db = context['db'] - account = valid_account(account) - sql = "SELECT * FROM condenser_get_follow_count( (:account)::VARCHAR )" - counters = await db.query_row(sql, account=account) - return dict(account=account, - following_count=counters[0], - follower_count=counters[1]) - -@return_error_info -async def get_reblogged_by(context, author: str, permlink: str): - """Get all rebloggers of a post.""" - return await cursor.get_reblogged_by( - context['db'], - valid_account(author), - valid_permlink(permlink)) - -@return_error_info -async def get_account_reputations(context, account_lower_bound: str = None, limit: int = None): - db = context['db'] - return await _get_account_reputations_impl(db, True, account_lower_bound, limit) - -async def _get_account_reputations_impl(db, fat_node_style, account_lower_bound, limit): - """Enumerate account reputations.""" - assert isinstance(account_lower_bound, str), "invalid account_lower_bound type" - limit = valid_limit(limit, 1000, 1000) - - sql = "SELECT * FROM condenser_get_account_reputations( '{}', {}, {} )".format( account_lower_bound, account_lower_bound is None, limit ) - rows = await db.query_all(sql, start=account_lower_bound, limit=limit) - if fat_node_style: - return [dict(account=r[0], reputation=r[1]) for r in rows] - else: - return {'reputations': [dict(name=r[0], reputation=r[1]) for r in rows]} - -# Content Primitives - -@return_error_info -async def get_content(context, author: str, permlink: str, observer=None): - db = context['db'] - return await _get_content_impl(db, True, author, permlink, observer) - -@return_error_info -async def _get_content_impl(db, fat_node_style, author: str, permlink: str, observer=None): - """Get a single post object.""" - valid_account(author) - valid_permlink(permlink) - - sql = "SELECT * FROM condenser_get_content(:author, :permlink)" - - post = None - result = await db.query_all(sql, author=author, permlink=permlink) - if result: - result = dict(result[0]) - post = _condenser_post_object(result, 0, fat_node_style) - post['active_votes'] = await find_votes_impl(db, author, permlink, VotesPresentation.ActiveVotes if fat_node_style else VotesPresentation.CondenserApi) - if not observer: - post['active_votes'] = _mute_votes(post['active_votes'], Mutes.all()) - else: - blacklists_for_user = await Mutes.get_blacklists_for_observer(observer, {'db':db}) - 
post['active_votes'] = _mute_votes(post['active_votes'], blacklists_for_user.keys()) - - return post - -@return_error_info -async def get_content_replies(context, author: str, permlink: str): - db = context['db'] - return await _get_content_replies_impl(db, True, author, permlink) - -@return_error_info -async def _get_content_replies_impl(db, fat_node_style, author: str, permlink: str): - """Get a list of post objects based on parent.""" - valid_account(author) - valid_permlink(permlink) - - sql = "SELECT * FROM condenser_get_content_replies(:author, :permlink)" - result = await db.query_all(sql, author=author, permlink=permlink) - - muted_accounts = Mutes.all() - - posts = [] - for row in result: - row = dict(row) - post = _condenser_post_object(row, get_content_additions=fat_node_style) - post['active_votes'] = await find_votes_impl(db, row['author'], row['permlink'], VotesPresentation.ActiveVotes if fat_node_style else VotesPresentation.CondenserApi) - post['active_votes'] = _mute_votes(post['active_votes'], muted_accounts) - posts.append(post) - - return posts - -# Discussion Queries - -def nested_query_compat(function): - """Unpack strange format used by some clients, accepted by steemd. - - Sometimes a discussion query object is nested inside a list[1]. Eg: - - {... "method":"condenser_api.get_discussions_by_hot", - "params":[{"tag":"steem","limit":1}]} - - In these cases jsonrpcserver dispatch just shoves it into the first - arg. This decorator checks for this specific condition and unpacks - the query to be passed as kwargs. - """ - @wraps(function) - def wrapper(*args, **kwargs): - """Checks for specific condition signature and unpacks query""" - if args and not kwargs and len(args) == 2 and isinstance(args[1], dict): - return function(args[0], **args[1]) - return function(*args, **kwargs) - return wrapper - -@return_error_info -@nested_query_compat -async def get_posts_by_given_sort(context, sort: str, start_author: str = '', start_permlink: str = '', - limit: int = 20, tag: str = None, - truncate_body: int = 0, filter_tags: list = None): - """Query posts, sorted by creation date.""" - assert not filter_tags, 'filter_tags not supported' - - db = context['db'] - - start_author = valid_account(start_author, allow_empty=True), - start_permlink = valid_permlink(start_permlink, allow_empty=True), - limit = valid_limit(limit, 100, 20), - tag = valid_tag(tag, allow_empty=True) - - posts = [] - is_community = tag[:5] == 'hive-' - - if sort == 'created': - if is_community: - sql = "SELECT * FROM bridge_get_ranked_post_by_created_for_community( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT, False )" - elif tag == '': - sql = "SELECT * FROM bridge_get_ranked_post_by_created( (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" - else: - sql = "SELECT * FROM bridge_get_ranked_post_by_created_for_tag( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" - elif sort == 'trending': - if is_community: - sql = "SELECT * FROM bridge_get_ranked_post_by_trends_for_community( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT, False )" - elif tag == '': - sql = "SELECT * FROM bridge_get_ranked_post_by_trends( (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" - else: - sql = "SELECT * FROM bridge_get_ranked_post_by_trends_for_tag( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" - elif sort == 'hot': - if is_community: - sql = "SELECT * FROM 
bridge_get_ranked_post_by_hot_for_community( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" - elif tag == '': - sql = "SELECT * FROM bridge_get_ranked_post_by_hot( (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" - else: - sql = "SELECT * FROM bridge_get_ranked_post_by_hot_for_tag( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" - elif sort == 'promoted': - if is_community: - sql = "SELECT * FROM bridge_get_ranked_post_by_promoted_for_community( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" - elif tag == '': - sql = "SELECT * FROM bridge_get_ranked_post_by_promoted( (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" - else: - sql = "SELECT * FROM bridge_get_ranked_post_by_promoted_for_tag( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" - elif sort == 'post_by_payout': - if tag == '': - sql = "SELECT * FROM bridge_get_ranked_post_by_payout( (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT, False )" - else: - sql = "SELECT * FROM bridge_get_ranked_post_by_payout_for_category( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT, False )" - elif sort == 'comment_by_payout': - if tag == '': - sql = "SELECT * FROM bridge_get_ranked_post_by_payout_comments( (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" - else: - sql = "SELECT * FROM bridge_get_ranked_post_by_payout_comments_for_category( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" - else: - return posts - - sql_result = await db.query_all(sql, tag=tag, author=start_author, permlink=start_permlink, limit=limit ) - - muted_accounts = Mutes.all() - for row in sql_result: - post = _condenser_post_object(row, truncate_body) - post['active_votes'] = await find_votes_impl(db, row['author'], row['permlink'], VotesPresentation.CondenserApi) - post['active_votes'] = _mute_votes(post['active_votes'], muted_accounts) - posts.append(post) - return posts - -@return_error_info -@nested_query_compat -async def get_discussions_by_created(context, start_author: str = '', start_permlink: str = '', - limit: int = 20, tag: str = None, - truncate_body: int = 0, filter_tags: list = None): - return await get_posts_by_given_sort(context, 'created', start_author, start_permlink, limit, tag, truncate_body, filter_tags) - -@return_error_info -@nested_query_compat -async def get_discussions_by_trending(context, start_author: str = '', start_permlink: str = '', - limit: int = 20, tag: str = None, - truncate_body: int = 0, filter_tags: list = None): - return await get_posts_by_given_sort(context, 'trending', start_author, start_permlink, limit, tag, truncate_body, filter_tags) - -@return_error_info -@nested_query_compat -async def get_discussions_by_hot(context, start_author: str = '', start_permlink: str = '', - limit: int = 20, tag: str = None, - truncate_body: int = 0, filter_tags: list = None): - return await get_posts_by_given_sort(context, 'hot', start_author, start_permlink, limit, tag, truncate_body, filter_tags) - -@return_error_info -@nested_query_compat -async def get_discussions_by_promoted(context, start_author: str = '', start_permlink: str = '', - limit: int = 20, tag: str = None, - truncate_body: int = 0, filter_tags: list = None): - return await get_posts_by_given_sort(context, 'promoted', start_author, start_permlink, limit, tag, truncate_body, filter_tags) - -@return_error_info -@nested_query_compat -async 
def get_post_discussions_by_payout(context, start_author: str = '', start_permlink: str = '', - limit: int = 20, tag: str = None, - truncate_body: int = 0): - return await get_posts_by_given_sort(context, 'post_by_payout', start_author, start_permlink, limit, tag, truncate_body, []) - -@return_error_info -@nested_query_compat -async def get_comment_discussions_by_payout(context, start_author: str = '', start_permlink: str = '', - limit: int = 20, tag: str = None, - truncate_body: int = 0): - return await get_posts_by_given_sort(context, 'comment_by_payout', start_author, start_permlink, limit, tag, truncate_body, []) - -@return_error_info -@nested_query_compat -async def get_discussions_by_blog(context, tag: str = None, start_author: str = '', - start_permlink: str = '', limit: int = 20, - truncate_body: int = 0, filter_tags: list = None): - """Retrieve account's blog posts, including reblogs.""" - assert tag, '`tag` cannot be blank' - assert not filter_tags, 'filter_tags not supported' - valid_account(tag) - valid_account(start_author, allow_empty=True) - valid_permlink(start_permlink, allow_empty=True) - valid_limit(limit, 100, 20) - - sql = """ - SELECT * FROM get_discussions_by_blog(:author, :start_author, :start_permlink, :limit) - """ - - db = context['db'] - result = await db.query_all(sql, author=tag, start_author=start_author, start_permlink=start_permlink, limit=limit) - posts_by_id = [] - - for row in result: - row = dict(row) - post = _condenser_post_object(row, truncate_body=truncate_body) - post['active_votes'] = await find_votes_impl(db, post['author'], post['permlink'], VotesPresentation.CondenserApi) - post['active_votes'] = _mute_votes(post['active_votes'], Mutes.all()) - #posts_by_id[row['post_id']] = post - posts_by_id.append(post) - - return posts_by_id - -@return_error_info -@nested_query_compat -async def get_discussions_by_feed(context, tag: str = None, start_author: str = '', - start_permlink: str = '', limit: int = 20, - truncate_body: int = 0, filter_tags: list = None): - """Retrieve account's personalized feed.""" - assert tag, '`tag` cannot be blank' - assert not filter_tags, 'filter_tags not supported' - return await get_by_feed_with_reblog_impl( - context['db'], - valid_account(tag), - valid_account(start_author, allow_empty=True), - valid_permlink(start_permlink, allow_empty=True), - valid_limit(limit, 100, 20), - truncate_body) - -@return_error_info -@nested_query_compat -async def get_discussions_by_comments(context, start_author: str = None, start_permlink: str = '', - limit: int = 20, truncate_body: int = 0, - filter_tags: list = None): - """Get comments by made by author.""" - assert start_author, '`start_author` cannot be blank' - assert not filter_tags, 'filter_tags not supported' - valid_account(start_author) - valid_permlink(start_permlink, allow_empty=True) - valid_limit(limit, 100, 20) - - posts = [] - db = context['db'] - - sql = " SELECT * FROM condenser_get_discussions_by_comments( '{}', '{}', {} ) ".format( start_author, start_permlink, limit ) - result = await db.query_all(sql) - - for row in result: - row = dict(row) - post = _condenser_post_object(row, truncate_body=truncate_body) - post['active_votes'] = await find_votes_impl(db, post['author'], post['permlink'], VotesPresentation.CondenserApi) - post['active_votes'] = _mute_votes(post['active_votes'], Mutes.all()) - posts.append(post) - - return posts - -@return_error_info -@nested_query_compat -async def get_replies_by_last_update(context, start_author: str = None, start_permlink: str = 
'', - limit: int = 20, truncate_body: int = 0): - """Get all replies made to any of author's posts.""" - assert start_author, '`start_author` cannot be blank' - - return await cursor.get_by_replies_to_account( - context['db'], - valid_account(start_author), - valid_permlink(start_permlink, allow_empty=True), - valid_limit(limit, 100, 20), - truncate_body) - -@return_error_info -@nested_query_compat -async def get_discussions_by_author_before_date(context, author: str = None, start_permlink: str = '', - before_date: str = '', limit: int = 10): - """Retrieve account's blog posts, without reblogs. - - NOTE: before_date is completely ignored, and it appears to be broken and/or - completely ignored in steemd as well. This call is similar to - get_discussions_by_blog but does NOT serve reblogs. - """ - # pylint: disable=invalid-name,unused-argument - assert author, '`author` cannot be blank' - return await cursor.get_by_blog_without_reblog( - context['db'], - valid_account(author), - valid_permlink(start_permlink, allow_empty=True), - valid_limit(limit, 100, 10)) - -@return_error_info -@nested_query_compat -async def get_blog(context, account: str, start_entry_id: int = 0, limit: int = None): - """Get posts for an author's blog (w/ reblogs), paged by index/limit. - - Equivalent to get_discussions_by_blog, but uses offset-based pagination. - - Examples: (ABW: old description and examples were misleading as in many cases code worked differently, also now more cases actually work that gave error earlier) - (acct, -1, limit) for limit 1..500 - returns latest (no more than) limit posts - (acct, 0) - returns latest single post (ABW: this is a bug but I left it here because I'm afraid it was actively used - it should return oldest post) - (acct, 0, limit) for limit 1..500 - same as (acct, -1, limit) - see above - (acct, last_idx) for positive last_idx - returns last_idx oldest posts, or posts in range [last_idx..last_idx-500) when last_idx >= 500 - (acct, last_idx, limit) for positive last_idx and limit 1..500 - returns posts in range [last_idx..last_idx-limit) - """ - db = context['db'] - - account = valid_account(account) - if not start_entry_id: - start_entry_id = -1 - start_entry_id = valid_offset(start_entry_id) - if not limit: - limit = max(start_entry_id + 1, 1) - limit = min(limit, 500) - limit = valid_limit(limit, 500, None) - - sql = "SELECT * FROM condenser_get_blog(:account, :last, :limit)" - result = await db.query_all(sql, account=account, last=start_entry_id, limit=limit) - - muted_accounts = Mutes.all() - out = [] - for row in result: - row = dict(row) - post = _condenser_post_object(row) - - post['active_votes'] = await find_votes_impl(db, row['author'], row['permlink'], VotesPresentation.CondenserApi) - post['active_votes'] = _mute_votes(post['active_votes'], muted_accounts) - - out.append({"blog": account, - "entry_id": row['entry_id'], - "comment": post, - "reblogged_on": json_date(row['reblogged_at'])}) - - return list(reversed(out)) - -@return_error_info -@nested_query_compat -async def get_blog_entries(context, account: str, start_entry_id: int = 0, limit: int = None): - """Get 'entries' for an author's blog (w/ reblogs), paged by index/limit. - - Interface identical to get_blog, but returns minimalistic post references. 
- """ - db = context['db'] - - account = valid_account(account) - if not start_entry_id: - start_entry_id = -1 - start_entry_id = valid_offset(start_entry_id) - if not limit: - limit = max(start_entry_id + 1, 1) - limit = min(limit, 500) - limit = valid_limit(limit, 500, None) - - sql = "SELECT * FROM condenser_get_blog_entries(:account, :last, :limit)" - result = await db.query_all(sql, account=account, last=start_entry_id, limit=limit) - - out = [] - for row in result: - row = dict(row) - out.append({"blog": account, - "entry_id": row['entry_id'], - "author": row['author'], - "permlink": row['permlink'], - "reblogged_on": json_date(row['reblogged_at'])}) - - return list(reversed(out)) - -@return_error_info -async def get_active_votes(context, author: str, permlink: str): - """ Returns all votes for the given post. """ - valid_account(author) - valid_permlink(permlink) - db = context['db'] - - return await find_votes_impl( db, author, permlink, VotesPresentation.ActiveVotes ) +"""Steemd/condenser_api compatibility layer API methods.""" +from functools import wraps + +import hive.server.condenser_api.cursor as cursor +from hive.server.condenser_api.objects import _mute_votes, _condenser_post_object +from hive.server.common.helpers import ( + ApiError, + return_error_info, + json_date, + valid_account, + valid_permlink, + valid_tag, + valid_offset, + valid_limit, + valid_follow_type) +from hive.server.common.mutes import Mutes +from hive.server.database_api.methods import find_votes_impl, VotesPresentation + +from hive.server.hive_api.public import get_by_feed_with_reblog_impl + +# pylint: disable=too-many-arguments,line-too-long,too-many-lines + +@return_error_info +async def get_account_votes(context, account): + """Return an info message about get_acccount_votes being unsupported.""" + # pylint: disable=unused-argument + assert False, "get_account_votes is no longer supported, for details see https://hive.blog/steemit/@steemitdev/additional-public-api-change" + + +# Follows Queries + +def _legacy_follower(follower, following, follow_type): + return dict(follower=follower, following=following, what=[follow_type]) + +@return_error_info +async def get_followers(context, account: str, start: str = '', follow_type: str = None, + limit: int = 1000, **kwargs): + """Get all accounts following `account`. (EOL)""" + # `type` reserved word workaround + if not follow_type and 'type' in kwargs: + follow_type = kwargs['type'] + if not follow_type: + follow_type = 'blog' + followers = await cursor.get_followers( + context['db'], + valid_account(account), + valid_account(start, allow_empty=True), + valid_follow_type(follow_type), + valid_limit(limit, 1000, 1000)) + return [_legacy_follower(name, account, follow_type) for name in followers] + +@return_error_info +async def get_following(context, account: str, start: str = '', follow_type: str = None, + limit: int = 1000, **kwargs): + """Get all accounts `account` follows. (EOL)""" + # `type` reserved word workaround + if not follow_type and 'type' in kwargs: + follow_type = kwargs['type'] + if not follow_type: + follow_type = 'blog' + following = await cursor.get_following( + context['db'], + valid_account(account), + valid_account(start, allow_empty=True), + valid_follow_type(follow_type), + valid_limit(limit, 1000, 1000)) + return [_legacy_follower(account, name, follow_type) for name in following] + +@return_error_info +async def get_follow_count(context, account: str): + """Get follow count stats. 
(EOL)""" + db = context['db'] + account = valid_account(account) + sql = "SELECT * FROM condenser_get_follow_count( (:account)::VARCHAR )" + counters = await db.query_row(sql, account=account) + return dict(account=account, + following_count=counters[0], + follower_count=counters[1]) + +@return_error_info +async def get_reblogged_by(context, author: str, permlink: str): + """Get all rebloggers of a post.""" + return await cursor.get_reblogged_by( + context['db'], + valid_account(author), + valid_permlink(permlink)) + +@return_error_info +async def get_account_reputations(context, account_lower_bound: str = None, limit: int = None): + db = context['db'] + return await _get_account_reputations_impl(db, True, account_lower_bound, limit) + +async def _get_account_reputations_impl(db, fat_node_style, account_lower_bound, limit): + """Enumerate account reputations.""" + assert isinstance(account_lower_bound, str), "invalid account_lower_bound type" + limit = valid_limit(limit, 1000, 1000) + + sql = "SELECT * FROM condenser_get_account_reputations( '{}', {}, {} )".format( account_lower_bound, account_lower_bound is None, limit ) + rows = await db.query_all(sql, start=account_lower_bound, limit=limit) + if fat_node_style: + return [dict(account=r[0], reputation=r[1]) for r in rows] + else: + return {'reputations': [dict(name=r[0], reputation=r[1]) for r in rows]} + +# Content Primitives + +@return_error_info +async def get_content(context, author: str, permlink: str, observer=None): + db = context['db'] + return await _get_content_impl(db, True, author, permlink, observer) + +@return_error_info +async def _get_content_impl(db, fat_node_style, author: str, permlink: str, observer=None): + """Get a single post object.""" + valid_account(author) + valid_permlink(permlink) + + sql = "SELECT * FROM condenser_get_content(:author, :permlink)" + + post = None + result = await db.query_all(sql, author=author, permlink=permlink) + if result: + result = dict(result[0]) + post = _condenser_post_object(result, 0, fat_node_style) + post['active_votes'] = await find_votes_impl(db, author, permlink, VotesPresentation.ActiveVotes if fat_node_style else VotesPresentation.CondenserApi) + if not observer: + post['active_votes'] = _mute_votes(post['active_votes'], Mutes.all()) + else: + blacklists_for_user = await Mutes.get_blacklists_for_observer(observer, {'db':db}) + post['active_votes'] = _mute_votes(post['active_votes'], blacklists_for_user.keys()) + + return post + +@return_error_info +async def get_content_replies(context, author: str, permlink: str): + db = context['db'] + return await _get_content_replies_impl(db, True, author, permlink) + +@return_error_info +async def _get_content_replies_impl(db, fat_node_style, author: str, permlink: str): + """Get a list of post objects based on parent.""" + valid_account(author) + valid_permlink(permlink) + + sql = "SELECT * FROM condenser_get_content_replies(:author, :permlink)" + result = await db.query_all(sql, author=author, permlink=permlink) + + muted_accounts = Mutes.all() + + posts = [] + for row in result: + row = dict(row) + post = _condenser_post_object(row, get_content_additions=fat_node_style) + post['active_votes'] = await find_votes_impl(db, row['author'], row['permlink'], VotesPresentation.ActiveVotes if fat_node_style else VotesPresentation.CondenserApi) + post['active_votes'] = _mute_votes(post['active_votes'], muted_accounts) + posts.append(post) + + return posts + +# Discussion Queries + +def nested_query_compat(function): + """Unpack strange format 
used by some clients, accepted by steemd. + + Sometimes a discussion query object is nested inside a list[1]. Eg: + + {... "method":"condenser_api.get_discussions_by_hot", + "params":[{"tag":"steem","limit":1}]} + + In these cases jsonrpcserver dispatch just shoves it into the first + arg. This decorator checks for this specific condition and unpacks + the query to be passed as kwargs. + """ + @wraps(function) + def wrapper(*args, **kwargs): + """Checks for specific condition signature and unpacks query""" + if args and not kwargs and len(args) == 2 and isinstance(args[1], dict): + return function(args[0], **args[1]) + return function(*args, **kwargs) + return wrapper + +@return_error_info +@nested_query_compat +async def get_posts_by_given_sort(context, sort: str, start_author: str = '', start_permlink: str = '', + limit: int = 20, tag: str = None, + truncate_body: int = 0, filter_tags: list = None): + """Query posts, sorted by creation date.""" + assert not filter_tags, 'filter_tags not supported' + + db = context['db'] + + start_author = valid_account(start_author, allow_empty=True), + start_permlink = valid_permlink(start_permlink, allow_empty=True), + limit = valid_limit(limit, 100, 20), + tag = valid_tag(tag, allow_empty=True) + + posts = [] + is_community = tag[:5] == 'hive-' + + if sort == 'created': + if is_community: + sql = "SELECT * FROM bridge_get_ranked_post_by_created_for_community( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT, False )" + elif tag == '': + sql = "SELECT * FROM bridge_get_ranked_post_by_created( (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" + else: + sql = "SELECT * FROM bridge_get_ranked_post_by_created_for_tag( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" + elif sort == 'trending': + if is_community: + sql = "SELECT * FROM bridge_get_ranked_post_by_trends_for_community( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT, False )" + elif tag == '': + sql = "SELECT * FROM bridge_get_ranked_post_by_trends( (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" + else: + sql = "SELECT * FROM bridge_get_ranked_post_by_trends_for_tag( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" + elif sort == 'hot': + if is_community: + sql = "SELECT * FROM bridge_get_ranked_post_by_hot_for_community( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" + elif tag == '': + sql = "SELECT * FROM bridge_get_ranked_post_by_hot( (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" + else: + sql = "SELECT * FROM bridge_get_ranked_post_by_hot_for_tag( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" + elif sort == 'promoted': + if is_community: + sql = "SELECT * FROM bridge_get_ranked_post_by_promoted_for_community( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" + elif tag == '': + sql = "SELECT * FROM bridge_get_ranked_post_by_promoted( (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" + else: + sql = "SELECT * FROM bridge_get_ranked_post_by_promoted_for_tag( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" + elif sort == 'post_by_payout': + if tag == '': + sql = "SELECT * FROM bridge_get_ranked_post_by_payout( (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT, False )" + else: + sql = "SELECT * FROM bridge_get_ranked_post_by_payout_for_category( (:tag)::VARCHAR, 
(:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT, False )" + elif sort == 'comment_by_payout': + if tag == '': + sql = "SELECT * FROM bridge_get_ranked_post_by_payout_comments( (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" + else: + sql = "SELECT * FROM bridge_get_ranked_post_by_payout_comments_for_category( (:tag)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::SMALLINT )" + else: + return posts + + sql_result = await db.query_all(sql, tag=tag, author=start_author, permlink=start_permlink, limit=limit ) + + muted_accounts = Mutes.all() + for row in sql_result: + post = _condenser_post_object(row, truncate_body) + post['active_votes'] = await find_votes_impl(db, row['author'], row['permlink'], VotesPresentation.CondenserApi) + post['active_votes'] = _mute_votes(post['active_votes'], muted_accounts) + posts.append(post) + return posts + +@return_error_info +@nested_query_compat +async def get_discussions_by_created(context, start_author: str = '', start_permlink: str = '', + limit: int = 20, tag: str = None, + truncate_body: int = 0, filter_tags: list = None): + return await get_posts_by_given_sort(context, 'created', start_author, start_permlink, limit, tag, truncate_body, filter_tags) + +@return_error_info +@nested_query_compat +async def get_discussions_by_trending(context, start_author: str = '', start_permlink: str = '', + limit: int = 20, tag: str = None, + truncate_body: int = 0, filter_tags: list = None): + return await get_posts_by_given_sort(context, 'trending', start_author, start_permlink, limit, tag, truncate_body, filter_tags) + +@return_error_info +@nested_query_compat +async def get_discussions_by_hot(context, start_author: str = '', start_permlink: str = '', + limit: int = 20, tag: str = None, + truncate_body: int = 0, filter_tags: list = None): + return await get_posts_by_given_sort(context, 'hot', start_author, start_permlink, limit, tag, truncate_body, filter_tags) + +@return_error_info +@nested_query_compat +async def get_discussions_by_promoted(context, start_author: str = '', start_permlink: str = '', + limit: int = 20, tag: str = None, + truncate_body: int = 0, filter_tags: list = None): + return await get_posts_by_given_sort(context, 'promoted', start_author, start_permlink, limit, tag, truncate_body, filter_tags) + +@return_error_info +@nested_query_compat +async def get_post_discussions_by_payout(context, start_author: str = '', start_permlink: str = '', + limit: int = 20, tag: str = None, + truncate_body: int = 0): + return await get_posts_by_given_sort(context, 'post_by_payout', start_author, start_permlink, limit, tag, truncate_body, []) + +@return_error_info +@nested_query_compat +async def get_comment_discussions_by_payout(context, start_author: str = '', start_permlink: str = '', + limit: int = 20, tag: str = None, + truncate_body: int = 0): + return await get_posts_by_given_sort(context, 'comment_by_payout', start_author, start_permlink, limit, tag, truncate_body, []) + +@return_error_info +@nested_query_compat +async def get_discussions_by_blog(context, tag: str = None, start_author: str = '', + start_permlink: str = '', limit: int = 20, + truncate_body: int = 0, filter_tags: list = None): + """Retrieve account's blog posts, including reblogs.""" + assert tag, '`tag` cannot be blank' + assert not filter_tags, 'filter_tags not supported' + valid_account(tag) + valid_account(start_author, allow_empty=True) + valid_permlink(start_permlink, allow_empty=True) + valid_limit(limit, 100, 20) + + sql = """ + SELECT * FROM 
get_discussions_by_blog(:author, :start_author, :start_permlink, :limit) + """ + + db = context['db'] + result = await db.query_all(sql, author=tag, start_author=start_author, start_permlink=start_permlink, limit=limit) + posts_by_id = [] + + for row in result: + row = dict(row) + post = _condenser_post_object(row, truncate_body=truncate_body) + post['active_votes'] = await find_votes_impl(db, post['author'], post['permlink'], VotesPresentation.CondenserApi) + post['active_votes'] = _mute_votes(post['active_votes'], Mutes.all()) + #posts_by_id[row['post_id']] = post + posts_by_id.append(post) + + return posts_by_id + +@return_error_info +@nested_query_compat +async def get_discussions_by_feed(context, tag: str = None, start_author: str = '', + start_permlink: str = '', limit: int = 20, + truncate_body: int = 0, filter_tags: list = None): + """Retrieve account's personalized feed.""" + assert tag, '`tag` cannot be blank' + assert not filter_tags, 'filter_tags not supported' + return await get_by_feed_with_reblog_impl( + context['db'], + valid_account(tag), + valid_account(start_author, allow_empty=True), + valid_permlink(start_permlink, allow_empty=True), + valid_limit(limit, 100, 20), + truncate_body) + +@return_error_info +@nested_query_compat +async def get_discussions_by_comments(context, start_author: str = None, start_permlink: str = '', + limit: int = 20, truncate_body: int = 0, + filter_tags: list = None): + """Get comments by made by author.""" + assert start_author, '`start_author` cannot be blank' + assert not filter_tags, 'filter_tags not supported' + valid_account(start_author) + valid_permlink(start_permlink, allow_empty=True) + valid_limit(limit, 100, 20) + + posts = [] + db = context['db'] + + sql = " SELECT * FROM condenser_get_discussions_by_comments( '{}', '{}', {} ) ".format( start_author, start_permlink, limit ) + result = await db.query_all(sql) + + for row in result: + row = dict(row) + post = _condenser_post_object(row, truncate_body=truncate_body) + post['active_votes'] = await find_votes_impl(db, post['author'], post['permlink'], VotesPresentation.CondenserApi) + post['active_votes'] = _mute_votes(post['active_votes'], Mutes.all()) + posts.append(post) + + return posts + +@return_error_info +@nested_query_compat +async def get_replies_by_last_update(context, start_author: str = None, start_permlink: str = '', + limit: int = 20, truncate_body: int = 0): + """Get all replies made to any of author's posts.""" + assert start_author, '`start_author` cannot be blank' + + return await cursor.get_by_replies_to_account( + context['db'], + valid_account(start_author), + valid_permlink(start_permlink, allow_empty=True), + valid_limit(limit, 100, 20), + truncate_body) + +@return_error_info +@nested_query_compat +async def get_discussions_by_author_before_date(context, author: str = None, start_permlink: str = '', + before_date: str = '', limit: int = 10): + """Retrieve account's blog posts, without reblogs. + + NOTE: before_date is completely ignored, and it appears to be broken and/or + completely ignored in steemd as well. This call is similar to + get_discussions_by_blog but does NOT serve reblogs. 
+ """ + # pylint: disable=invalid-name,unused-argument + assert author, '`author` cannot be blank' + return await cursor.get_by_blog_without_reblog( + context['db'], + valid_account(author), + valid_permlink(start_permlink, allow_empty=True), + valid_limit(limit, 100, 10)) + +@return_error_info +@nested_query_compat +async def get_blog(context, account: str, start_entry_id: int = 0, limit: int = None): + """Get posts for an author's blog (w/ reblogs), paged by index/limit. + + Equivalent to get_discussions_by_blog, but uses offset-based pagination. + + Examples: (ABW: old description and examples were misleading as in many cases code worked differently, also now more cases actually work that gave error earlier) + (acct, -1, limit) for limit 1..500 - returns latest (no more than) limit posts + (acct, 0) - returns latest single post (ABW: this is a bug but I left it here because I'm afraid it was actively used - it should return oldest post) + (acct, 0, limit) for limit 1..500 - same as (acct, -1, limit) - see above + (acct, last_idx) for positive last_idx - returns last_idx oldest posts, or posts in range [last_idx..last_idx-500) when last_idx >= 500 + (acct, last_idx, limit) for positive last_idx and limit 1..500 - returns posts in range [last_idx..last_idx-limit) + """ + db = context['db'] + + account = valid_account(account) + if not start_entry_id: + start_entry_id = -1 + start_entry_id = valid_offset(start_entry_id) + if not limit: + limit = max(start_entry_id + 1, 1) + limit = min(limit, 500) + limit = valid_limit(limit, 500, None) + + sql = "SELECT * FROM condenser_get_blog(:account, :last, :limit)" + result = await db.query_all(sql, account=account, last=start_entry_id, limit=limit) + + muted_accounts = Mutes.all() + out = [] + for row in result: + row = dict(row) + post = _condenser_post_object(row) + + post['active_votes'] = await find_votes_impl(db, row['author'], row['permlink'], VotesPresentation.CondenserApi) + post['active_votes'] = _mute_votes(post['active_votes'], muted_accounts) + + out.append({"blog": account, + "entry_id": row['entry_id'], + "comment": post, + "reblogged_on": json_date(row['reblogged_at'])}) + + return list(reversed(out)) + +@return_error_info +@nested_query_compat +async def get_blog_entries(context, account: str, start_entry_id: int = 0, limit: int = None): + """Get 'entries' for an author's blog (w/ reblogs), paged by index/limit. + + Interface identical to get_blog, but returns minimalistic post references. + """ + db = context['db'] + + account = valid_account(account) + if not start_entry_id: + start_entry_id = -1 + start_entry_id = valid_offset(start_entry_id) + if not limit: + limit = max(start_entry_id + 1, 1) + limit = min(limit, 500) + limit = valid_limit(limit, 500, None) + + sql = "SELECT * FROM condenser_get_blog_entries(:account, :last, :limit)" + result = await db.query_all(sql, account=account, last=start_entry_id, limit=limit) + + out = [] + for row in result: + row = dict(row) + out.append({"blog": account, + "entry_id": row['entry_id'], + "author": row['author'], + "permlink": row['permlink'], + "reblogged_on": json_date(row['reblogged_at'])}) + + return list(reversed(out)) + +@return_error_info +async def get_active_votes(context, author: str, permlink: str): + """ Returns all votes for the given post. """ + valid_account(author) + valid_permlink(permlink) + db = context['db'] + + return await find_votes_impl( db, author, permlink, VotesPresentation.ActiveVotes )