diff --git a/hive/db/sql_scripts/hive_post_operations.sql b/hive/db/sql_scripts/hive_post_operations.sql
index 099380bbc3cbb403d4f5aca9f71556d1b6b0c860..611678d8bf2a15c298563f62015207a922384704 100644
--- a/hive/db/sql_scripts/hive_post_operations.sql
+++ b/hive/db/sql_scripts/hive_post_operations.sql
@@ -144,12 +144,13 @@ END
 $function$
 ;
 
-DROP FUNCTION if exists delete_hive_post(character varying,character varying,character varying, integer)
+DROP FUNCTION if exists delete_hive_post(character varying,character varying,character varying, integer, timestamp)
 ;
 CREATE OR REPLACE FUNCTION delete_hive_post(
   in _author hive_accounts.name%TYPE,
   in _permlink hive_permlink_data.permlink%TYPE,
-  in _block_num hive_blocks.num%TYPE)
+  in _block_num hive_blocks.num%TYPE,
+  in _date hive_posts.active%TYPE)
 RETURNS VOID
 LANGUAGE plpgsql
 AS
@@ -174,6 +175,7 @@ BEGIN
       WHERE ha.name = _author AND hpd.permlink = _permlink
   )
   ,block_num = _block_num
+  ,active = _date
   WHERE id = __post_id;
 
   DELETE FROM hive_reblogs
diff --git a/hive/db/sql_scripts/utility_functions.sql b/hive/db/sql_scripts/utility_functions.sql
index 896de4fe59565145d20cc5fe54be59ebc54f7754..ae326d9a0b436e81d6683c11d9e5dc144890c537 100644
--- a/hive/db/sql_scripts/utility_functions.sql
+++ b/hive/db/sql_scripts/utility_functions.sql
@@ -124,3 +124,27 @@ BEGIN
 END
 $function$
 ;
+
+DROP FUNCTION IF EXISTS public.find_community_id CASCADE
+;
+CREATE OR REPLACE FUNCTION public.find_community_id(
+    in _community_name hive_communities.name%TYPE,
+    in _check BOOLEAN
+)
+RETURNS INTEGER
+LANGUAGE 'plpgsql' STABLE
+AS
+$function$
+DECLARE
+  __community_id INT = 0;
+BEGIN
+  IF (_community_name <> '') THEN
+    SELECT INTO __community_id COALESCE( ( SELECT id FROM hive_communities WHERE name=_community_name ), 0 );
+    IF _check AND __community_id = 0 THEN
+      RAISE EXCEPTION 'Community % does not exist', _community_name;
+    END IF;
+  END IF;
+  RETURN __community_id;
+END
+$function$
+;
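Note: the new find_community_id() helper centralizes the community name-to-id lookup; an empty name returns 0, and with _check = TRUE a missing community raises inside SQL instead of requiring an assert in every Python caller. A minimal sketch of the expected Python-side call follows (it simply mirrors the get_community_id() change in hive/server/hive_api/common.py further down in this patch; the async db adapter with query_one(sql, **params) is assumed from that code):

# Sketch only, not part of the patch.
async def get_community_id(db, name):
    # The SQL helper raises 'Community ... does not exist' for unknown names;
    # passing False instead of True would make it return 0 for unknown or empty names.
    return await db.query_one("SELECT find_community_id( (:name)::VARCHAR, True )", name=name)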
diff --git a/hive/indexer/blocks.py b/hive/indexer/blocks.py
index 2de572b06162d31f1c0a81a35a2c8726829c8621..89050cace91b6fc89207cbbb41f5199c9cb91ca1 100644
--- a/hive/indexer/blocks.py
+++ b/hive/indexer/blocks.py
@@ -266,7 +266,7 @@ class Blocks:
                 elif op_type == 'delete_comment_operation':
                     key = "{}/{}".format(op['author'], op['permlink'])
                     if ( ineffective_deleted_ops is None ) or ( key not in ineffective_deleted_ops ):
-                        Posts.delete_op(op)
+                        Posts.delete_op(op, cls._head_block_date)
                 elif op_type == 'comment_options_operation':
                     Posts.comment_options_op(op)
                 elif op_type == 'vote_operation':
diff --git a/hive/indexer/posts.py b/hive/indexer/posts.py
index 96f535be9123278163ebb92ddbb302661329d8ee..090667bfefb1d9b86259bbb7ceb3f2227bdcee74 100644
--- a/hive/indexer/posts.py
+++ b/hive/indexer/posts.py
@@ -1,436 +1,436 @@
-"""Core posts manager."""
-
-import logging
-import collections
-
-from ujson import dumps, loads
-
-from diff_match_patch import diff_match_patch
-
-from hive.db.adapter import Db
-from hive.db.db_state import DbState
-
-from hive.indexer.reblog import Reblog
-from hive.indexer.community import Community
-from hive.indexer.notify import Notify
-from hive.indexer.post_data_cache import PostDataCache
-from hive.indexer.db_adapter_holder import DbAdapterHolder
-from hive.utils.misc import chunks
-
-from hive.utils.normalize import sbd_amount, legacy_amount, safe_img_url, escape_characters
-
-log = logging.getLogger(__name__)
-DB = Db.instance()
-
-class Posts(DbAdapterHolder):
-    """Handles critical/core post ops and data."""
-
-    # LRU cache for (author-permlink -> id) lookup (~400mb per 1M entries)
-    CACHE_SIZE = 2000000
-    _ids = collections.OrderedDict()
-    _hits = 0
-    _miss = 0
-
-    comment_payout_ops = {}
-    _comment_payout_ops = []
-
-    @classmethod
-    def last_id(cls):
-        """Get the last indexed post id."""
-        sql = "SELECT MAX(id) FROM hive_posts WHERE counter_deleted = 0"
-        return DB.query_one(sql) or 0
-
-    @classmethod
-    def get_id(cls, author, permlink):
-        """Look up id by author/permlink, making use of LRU cache."""
-        url = author+'/'+permlink
-        if url in cls._ids:
-            cls._hits += 1
-            _id = cls._ids.pop(url)
-            cls._ids[url] = _id
-        else:
-            cls._miss += 1
-            sql = """
-                SELECT hp.id
-                FROM hive_posts hp
-                INNER JOIN hive_accounts ha_a ON ha_a.id = hp.author_id
-                INNER JOIN hive_permlink_data hpd_p ON hpd_p.id = hp.permlink_id
-                WHERE ha_a.name = :a AND hpd_p.permlink = :p
-            """
-            _id = DB.query_one(sql, a=author, p=permlink)
-            if _id:
-                cls._set_id(url, _id)
-
-        # cache stats (under 10M every 10K else every 100K)
-        total = cls._hits + cls._miss
-        if total % 100000 == 0:
-            log.info("pid lookups: %d, hits: %d (%.1f%%), entries: %d",
-                     total, cls._hits, 100.0*cls._hits/total, len(cls._ids))
-
-        return _id
-
-    @classmethod
-    def _set_id(cls, url, pid):
-        """Add an entry to the LRU, maintaining max size."""
-        assert pid, "no pid provided for %s" % url
-        if len(cls._ids) > cls.CACHE_SIZE:
-            cls._ids.popitem(last=False)
-        cls._ids[url] = pid
-
-    @classmethod
-    def delete_op(cls, op):
-        """Given a delete_comment op, mark the post as deleted.
-
-        Also remove it from post-cache and feed-cache.
-        """
-        cls.delete(op)
-
-    @classmethod
-    def comment_op(cls, op, block_date):
-        """Register new/edited/undeleted posts; insert into feed cache."""
-
-        md = {}
-        # At least one case where jsonMetadata was double-encoded: condenser#895
-        # jsonMetadata = JSON.parse(jsonMetadata);
-        try:
-            md = loads(op['json_metadata'])
-            if not isinstance(md, dict):
-                md = {}
-        except Exception:
-            pass
-
-        tags = []
-        if md and 'tags' in md and isinstance(md['tags'], list):
-            tags = md['tags']
-
-        sql = """
-            SELECT is_new_post, id, author_id, permlink_id, post_category, parent_id, community_id, is_valid, is_muted, depth
-            FROM process_hive_post_operation((:author)::varchar, (:permlink)::varchar, (:parent_author)::varchar, (:parent_permlink)::varchar, (:date)::timestamp, (:community_support_start_block)::integer, (:block_num)::integer, (:tags)::VARCHAR[]);
-            """
-
-        row = DB.query_row(sql, author=op['author'], permlink=op['permlink'], parent_author=op['parent_author'],
-                   parent_permlink=op['parent_permlink'], date=block_date, community_support_start_block=Community.start_block, block_num=op['block_num'], tags=tags)
-
-        result = dict(row)
-
-        # TODO we need to enhance checking related community post validation and honor is_muted.
-        error = cls._verify_post_against_community(op, result['community_id'], result['is_valid'], result['is_muted'])
-
-        cls._set_id(op['author']+'/'+op['permlink'], result['id'])
-
-        img_url = None
-        if 'image' in md:
-            img_url = md['image']
-            if isinstance(img_url, list) and img_url:
-                img_url = img_url[0]
-        if img_url:
-            img_url = safe_img_url(img_url)
-
-        is_new_post = result['is_new_post']
-        if is_new_post:
-            # add content data to hive_post_data
-            post_data = dict(title=op['title'] if op['title'] else '',
-                             img_url=img_url if img_url else '',
-                             body=op['body'] if op['body'] else '',
-                             json=op['json_metadata'] if op['json_metadata'] else '')
-        else:
-            # edit case. Now we need to (potentially) apply patch to the post body.
-            # empty new body means no body edit, not clear (same with other data)
-            new_body = cls._merge_post_body(id=result['id'], new_body_def=op['body']) if op['body'] else None
-            new_title = op['title'] if op['title'] else None
-            new_json = op['json_metadata'] if op['json_metadata'] else None
-            # when 'new_json' is not empty, 'img_url' should be overwritten even if it is itself empty
-            new_img = img_url if img_url else '' if new_json else None
-            post_data = dict(title=new_title, img_url=new_img, body=new_body, json=new_json)
-
-#        log.info("Adding author: {}  permlink: {}".format(op['author'], op['permlink']))
-        PostDataCache.add_data(result['id'], post_data, is_new_post)
-
-        if not DbState.is_initial_sync():
-            if error:
-                author_id = result['author_id']
-                Notify(block_num=op['block_num'], type_id='error', dst_id=author_id, when=block_date,
-                       post_id=result['id'], payload=error)
-
-    @classmethod
-    def flush_into_db(cls):
-        sql = """
-              UPDATE hive_posts AS ihp SET
-                  total_payout_value    = COALESCE( data_source.total_payout_value,                     ihp.total_payout_value ),
-                  curator_payout_value  = COALESCE( data_source.curator_payout_value,                   ihp.curator_payout_value ),
-                  author_rewards        = CAST( data_source.author_rewards as BIGINT ) + ihp.author_rewards,
-                  author_rewards_hive   = COALESCE( CAST( data_source.author_rewards_hive as BIGINT ),  ihp.author_rewards_hive ),
-                  author_rewards_hbd    = COALESCE( CAST( data_source.author_rewards_hbd as BIGINT ),   ihp.author_rewards_hbd ),
-                  author_rewards_vests  = COALESCE( CAST( data_source.author_rewards_vests as BIGINT ), ihp.author_rewards_vests ),
-                  payout                = COALESCE( CAST( data_source.payout as DECIMAL ),              ihp.payout ),
-                  pending_payout        = COALESCE( CAST( data_source.pending_payout as DECIMAL ),      ihp.pending_payout ),
-                  payout_at             = COALESCE( CAST( data_source.payout_at as TIMESTAMP ),         ihp.payout_at ),
-                  last_payout_at        = COALESCE( CAST( data_source.last_payout_at as TIMESTAMP ),    ihp.last_payout_at ),
-                  cashout_time          = COALESCE( CAST( data_source.cashout_time as TIMESTAMP ),      ihp.cashout_time ),
-                  is_paidout            = COALESCE( CAST( data_source.is_paidout as BOOLEAN ),          ihp.is_paidout ),
-                  total_vote_weight     = COALESCE( CAST( data_source.total_vote_weight as NUMERIC ),   ihp.total_vote_weight )
-              FROM
-              (
-              SELECT  ha_a.id as author_id, hpd_p.id as permlink_id,
-                      t.total_payout_value,
-                      t.curator_payout_value,
-                      t.author_rewards,
-                      t.author_rewards_hive,
-                      t.author_rewards_hbd,
-                      t.author_rewards_vests,
-                      t.payout,
-                      t.pending_payout,
-                      t.payout_at,
-                      t.last_payout_at,
-                      t.cashout_time,
-                      t.is_paidout,
-                      t.total_vote_weight
-              from
-              (
-              VALUES
-                --- put all constant values here
-                {}
-              ) AS T(author, permlink,
-                      total_payout_value,
-                      curator_payout_value,
-                      author_rewards,
-                      author_rewards_hive,
-                      author_rewards_hbd,
-                      author_rewards_vests,
-                      payout,
-                      pending_payout,
-                      payout_at,
-                      last_payout_at,
-                      cashout_time,
-                      is_paidout,
-                      total_vote_weight)
-              INNER JOIN hive_accounts ha_a ON ha_a.name = t.author
-              INNER JOIN hive_permlink_data hpd_p ON hpd_p.permlink = t.permlink
-              ) as data_source
-              WHERE ihp.permlink_id = data_source.permlink_id and ihp.author_id = data_source.author_id
-        """
-
-        for chunk in chunks(cls._comment_payout_ops, 1000):
-            cls.beginTx()
-
-            values_str = ','.join(chunk)
-            actual_query = sql.format(values_str)
-            cls.db.query(actual_query)
-
-            cls.commitTx()
-
-        n = len(cls._comment_payout_ops)
-        cls._comment_payout_ops.clear()
-        return n
-
-    @classmethod
-    def comment_payout_op(cls):
-        values_limit = 1000
-
-        """ Process comment payment operations """
-        for k, v in cls.comment_payout_ops.items():
-            author                    = None
-            permlink                  = None
-
-            # author payouts
-            author_rewards            = 0
-            author_rewards_hive       = None
-            author_rewards_hbd        = None
-            author_rewards_vests      = None
-
-            # total payout for comment
-            #comment_author_reward     = None
-            #curators_vesting_payout   = None
-            total_payout_value        = None;
-            curator_payout_value      = None;
-            #beneficiary_payout_value  = None;
-
-            payout                    = None
-            pending_payout            = None
-
-            payout_at                 = None
-            last_payout_at            = None
-            cashout_time              = None
-
-            is_paidout                = None
-
-            total_vote_weight         = None
-
-            # final payout indicator - by default all rewards are zero, but might be overwritten by other operations
-            if v[ 'comment_payout_update_operation' ] is not None:
-              value, date = v[ 'comment_payout_update_operation' ]
-              if author is None:
-                author = value['author']
-                permlink = value['permlink']
-              is_paidout              = True
-              payout_at               = date
-              last_payout_at          = date
-              cashout_time            = "infinity"
-
-              pending_payout          = 0
-
-            # author rewards in current (final or nonfinal) payout (always comes with comment_reward_operation)
-            if v[ 'author_reward_operation' ] is not None:
-              value, date = v[ 'author_reward_operation' ]
-              if author is None:
-                author = value['author']
-                permlink = value['permlink']
-              author_rewards_hive     = value['hive_payout']['amount']
-              author_rewards_hbd      = value['hbd_payout']['amount']
-              author_rewards_vests    = value['vesting_payout']['amount']
-              #curators_vesting_payout = value['curators_vesting_payout']['amount']
-
-            # summary of comment rewards in current (final or nonfinal) payout (always comes with author_reward_operation)
-            if v[ 'comment_reward_operation' ] is not None:
-              value, date = v[ 'comment_reward_operation' ]
-              if author is None:
-                author = value['author']
-                permlink = value['permlink']
-              #comment_author_reward   = value['payout']
-              author_rewards          = value['author_rewards']
-              total_payout_value      = value['total_payout_value']
-              curator_payout_value    = value['curator_payout_value']
-              #beneficiary_payout_value = value['beneficiary_payout_value']
-
-              payout = sum([ sbd_amount(total_payout_value), sbd_amount(curator_payout_value) ])
-              pending_payout = 0
-              last_payout_at = date
-
-            # estimated pending_payout from vote (if exists with actual payout the value comes from vote cast after payout)
-            if v[ 'effective_comment_vote_operation' ] is not None:
-              value, date = v[ 'effective_comment_vote_operation' ]
-              if author is None:
-                author = value['author']
-                permlink = value['permlink']
-              pending_payout          = sbd_amount( value['pending_payout'] )
-              total_vote_weight       = value['total_vote_weight']
-
-
-            cls._comment_payout_ops.append("('{}', {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {})".format(
-              author,
-              escape_characters(permlink),
-              "NULL" if ( total_payout_value is None ) else ( "'{}'".format( legacy_amount(total_payout_value) ) ),
-              "NULL" if ( curator_payout_value is None ) else ( "'{}'".format( legacy_amount(curator_payout_value) ) ),
-              author_rewards,
-              "NULL" if ( author_rewards_hive is None ) else author_rewards_hive,
-              "NULL" if ( author_rewards_hbd is None ) else author_rewards_hbd,
-              "NULL" if ( author_rewards_vests is None ) else author_rewards_vests,
-              "NULL" if ( payout is None ) else payout,
-              "NULL" if ( pending_payout is None ) else pending_payout,
-
-              "NULL" if ( payout_at is None ) else ( "'{}'::timestamp".format( payout_at ) ),
-              "NULL" if ( last_payout_at is None ) else ( "'{}'::timestamp".format( last_payout_at ) ),
-              "NULL" if ( cashout_time is None ) else ( "'{}'::timestamp".format( cashout_time ) ),
-
-              "NULL" if ( is_paidout is None ) else is_paidout,
-
-              "NULL" if ( total_vote_weight is None ) else total_vote_weight ))
-
-
-        n = len(cls.comment_payout_ops)
-        cls.comment_payout_ops.clear()
-        return n
-
-    @classmethod
-    def update_child_count(cls, child_id, op='+'):
-        """ Increase/decrease child count by 1 """
-        sql = """
-            UPDATE
-                hive_posts
-            SET
-                children = GREATEST(0, (
-                    SELECT
-                        CASE
-                            WHEN children is NULL THEN 0
-                            WHEN children=32762 THEN 0
-                            ELSE children
-                        END
-                    FROM
-                        hive_posts
-                    WHERE id = (SELECT parent_id FROM hive_posts WHERE id = :child_id)
-                )::int
-        """
-        if op == '+':
-            sql += """ + 1)"""
-        else:
-            sql += """ - 1)"""
-        sql += """ WHERE id = (SELECT parent_id FROM hive_posts WHERE id = :child_id)"""
-
-        DB.query(sql, child_id=child_id)
-
-    @classmethod
-    def comment_options_op(cls, op):
-        """ Process comment_options_operation """
-        max_accepted_payout = legacy_amount(op['max_accepted_payout']) if 'max_accepted_payout' in op else '1000000.000 HBD'
-        allow_votes = op['allow_votes'] if 'allow_votes' in op else True
-        allow_curation_rewards = op['allow_curation_rewards'] if 'allow_curation_rewards' in op else True
-        percent_hbd = op['percent_hbd'] if 'percent_hbd' in op else 10000
-        extensions = op['extensions'] if 'extensions' in op else []
-        beneficiaries = []
-        for ex in extensions:
-            if 'type' in ex and ex['type'] == 'comment_payout_beneficiaries' and 'beneficiaries' in ex['value']:
-                beneficiaries = ex['value']['beneficiaries']
-        sql = """
-            UPDATE
-                hive_posts hp
-            SET
-                max_accepted_payout = :max_accepted_payout,
-                percent_hbd = :percent_hbd,
-                allow_votes = :allow_votes,
-                allow_curation_rewards = :allow_curation_rewards,
-                beneficiaries = :beneficiaries
-            WHERE
-            hp.author_id = (SELECT id FROM hive_accounts WHERE name = :author) AND
-            hp.permlink_id = (SELECT id FROM hive_permlink_data WHERE permlink = :permlink)
-        """
-        DB.query(sql, author=op['author'], permlink=op['permlink'], max_accepted_payout=max_accepted_payout,
-                 percent_hbd=percent_hbd, allow_votes=allow_votes, allow_curation_rewards=allow_curation_rewards,
-                 beneficiaries=dumps(beneficiaries))
-
-    @classmethod
-    def delete(cls, op):
-        """Marks a post record as being deleted."""
-        sql = "SELECT delete_hive_post((:author)::varchar, (:permlink)::varchar, (:block_num)::int );"
-        DB.query_no_return(sql, author=op['author'], permlink = op['permlink'], block_num=op['block_num'])
-
-    @classmethod
-    def _verify_post_against_community(cls, op, community_id, is_valid, is_muted):
-        error = None
-        if community_id and is_valid and not Community.is_post_valid(community_id, op):
-            error = 'not authorized'
-            #is_valid = False # TODO: reserved for future blacklist status?
-            is_muted = True
-        return error
-
-    @classmethod
-    def _merge_post_body(cls, id, new_body_def):
-        new_body = ''
-        old_body = ''
-
-        try:
-            dmp = diff_match_patch()
-            patch = dmp.patch_fromText(new_body_def)
-            if patch is not None and len(patch):
-                old_body = PostDataCache.get_post_body(id)
-                new_body, _ = dmp.patch_apply(patch, old_body)
-                #new_utf8_body = new_body.decode('utf-8')
-                #new_body = new_utf8_body
-            else:
-                new_body = new_body_def
-        except ValueError as e:
-#            log.info("Merging a body post id: {} caused an ValueError exception {}".format(id, e))
-#            log.info("New body definition: {}".format(new_body_def))
-#            log.info("Old body definition: {}".format(old_body))
-            new_body = new_body_def
-        except Exception as ex:
-            log.info("Merging a body post id: {} caused an unknown exception {}".format(id, ex))
-            log.info("New body definition: {}".format(new_body_def))
-            log.info("Old body definition: {}".format(old_body))
-            new_body = new_body_def
-
-        return new_body
-
-
-    @classmethod
-    def flush(cls):
-      return cls.comment_payout_op() + cls.flush_into_db()
+"""Core posts manager."""
+
+import logging
+import collections
+
+from ujson import dumps, loads
+
+from diff_match_patch import diff_match_patch
+
+from hive.db.adapter import Db
+from hive.db.db_state import DbState
+
+from hive.indexer.reblog import Reblog
+from hive.indexer.community import Community
+from hive.indexer.notify import Notify
+from hive.indexer.post_data_cache import PostDataCache
+from hive.indexer.db_adapter_holder import DbAdapterHolder
+from hive.utils.misc import chunks
+
+from hive.utils.normalize import sbd_amount, legacy_amount, safe_img_url, escape_characters
+
+log = logging.getLogger(__name__)
+DB = Db.instance()
+
+class Posts(DbAdapterHolder):
+    """Handles critical/core post ops and data."""
+
+    # LRU cache for (author-permlink -> id) lookup (~400mb per 1M entries)
+    CACHE_SIZE = 2000000
+    _ids = collections.OrderedDict()
+    _hits = 0
+    _miss = 0
+
+    comment_payout_ops = {}
+    _comment_payout_ops = []
+
+    @classmethod
+    def last_id(cls):
+        """Get the last indexed post id."""
+        sql = "SELECT MAX(id) FROM hive_posts WHERE counter_deleted = 0"
+        return DB.query_one(sql) or 0
+
+    @classmethod
+    def get_id(cls, author, permlink):
+        """Look up id by author/permlink, making use of LRU cache."""
+        url = author+'/'+permlink
+        if url in cls._ids:
+            cls._hits += 1
+            _id = cls._ids.pop(url)
+            cls._ids[url] = _id
+        else:
+            cls._miss += 1
+            sql = """
+                SELECT hp.id
+                FROM hive_posts hp
+                INNER JOIN hive_accounts ha_a ON ha_a.id = hp.author_id
+                INNER JOIN hive_permlink_data hpd_p ON hpd_p.id = hp.permlink_id
+                WHERE ha_a.name = :a AND hpd_p.permlink = :p
+            """
+            _id = DB.query_one(sql, a=author, p=permlink)
+            if _id:
+                cls._set_id(url, _id)
+
+        # cache stats (logged every 100K lookups)
+        total = cls._hits + cls._miss
+        if total % 100000 == 0:
+            log.info("pid lookups: %d, hits: %d (%.1f%%), entries: %d",
+                     total, cls._hits, 100.0*cls._hits/total, len(cls._ids))
+
+        return _id
+
+    @classmethod
+    def _set_id(cls, url, pid):
+        """Add an entry to the LRU, maintaining max size."""
+        assert pid, "no pid provided for %s" % url
+        if len(cls._ids) > cls.CACHE_SIZE:
+            cls._ids.popitem(last=False)
+        cls._ids[url] = pid
+
+    @classmethod
+    def delete_op(cls, op, block_date):
+        """Given a delete_comment op, mark the post as deleted.
+
+        Also remove it from post-cache and feed-cache.
+        """
+        cls.delete(op, block_date)
+
+    @classmethod
+    def comment_op(cls, op, block_date):
+        """Register new/edited/undeleted posts; insert into feed cache."""
+
+        md = {}
+        # At least one case where jsonMetadata was double-encoded: condenser#895
+        # jsonMetadata = JSON.parse(jsonMetadata);
+        try:
+            md = loads(op['json_metadata'])
+            if not isinstance(md, dict):
+                md = {}
+        except Exception:
+            pass
+
+        tags = []
+        if md and 'tags' in md and isinstance(md['tags'], list):
+            tags = md['tags']
+
+        sql = """
+            SELECT is_new_post, id, author_id, permlink_id, post_category, parent_id, community_id, is_valid, is_muted, depth
+            FROM process_hive_post_operation((:author)::varchar, (:permlink)::varchar, (:parent_author)::varchar, (:parent_permlink)::varchar, (:date)::timestamp, (:community_support_start_block)::integer, (:block_num)::integer, (:tags)::VARCHAR[]);
+            """
+
+        row = DB.query_row(sql, author=op['author'], permlink=op['permlink'], parent_author=op['parent_author'],
+                   parent_permlink=op['parent_permlink'], date=block_date, community_support_start_block=Community.start_block, block_num=op['block_num'], tags=tags)
+
+        result = dict(row)
+
+        # TODO we need to enhance checking related community post validation and honor is_muted.
+        error = cls._verify_post_against_community(op, result['community_id'], result['is_valid'], result['is_muted'])
+
+        cls._set_id(op['author']+'/'+op['permlink'], result['id'])
+
+        img_url = None
+        if 'image' in md:
+            img_url = md['image']
+            if isinstance(img_url, list) and img_url:
+                img_url = img_url[0]
+        if img_url:
+            img_url = safe_img_url(img_url)
+
+        is_new_post = result['is_new_post']
+        if is_new_post:
+            # add content data to hive_post_data
+            post_data = dict(title=op['title'] if op['title'] else '',
+                             img_url=img_url if img_url else '',
+                             body=op['body'] if op['body'] else '',
+                             json=op['json_metadata'] if op['json_metadata'] else '')
+        else:
+            # edit case. Now we need to (potentially) apply patch to the post body.
+            # an empty new body means no body edit, not clearing it (the same applies to the other fields)
+            new_body = cls._merge_post_body(id=result['id'], new_body_def=op['body']) if op['body'] else None
+            new_title = op['title'] if op['title'] else None
+            new_json = op['json_metadata'] if op['json_metadata'] else None
+            # when 'new_json' is not empty, 'img_url' should be overwritten even if it is itself empty
+            new_img = img_url if img_url else '' if new_json else None
+            post_data = dict(title=new_title, img_url=new_img, body=new_body, json=new_json)
+
+#        log.info("Adding author: {}  permlink: {}".format(op['author'], op['permlink']))
+        PostDataCache.add_data(result['id'], post_data, is_new_post)
+
+        if not DbState.is_initial_sync():
+            if error:
+                author_id = result['author_id']
+                Notify(block_num=op['block_num'], type_id='error', dst_id=author_id, when=block_date,
+                       post_id=result['id'], payload=error)
+
+    @classmethod
+    def flush_into_db(cls):
+        sql = """
+              UPDATE hive_posts AS ihp SET
+                  total_payout_value    = COALESCE( data_source.total_payout_value,                     ihp.total_payout_value ),
+                  curator_payout_value  = COALESCE( data_source.curator_payout_value,                   ihp.curator_payout_value ),
+                  author_rewards        = CAST( data_source.author_rewards as BIGINT ) + ihp.author_rewards,
+                  author_rewards_hive   = COALESCE( CAST( data_source.author_rewards_hive as BIGINT ),  ihp.author_rewards_hive ),
+                  author_rewards_hbd    = COALESCE( CAST( data_source.author_rewards_hbd as BIGINT ),   ihp.author_rewards_hbd ),
+                  author_rewards_vests  = COALESCE( CAST( data_source.author_rewards_vests as BIGINT ), ihp.author_rewards_vests ),
+                  payout                = COALESCE( CAST( data_source.payout as DECIMAL ),              ihp.payout ),
+                  pending_payout        = COALESCE( CAST( data_source.pending_payout as DECIMAL ),      ihp.pending_payout ),
+                  payout_at             = COALESCE( CAST( data_source.payout_at as TIMESTAMP ),         ihp.payout_at ),
+                  last_payout_at        = COALESCE( CAST( data_source.last_payout_at as TIMESTAMP ),    ihp.last_payout_at ),
+                  cashout_time          = COALESCE( CAST( data_source.cashout_time as TIMESTAMP ),      ihp.cashout_time ),
+                  is_paidout            = COALESCE( CAST( data_source.is_paidout as BOOLEAN ),          ihp.is_paidout ),
+                  total_vote_weight     = COALESCE( CAST( data_source.total_vote_weight as NUMERIC ),   ihp.total_vote_weight )
+              FROM
+              (
+              SELECT  ha_a.id as author_id, hpd_p.id as permlink_id,
+                      t.total_payout_value,
+                      t.curator_payout_value,
+                      t.author_rewards,
+                      t.author_rewards_hive,
+                      t.author_rewards_hbd,
+                      t.author_rewards_vests,
+                      t.payout,
+                      t.pending_payout,
+                      t.payout_at,
+                      t.last_payout_at,
+                      t.cashout_time,
+                      t.is_paidout,
+                      t.total_vote_weight
+              from
+              (
+              VALUES
+                --- put all constant values here
+                {}
+              ) AS T(author, permlink,
+                      total_payout_value,
+                      curator_payout_value,
+                      author_rewards,
+                      author_rewards_hive,
+                      author_rewards_hbd,
+                      author_rewards_vests,
+                      payout,
+                      pending_payout,
+                      payout_at,
+                      last_payout_at,
+                      cashout_time,
+                      is_paidout,
+                      total_vote_weight)
+              INNER JOIN hive_accounts ha_a ON ha_a.name = t.author
+              INNER JOIN hive_permlink_data hpd_p ON hpd_p.permlink = t.permlink
+              ) as data_source
+              WHERE ihp.permlink_id = data_source.permlink_id and ihp.author_id = data_source.author_id
+        """
+
+        for chunk in chunks(cls._comment_payout_ops, 1000):
+            cls.beginTx()
+
+            values_str = ','.join(chunk)
+            actual_query = sql.format(values_str)
+            cls.db.query(actual_query)
+
+            cls.commitTx()
+
+        n = len(cls._comment_payout_ops)
+        cls._comment_payout_ops.clear()
+        return n
+
+    @classmethod
+    def comment_payout_op(cls):
+        """ Process comment payout operations """
+        values_limit = 1000
+
+        for k, v in cls.comment_payout_ops.items():
+            author                    = None
+            permlink                  = None
+
+            # author payouts
+            author_rewards            = 0
+            author_rewards_hive       = None
+            author_rewards_hbd        = None
+            author_rewards_vests      = None
+
+            # total payout for comment
+            #comment_author_reward     = None
+            #curators_vesting_payout   = None
+            total_payout_value        = None;
+            curator_payout_value      = None;
+            #beneficiary_payout_value  = None;
+
+            payout                    = None
+            pending_payout            = None
+
+            payout_at                 = None
+            last_payout_at            = None
+            cashout_time              = None
+
+            is_paidout                = None
+
+            total_vote_weight         = None
+
+            # final payout indicator - by default all rewards are zero, but might be overwritten by other operations
+            if v[ 'comment_payout_update_operation' ] is not None:
+              value, date = v[ 'comment_payout_update_operation' ]
+              if author is None:
+                author = value['author']
+                permlink = value['permlink']
+              is_paidout              = True
+              payout_at               = date
+              last_payout_at          = date
+              cashout_time            = "infinity"
+
+              pending_payout          = 0
+
+            # author rewards in current (final or nonfinal) payout (always comes with comment_reward_operation)
+            if v[ 'author_reward_operation' ] is not None:
+              value, date = v[ 'author_reward_operation' ]
+              if author is None:
+                author = value['author']
+                permlink = value['permlink']
+              author_rewards_hive     = value['hive_payout']['amount']
+              author_rewards_hbd      = value['hbd_payout']['amount']
+              author_rewards_vests    = value['vesting_payout']['amount']
+              #curators_vesting_payout = value['curators_vesting_payout']['amount']
+
+            # summary of comment rewards in current (final or nonfinal) payout (always comes with author_reward_operation)
+            if v[ 'comment_reward_operation' ] is not None:
+              value, date = v[ 'comment_reward_operation' ]
+              if author is None:
+                author = value['author']
+                permlink = value['permlink']
+              #comment_author_reward   = value['payout']
+              author_rewards          = value['author_rewards']
+              total_payout_value      = value['total_payout_value']
+              curator_payout_value    = value['curator_payout_value']
+              #beneficiary_payout_value = value['beneficiary_payout_value']
+
+              payout = sum([ sbd_amount(total_payout_value), sbd_amount(curator_payout_value) ])
+              pending_payout = 0
+              last_payout_at = date
+
+            # estimated pending_payout from a vote (if it coexists with an actual payout, the value comes from a vote cast after the payout)
+            if v[ 'effective_comment_vote_operation' ] is not None:
+              value, date = v[ 'effective_comment_vote_operation' ]
+              if author is None:
+                author = value['author']
+                permlink = value['permlink']
+              pending_payout          = sbd_amount( value['pending_payout'] )
+              total_vote_weight       = value['total_vote_weight']
+
+
+            cls._comment_payout_ops.append("('{}', {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {})".format(
+              author,
+              escape_characters(permlink),
+              "NULL" if ( total_payout_value is None ) else ( "'{}'".format( legacy_amount(total_payout_value) ) ),
+              "NULL" if ( curator_payout_value is None ) else ( "'{}'".format( legacy_amount(curator_payout_value) ) ),
+              author_rewards,
+              "NULL" if ( author_rewards_hive is None ) else author_rewards_hive,
+              "NULL" if ( author_rewards_hbd is None ) else author_rewards_hbd,
+              "NULL" if ( author_rewards_vests is None ) else author_rewards_vests,
+              "NULL" if ( payout is None ) else payout,
+              "NULL" if ( pending_payout is None ) else pending_payout,
+
+              "NULL" if ( payout_at is None ) else ( "'{}'::timestamp".format( payout_at ) ),
+              "NULL" if ( last_payout_at is None ) else ( "'{}'::timestamp".format( last_payout_at ) ),
+              "NULL" if ( cashout_time is None ) else ( "'{}'::timestamp".format( cashout_time ) ),
+
+              "NULL" if ( is_paidout is None ) else is_paidout,
+
+              "NULL" if ( total_vote_weight is None ) else total_vote_weight ))
+
+
+        n = len(cls.comment_payout_ops)
+        cls.comment_payout_ops.clear()
+        return n
+
+    @classmethod
+    def update_child_count(cls, child_id, op='+'):
+        """ Increase/decrease child count by 1 """
+        sql = """
+            UPDATE
+                hive_posts
+            SET
+                children = GREATEST(0, (
+                    SELECT
+                        CASE
+                            WHEN children is NULL THEN 0
+                            WHEN children=32762 THEN 0
+                            ELSE children
+                        END
+                    FROM
+                        hive_posts
+                    WHERE id = (SELECT parent_id FROM hive_posts WHERE id = :child_id)
+                )::int
+        """
+        if op == '+':
+            sql += """ + 1)"""
+        else:
+            sql += """ - 1)"""
+        sql += """ WHERE id = (SELECT parent_id FROM hive_posts WHERE id = :child_id)"""
+
+        DB.query(sql, child_id=child_id)
+
+    @classmethod
+    def comment_options_op(cls, op):
+        """ Process comment_options_operation """
+        max_accepted_payout = legacy_amount(op['max_accepted_payout']) if 'max_accepted_payout' in op else '1000000.000 HBD'
+        allow_votes = op['allow_votes'] if 'allow_votes' in op else True
+        allow_curation_rewards = op['allow_curation_rewards'] if 'allow_curation_rewards' in op else True
+        percent_hbd = op['percent_hbd'] if 'percent_hbd' in op else 10000
+        extensions = op['extensions'] if 'extensions' in op else []
+        beneficiaries = []
+        for ex in extensions:
+            if 'type' in ex and ex['type'] == 'comment_payout_beneficiaries' and 'beneficiaries' in ex['value']:
+                beneficiaries = ex['value']['beneficiaries']
+        sql = """
+            UPDATE
+                hive_posts hp
+            SET
+                max_accepted_payout = :max_accepted_payout,
+                percent_hbd = :percent_hbd,
+                allow_votes = :allow_votes,
+                allow_curation_rewards = :allow_curation_rewards,
+                beneficiaries = :beneficiaries
+            WHERE
+            hp.author_id = (SELECT id FROM hive_accounts WHERE name = :author) AND
+            hp.permlink_id = (SELECT id FROM hive_permlink_data WHERE permlink = :permlink)
+        """
+        DB.query(sql, author=op['author'], permlink=op['permlink'], max_accepted_payout=max_accepted_payout,
+                 percent_hbd=percent_hbd, allow_votes=allow_votes, allow_curation_rewards=allow_curation_rewards,
+                 beneficiaries=dumps(beneficiaries))
+
+    @classmethod
+    def delete(cls, op, block_date):
+        """Marks a post record as being deleted."""
+        sql = "SELECT delete_hive_post((:author)::varchar, (:permlink)::varchar, (:block_num)::int, (:date)::timestamp);"
+        DB.query_no_return(sql, author=op['author'], permlink = op['permlink'], block_num=op['block_num'], date=block_date)
+
+    @classmethod
+    def _verify_post_against_community(cls, op, community_id, is_valid, is_muted):
+        error = None
+        if community_id and is_valid and not Community.is_post_valid(community_id, op):
+            error = 'not authorized'
+            #is_valid = False # TODO: reserved for future blacklist status?
+            is_muted = True
+        return error
+
+    @classmethod
+    def _merge_post_body(cls, id, new_body_def):
+        new_body = ''
+        old_body = ''
+
+        try:
+            dmp = diff_match_patch()
+            patch = dmp.patch_fromText(new_body_def)
+            if patch is not None and len(patch):
+                old_body = PostDataCache.get_post_body(id)
+                new_body, _ = dmp.patch_apply(patch, old_body)
+                #new_utf8_body = new_body.decode('utf-8')
+                #new_body = new_utf8_body
+            else:
+                new_body = new_body_def
+        except ValueError as e:
+#            log.info("Merging a body post id: {} caused an ValueError exception {}".format(id, e))
+#            log.info("New body definition: {}".format(new_body_def))
+#            log.info("Old body definition: {}".format(old_body))
+            new_body = new_body_def
+        except Exception as ex:
+            log.info("Merging a body post id: {} caused an unknown exception {}".format(id, ex))
+            log.info("New body definition: {}".format(new_body_def))
+            log.info("Old body definition: {}".format(old_body))
+            new_body = new_body_def
+
+        return new_body
+
+
+    @classmethod
+    def flush(cls):
+      return cls.comment_payout_op() + cls.flush_into_db()
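Note: the deletion path now threads the head-block timestamp all the way down so that hive_posts.active is refreshed when a post is deleted: Blocks passes cls._head_block_date into Posts.delete_op(), which forwards it through Posts.delete() to the extended delete_hive_post() SQL function. A condensed sketch of the resulting call chain (the literal values are hypothetical; DB is the hive.db.adapter.Db singleton used above):

# Sketch only, not part of the patch.
op = {'author': 'alice', 'permlink': 'my-post', 'block_num': 12345}   # hypothetical delete_comment op
block_date = '2020-09-01T12:00:00'                                    # Blocks._head_block_date
Posts.delete_op(op, block_date)                                       # -> Posts.delete(op, block_date)
# ...which runs, via DB.query_no_return():
# SELECT delete_hive_post('alice'::varchar, 'my-post'::varchar, 12345::int, '2020-09-01T12:00:00'::timestamp);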
diff --git a/hive/server/bridge_api/methods.py b/hive/server/bridge_api/methods.py
index c9fb5ef0ef998cc8c879806a49473e8ba5ca66c4..a2bb35e6c6b89ff372d8b3499861836b4d835dc2 100644
--- a/hive/server/bridge_api/methods.py
+++ b/hive/server/bridge_api/methods.py
@@ -19,8 +19,11 @@ from hive.server.common.mutes import Mutes
 async def get_profile(context, account, observer=None):
     """Load account/profile data."""
     db = context['db']
+    account = valid_account(account)
+    observer = valid_account(observer, allow_empty=True)
+
     ret = await load_profiles(db, [valid_account(account)])
-    assert ret, 'Account \'{}\' does not exist'.format(account)
+    assert ret, 'Account \'{}\' does not exist'.format(account) # should not be needed
 
     observer_id = await get_account_id(db, observer) if observer else None
     if observer_id:
diff --git a/hive/server/database_api/methods.py b/hive/server/database_api/methods.py
index c85de29a418727b83bbb1cf2e8fa60fd76ff368a..405da77083772d51331f15630b5be24846771d37 100644
--- a/hive/server/database_api/methods.py
+++ b/hive/server/database_api/methods.py
@@ -4,6 +4,7 @@ from enum import Enum
 from hive.server.common.helpers import return_error_info, valid_limit, valid_account, valid_permlink, valid_date
 from hive.server.database_api.objects import database_post_object
 from hive.server.common.helpers import json_date
+from hive.utils.normalize import escape_characters
 
 @return_error_info
 async def list_comments(context, start: list, limit: int = 1000, order: str = None):
@@ -140,9 +141,10 @@ async def find_comments(context, comments: list):
             hp.author_rewards
         FROM
             hive_posts_view hp
-        JOIN (VALUES {}) AS t (author, permlink) ON hp.author = t.author AND hp.permlink = t.permlink
+        JOIN (VALUES {}) AS t (author, permlink, number) ON hp.author = t.author AND hp.permlink = t.permlink
         WHERE
             NOT hp.is_muted
+        ORDER BY t.number
     """
 
     idx = 0
@@ -156,7 +158,7 @@ async def find_comments(context, comments: list):
             continue
         if idx > 0:
             values += ","
-        values += "('{}','{}')".format(author, permlink) # escaping most likely needed
+        values += "({},{},{})".format(escape_characters(author), escape_characters(permlink), idx)
         idx += 1
     sql = SQL_TEMPLATE.format(values)
 
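Note: the find_comments change does two things at once: author/permlink values are escaped with escape_characters() before being spliced into the VALUES list, and a running index column (t.number) is added so ORDER BY t.number returns rows in the same order as the requested (author, permlink) pairs. A condensed sketch of the values string the loop builds (the input list is hypothetical, validation/skipping is omitted, and escape_characters() is assumed to return an already-quoted SQL literal, as its use here implies):

# Sketch only, not part of the patch.
from hive.utils.normalize import escape_characters

comments = [('alice', 'first-post'), ('bob', 'reply-1')]   # hypothetical request payload
idx = 0
values = ""
for author, permlink in comments:
    if idx > 0:
        values += ","
    values += "({},{},{})".format(escape_characters(author), escape_characters(permlink), idx)
    idx += 1
# values is then substituted into SQL_TEMPLATE, and ORDER BY t.number preserves the request order.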
diff --git a/hive/server/hive_api/common.py b/hive/server/hive_api/common.py
index dd882678996101a7617e1eb6b3e482345692ddf1..fe39d4c82e296e9ac2ae57010f56fee6e483fefa 100644
--- a/hive/server/hive_api/common.py
+++ b/hive/server/hive_api/common.py
@@ -14,15 +14,11 @@ def __used_refs():
 
 async def get_community_id(db, name):
     """Get community id from db."""
-    return await db.query_one("SELECT id FROM hive_communities WHERE name = :name",
-                              name=name)
+    return await db.query_one("SELECT find_community_id( (:name)::VARCHAR, True )", name=name)
 
 async def get_account_id(db, name):
     """Get account id from account name."""
-    assert name, 'no account name specified'
-    _id = await db.query_one("SELECT id FROM hive_accounts WHERE name = :n", n=name)
-    assert _id, "account not found: `%s`" % name
-    return _id
+    return await db.query_one("SELECT find_account_id( (:name)::VARCHAR, True )", name=name)
 
 def estimated_sp(vests):
     """Convert VESTS to SP units for display."""
diff --git a/hive/server/hive_api/community.py b/hive/server/hive_api/community.py
index c3828d21826ebe205f5c3e78fcab0b0915afcefc..08c2e770593e538fed20d3db4ee5d68eb76e6fe8 100644
--- a/hive/server/hive_api/community.py
+++ b/hive/server/hive_api/community.py
@@ -35,11 +35,10 @@ async def get_community(context, name, observer=None):
     """
     db = context['db']
     cid = await get_community_id(db, name)
-    assert cid, 'community not found'
     communities = await load_communities(db, [cid], lite=False)
 
     if observer:
-        valid_account(observer)
+        observer = valid_account(observer)
         observer_id = await get_account_id(db, observer)
         await _append_observer_roles(db, communities, observer_id)
         await _append_observer_subs(db, communities, observer_id)
@@ -50,9 +49,8 @@ async def get_community(context, name, observer=None):
 async def get_community_context(context, name, account):
     """For a community/account: returns role, title, subscribed state"""
     db = context['db']
-    valid_account(account)
+    account = valid_account(account)
     cid = await get_community_id(db, name)
-    assert cid, 'community not found'
 
     aid = await get_account_id(db, account)
     assert aid, 'account not found'
@@ -111,7 +109,7 @@ async def list_pop_communities(context, limit:int=25):
 async def list_all_subscriptions(context, account):
     """Lists all communities `account` subscribes to, plus role and title in each."""
     db = context['db']
-    valid_account(account)
+    account = valid_account(account)
     account_id = await get_account_id(db, account)
 
     sql = """SELECT c.name, c.title, COALESCE(r.role_id, 0), COALESCE(r.title, '')
@@ -183,6 +181,7 @@ async def list_communities(context, last='', limit=100, query=None, sort='rank',
     # append observer context, leadership data
     communities = await load_communities(db, ids, lite=True)
     if observer:
+        observer = valid_account(observer)
         observer_id = await get_account_id(db, observer)
         await _append_observer_subs(db, communities, observer_id)
         await _append_observer_roles(db, communities, observer_id)
@@ -349,11 +348,12 @@ async def top_community_authors(context, community):
 async def top_community_muted(context, community):
     """Get top authors (by SP) who are muted in a community."""
     db = context['db']
+    cid = await get_community_id(db, community)
     sql = """SELECT a.name, a.voting_weight, r.title FROM hive_accounts a
                JOIN hive_roles r ON a.id = r.account_id
               WHERE r.community_id = :community_id AND r.role_id < 0
            ORDER BY voting_weight DESC LIMIT 5"""
-    return await db.query(sql, community_id=await get_community_id(db, community))
+    return await db.query(sql, community_id=cid)
 
 async def _top_community_posts(db, community, limit=50):
     # TODO: muted equivalent
diff --git a/hive/server/hive_api/objects.py b/hive/server/hive_api/objects.py
index f6bb342f9e97ef917dcdd53e31c4444ef172bb44..560269c8780d9574f34f42a4eb7ef603ad3c370f 100644
--- a/hive/server/hive_api/objects.py
+++ b/hive/server/hive_api/objects.py
@@ -36,6 +36,7 @@ async def accounts_by_name(db, names, observer=None, lite=True):
         accounts[account['id']] = account
 
     if observer:
+        observer = valid_account(observer)
         await _follow_contexts(db, accounts,
                                observer_id=await get_account_id(db, observer),
                                include_mute=not lite)
diff --git a/hive/server/hive_api/public.py b/hive/server/hive_api/public.py
index 273607a56d1e9bb5c97c4bbdcda1afbd8ceb0ae8..0009453caec02d25700e01625884f8c2a65590ea 100644
--- a/hive/server/hive_api/public.py
+++ b/hive/server/hive_api/public.py
@@ -58,6 +58,7 @@ async def list_following(context, account:str, start:str='', limit:int=50, obser
 async def list_all_muted(context, account):
     """Get a list of all account names muted by `account`."""
     db = context['db']
+    account = valid_account(account)
     sql = """SELECT a.name FROM hive_follows f
                JOIN hive_accounts a ON f.following_id = a.id
               WHERE follower = :follower AND state = 2"""
diff --git a/hive/utils/post_active.py b/hive/utils/post_active.py
index 99e37c4700879d174cdf0a27bbd37f4369bffb66..86a272aca2d10bf5023db0230cf311ccb82c7dab 100644
--- a/hive/utils/post_active.py
+++ b/hive/utils/post_active.py
@@ -6,54 +6,55 @@ DB = Db.instance()
 There are three cases when 'active' field in post is updated:
 1) when a descendant post comment was added (recursivly on any depth)
 2) when a descendant post comment was deleted (recursivly on any depth)
-3) when the post is updated
+3) when the post itself is updated - that case updates only that post's own 'active' field (not handled here)
 
 It means that, when the comment for posts is updated then its 'active' field
 does not propagate for its ancestors.
 """
 
 update_active_sql = """
-    WITH RECURSIVE parent_posts ( parent_id, post_id, intrusive_active) AS (
-    	SELECT
-    		parent_id as parent_id,
-    		id as post_id,
-    		CASE WHEN hp1.active = hp1.created_at OR hp1.counter_deleted > 0 THEN hp1.active
-    		ELSE hp1.created_at
-    		END as intrusive_active
-    	FROM hive_posts hp1 {}
-    	UNION
-    	SELECT
-    		hp2.parent_id as parent_id,
-    		id as post_id,
-    		max_time_stamp(
-    			CASE WHEN hp2.active = hp2.created_at OR hp2.counter_deleted > 0 THEN hp2.active
-    			ELSE hp2.created_at
-    			END
-    			, pp.intrusive_active
-    		) as intrusive_active
-    	FROM parent_posts pp
-    	JOIN hive_posts hp2 ON pp.parent_id = hp2.id
-    	WHERE hp2.depth > 0 AND pp.intrusive_active > hp2.active
+    WITH RECURSIVE parent_posts ( parent_id, post_id, intrusive_active ) AS (
+      SELECT
+        hp1.parent_id as parent_id,
+        hp1.id as post_id,
+        CASE WHEN hp1.counter_deleted > 0 THEN hp1.active
+        ELSE hp1.created_at
+        END as intrusive_active
+      FROM hive_posts hp1
+      WHERE hp1.depth > 0 {}
+      UNION
+      SELECT
+        hp2.parent_id as parent_id,
+        hp2.id as post_id,
+        max_time_stamp(
+          CASE WHEN hp2.counter_deleted > 0 THEN hp2.active
+          ELSE hp2.created_at
+          END
+          , pp.intrusive_active
+        ) as intrusive_active
+      FROM parent_posts pp
+      JOIN hive_posts hp2 ON pp.parent_id = hp2.id
+      WHERE hp2.depth > 0
     )
-   UPDATE
-       hive_posts
-   SET
-       active = new_active
-   FROM
-   (
-        SELECT hp.id as post_id, max_time_stamp( hp.active, MAX(pp.intrusive_active)) as new_active
-        FROM parent_posts pp
-        JOIN hive_posts hp ON pp.parent_id = hp.id GROUP BY hp.id
+    UPDATE
+      hive_posts
+    SET
+      active = new_active
+    FROM
+    (
+      SELECT hp.id as post_id, max_time_stamp( hp.active, MAX(pp.intrusive_active) ) as new_active
+      FROM parent_posts pp
+      JOIN hive_posts hp ON pp.parent_id = hp.id GROUP BY hp.id
     ) as dataset
     WHERE dataset.post_id = hive_posts.id;
     """
 
 def update_all_posts_active():
-    DB.query_no_return(update_active_sql.format( "WHERE ( children = 0 OR hp1.counter_deleted > 0 ) AND depth > 0" ))
+    DB.query_no_return(update_active_sql.format( "AND ( hp1.children = 0 )" ))
 
 @time_it
 def update_active_starting_from_posts_on_block( first_block_num, last_block_num ):
     if first_block_num == last_block_num:
-            DB.query_no_return(update_active_sql.format( "WHERE block_num={} AND depth > 0" ).format(first_block_num) )
+            DB.query_no_return(update_active_sql.format( "AND hp1.block_num = {}" ).format(first_block_num) )
             return
-    DB.query_no_return(update_active_sql.format( "WHERE block_num>={} AND block_num <={} AND depth > 0" ).format(first_block_num, last_block_num) )
+    DB.query_no_return(update_active_sql.format( "AND hp1.block_num >= {} AND hp1.block_num <= {}" ).format(first_block_num, last_block_num) )
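Note: the block-range entry point fills the single {} placeholder in update_active_sql twice over: the first .format() injects the seed-row filter (which itself contains {} placeholders for the block numbers), and the chained second .format() fills those in. A short sketch of that mechanism (block numbers are hypothetical):

# Sketch only, not part of the patch.
filter_sql = "AND hp1.block_num >= {} AND hp1.block_num <= {}"
sql = update_active_sql.format(filter_sql).format(100, 200)   # hypothetical block range
DB.query_no_return(sql)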
diff --git a/tests/tests_api b/tests/tests_api
index 395f7109b2544cd1ace33a6027a0078d65fb1722..527f27b14bdf10c8a543b015dc3cad0afb0ada6c 160000
--- a/tests/tests_api
+++ b/tests/tests_api
@@ -1 +1 @@
-Subproject commit 395f7109b2544cd1ace33a6027a0078d65fb1722
+Subproject commit 527f27b14bdf10c8a543b015dc3cad0afb0ada6c