Commit 11cda7cc authored by Bartek Wrona

[BW]: fixes to found bugs

- defined the vw_hive_posts view to simplify queries and eliminate joins on the query side
- fixed a fatal bug in special-character escaping that led to modification of the post body, title, etc.
- fixed a bug in querying for root_title in the condenser_api/tags_api get_comment_discussions_by method family
- fixed a bug in querying for the post url
- eliminated unneeded fields in _condenser_post_object that caused additional, unneeded data to be returned
- enabled the tags_api group in the 5M smoketest
parent fb9bd3dc
Part of 4 merge requests: !456 Release candidate v1 24, !230 Setup monitoring with pghero, !135 Enable postgres monitoring on CI server, !33 [BW]: fixes to found bugs
@@ -584,6 +584,68 @@ def setup(db):
    """
    db.query_no_return(sql)
+    sql = """
+        DROP VIEW IF EXISTS public.vw_hive_posts;
+        CREATE OR REPLACE VIEW public.vw_hive_posts
+        AS
+        SELECT hp.id,
+               hp.community_id,
+               ha_a.name AS author,
+               hpd_p.permlink,
+               hpd.title,
+               hpd.body,
+               hcd.category,
+               hp.depth,
+               hp.promoted,
+               hp.payout,
+               hp.payout_at,
+               hp.is_paidout,
+               hp.children,
+               0 AS votes,
+               0 AS active_votes,
+               hp.created_at,
+               hp.updated_at,
+               hp.rshares,
+               hpd.json,
+               hp.is_hidden,
+               hp.is_grayed,
+               hp.total_votes,
+               hp.flag_weight,
+               ha_pa.name AS parent_author,
+               hpd_pp.permlink AS parent_permlink,
+               hp.curator_payout_value,
+               ha_ra.name AS root_author,
+               hpd_rp.permlink AS root_permlink,
+               rcd.category AS root_category,
+               hp.max_accepted_payout,
+               hp.percent_hbd,
+               hp.allow_replies,
+               hp.allow_votes,
+               hp.allow_curation_rewards,
+               hp.beneficiaries,
+               concat('/', rcd.category, '/@', ha_ra.name, '/', hpd_rp.permlink,
+                 case (rp.id)
+                   when hp.id then ''
+                   else concat('#@', ha_a.name, '/', hpd_p.permlink)
+                 end) AS url,
+               rpd.title AS root_title
+        FROM hive_posts hp
+        JOIN hive_posts rp ON rp.author_id = hp.root_author_id AND rp.permlink_id = hp.root_permlink_id
+        JOIN hive_post_data rpd ON rp.id = rpd.id
+        JOIN hive_accounts ha_a ON ha_a.id = hp.author_id
+        JOIN hive_permlink_data hpd_p ON hpd_p.id = hp.permlink_id
+        JOIN hive_post_data hpd ON hpd.id = hp.id
+        LEFT JOIN hive_category_data hcd ON hcd.id = hp.category_id
+        LEFT JOIN hive_category_data rcd ON rcd.id = rp.category_id
+        JOIN hive_accounts ha_pa ON ha_pa.id = hp.parent_author_id
+        JOIN hive_permlink_data hpd_pp ON hpd_pp.id = hp.parent_permlink_id
+        JOIN hive_accounts ha_ra ON ha_ra.id = hp.root_author_id
+        JOIN hive_permlink_data hpd_rp ON hpd_rp.id = hp.root_permlink_id;
+        """
+    db.query_no_return(sql)

def reset_autovac(db):
    """Initializes/resets per-table autovacuum/autoanalyze params.
...
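The url column added to the view rebuilds the condenser-style post URL: /<root_category>/@<root_author>/<root_permlink> for a root post, with #@<author>/<permlink> appended when the row is a reply, which is the post-url fix mentioned in the commit message. A minimal Python sketch of the same CASE logic; the function and argument names are made up for illustration only:

def post_url(root_category, root_author, root_permlink, author, permlink, is_root_post):
    """Mirrors the CASE expression behind vw_hive_posts.url (illustration only)."""
    url = "/{}/@{}/{}".format(root_category, root_author, root_permlink)
    if not is_root_post:
        # replies get a fragment that points at the comment itself
        url += "#@{}/{}".format(author, permlink)
    return url

# post_url("hive", "bob", "my-post", "alice", "re-my-post", False)
# -> "/hive/@bob/my-post#@alice/re-my-post"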
@@ -30,11 +30,11 @@ class PostDataCache(object):
        """
        values = []
        for k, data in cls._data.items():
-            title = "''" if not data['title'] else "'{}'".format(escape_characters(data['title']))
-            preview = "''" if not data['preview'] else "'{}'".format(escape_characters(data['preview']))
-            img_url = "''" if not data['img_url'] else "'{}'".format(escape_characters(data['img_url']))
-            body = "''" if not data['body'] else "'{}'".format(escape_characters(data['body']))
-            json = "'{}'" if not data['json'] else "'{}'".format(escape_characters(data['json']))
+            title = "''" if not data['title'] else "{}".format(escape_characters(data['title']))
+            preview = "''" if not data['preview'] else "{}".format(escape_characters(data['preview']))
+            img_url = "''" if not data['img_url'] else "{}".format(escape_characters(data['img_url']))
+            body = "''" if not data['body'] else "{}".format(escape_characters(data['body']))
+            json = "'{}'" if not data['json'] else "{}".format(escape_characters(data['json']))
            values.append("({},{},{},{},{},{})".format(k, title, preview, img_url, body, json))
        sql += ','.join(values)
        sql += """
...
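This call-site change works together with the rewritten escape_characters shown in the utils diff further down: the helper now returns a complete, quoted SQL literal, so the extra "'{}'" wrapping from the old code would have put a second pair of quotes around the value. A minimal sketch of how one VALUES row is built after the change, with made-up sample data and post id:

data = {'title': "50% off", 'body': "see my _other_ post"}   # sample values for illustration

title = "''" if not data['title'] else escape_characters(data['title'])
body = "''" if not data['body'] else escape_characters(data['body'])
# the new escaper already returns complete literals:
#   title -> E'50\045 off'
#   body  -> E'see my \137other\137 post'
row = "({},{},{})".format(7, title, body)   # hypothetical post id 7, trimmed column list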
@@ -29,7 +29,7 @@ class Tags(object):
        """
        values = []
        for tag in cls._tags:
-            values.append("('{}')".format(escape_characters(tag[1])))
+            values.append("({})".format(escape_characters(tag[1])))
            if len(values) >= limit:
                tag_query = str(sql)
                DB.query(tag_query.format(','.join(values)))
@@ -59,7 +59,7 @@ class Tags(object):
        """
        values = []
        for tag in cls._tags:
-            values.append("({}, '{}')".format(tag[0], escape_characters(tag[1])))
+            values.append("({}, {})".format(tag[0], escape_characters(tag[1])))
            if len(values) >= limit:
                tag_query = str(sql)
                DB.query(tag_query.format(','.join(values)))
...
@@ -44,49 +44,41 @@ async def load_posts_keyed(db, ids, truncate_body=0):
    sql = """
        SELECT hp.id,
               hp.community_id,
-              ha_a.name as author,
-              hpd_p.permlink as permlink,
-              hpd.title as title,
-              hpd.body as body,
-              hcd.category as category,
-              depth,
-              promoted,
-              payout,
-              payout_at,
-              is_paidout,
-              children,
-              0 as votes,
-              0 as active_votes,
+              hp.author,
+              hp.permlink,
+              hp.title,
+              hp.body,
+              hp.category,
+              hp.depth,
+              hp.promoted,
+              hp.payout,
+              hp.payout_at,
+              hp.is_paidout,
+              hp.children,
+              hp.votes,
+              hp.active_votes,
               hp.created_at,
-              updated_at,
-              rshares,
-              hpd.json as json,
-              is_hidden,
-              is_grayed,
-              total_votes,
-              flag_weight,
-              ha_pa.name as parent_author,
-              hpd_pp.permlink as parent_permlink,
-              curator_payout_value,
-              ha_ra.name as root_author,
-              hpd_rp.permlink as root_permlink,
-              max_accepted_payout,
-              percent_hbd,
-              allow_replies,
-              allow_votes,
-              allow_curation_rewards,
-              beneficiaries,
-              url,
-              root_title
-        FROM hive_posts hp
-        INNER JOIN hive_accounts ha_a ON ha_a.id = hp.author_id
-        INNER JOIN hive_permlink_data hpd_p ON hpd_p.id = hp.permlink_id
-        LEFT JOIN hive_post_data hpd ON hpd.id = hp.id
-        LEFT JOIN hive_category_data hcd ON hcd.id = hp.category_id
-        INNER JOIN hive_accounts ha_pa ON ha_pa.id = hp.parent_author_id
-        INNER JOIN hive_permlink_data hpd_pp ON hpd_pp.id = hp.parent_permlink_id
-        INNER JOIN hive_accounts ha_ra ON ha_ra.id = hp.root_author_id
-        INNER JOIN hive_permlink_data hpd_rp ON hpd_rp.id = hp.root_permlink_id
+              hp.updated_at,
+              hp.rshares,
+              hp.json as json,
+              hp.is_hidden,
+              hp.is_grayed,
+              hp.total_votes,
+              hp.flag_weight,
+              hp.parent_author,
+              hp.parent_permlink,
+              hp.curator_payout_value,
+              hp.root_author,
+              hp.root_permlink,
+              hp.max_accepted_payout,
+              hp.percent_hbd,
+              hp.allow_replies,
+              hp.allow_votes,
+              hp.allow_curation_rewards,
+              hp.beneficiaries,
+              hp.url,
+              hp.root_title
+        FROM vw_hive_posts hp
        WHERE hp.id IN :ids"""
    result = await db.query_all(sql, ids=tuple(ids))
@@ -214,13 +206,6 @@ def _condenser_post_object(row, truncate_body=0):
    post['body_length'] = len(row['body'])
    post['author_reputation'] = rep_to_raw(row['author_rep'])
-    post['root_author'] = row['root_author']
-    post['root_permlink'] = row['root_permlink']
-    post['allow_replies'] = row['allow_replies']
-    post['allow_votes'] = row['allow_votes']
-    post['allow_curation_rewards'] = row['allow_curation_rewards']
    if row['depth'] > 0:
        post['parent_author'] = row['parent_author']
        post['parent_permlink'] = row['parent_permlink']
...
@@ -3,6 +3,8 @@
import logging
import math
import decimal
+import time
from datetime import datetime
from pytz import utc
import ujson as json
@@ -13,14 +15,33 @@ NAI_MAP = {
    '@@000000037': 'VESTS',
}

+dct = {'0': 'a', '1': 'b', '2': 'c', '3': 'd', '4': 'e',
+       '5': 'f', '6': 'g', '7': 'h', '8': 'i', '9': 'j'}
+
+# convert special chars into their octal formats recognized by sql
+special_chars = {
+    "\\": "\\134",
+    "'": "\\047",
+    "%": "\\045",
+    "_": "\\137",
+    ":": "\\072"
+}
+
def escape_characters(text):
    """ Escape special characters """
-    ret = str(text)
-    ret = ret.replace("\\", "\\\\")
-    ret = ret.replace("'", "''")
-    ret = ret.replace("%", '%%')
-    ret = ret.replace("_", "\\_")
-    ret = ret.replace(":", "\\:")
+    if len(text.strip()) == 0:
+        return "'" + text + "'"
+
+    ret = "E'"
+    for ch in text:
+        try:
+            dw = special_chars[ch]
+            ret = ret + dw
+        except KeyError as k:
+            ret = ret + ch
+    ret = ret + "'"
    return ret

def vests_amount(value):
...
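The rewritten helper emits a PostgreSQL E'' (escape) string literal and substitutes each character listed in special_chars with its octal escape, so the server decodes the stored text back to the original characters; the old replace() chain could leave sequences such as %% and \_ in the saved text, which matches the body/title modification the commit message describes. Expected behaviour of the new function, based on the code above:

escape_characters("O'Brien")       # -> E'O\047Brien'
escape_characters("100% _real_")   # -> E'100\045 \137real\137'
escape_characters("C:\\temp")      # -> E'C\072\134temp'
escape_characters("   ")           # whitespace-only input -> '   ' (plain quoted literal)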
@@ -76,7 +76,7 @@ ADD_API_PYREST_TEST(${CMAKE_BINARY_DIR}/tests/tests_api ${CMAKE_CURRENT_SOURCE_D
ADD_API_PYREST_TEST(${CMAKE_BINARY_DIR}/tests/tests_api ${CMAKE_CURRENT_SOURCE_DIR}/tests_api hivemind 5000000 database_api )
ADD_API_PYREST_TEST(${CMAKE_BINARY_DIR}/tests/tests_api ${CMAKE_CURRENT_SOURCE_DIR}/tests_api hivemind 5000000 follow_api )
ADD_API_PYREST_TEST(${CMAKE_BINARY_DIR}/tests/tests_api ${CMAKE_CURRENT_SOURCE_DIR}/tests_api hivemind 5000000 hive_api )
+ADD_API_PYREST_TEST(${CMAKE_BINARY_DIR}/tests/tests_api ${CMAKE_CURRENT_SOURCE_DIR}/tests_api hivemind 5000000 tags_api )
...