Commit 8c232309 authored by Bartek Wrona

Merge branch 'date_corrections' into 'develop'

Fixes related to dates

See merge request !72
parents 0dfcad44 1fc83924
@@ -346,7 +346,7 @@ class DbState:
             cls._set_ver(17)
         if cls._ver == 17:
-            cls.db().query("INSERT INTO hive_accounts (name, created_at) VALUES ('', '1990-01-01T00:00:00') ON CONFLICT (name) DO NOTHING")
+            cls.db().query("INSERT INTO hive_accounts (name, created_at) VALUES ('', '1970-01-01T00:00:00') ON CONFLICT (name) DO NOTHING")
             cls.db().query("INSERT INTO hive_permlink_data (permlink) VALUES ('') ON CONFLICT (permlink) DO NOTHING")
             cls.db().query("INSERT INTO hive_category_data (category) VALUES ('') ON CONFLICT (category) DO NOTHING")
             cls._set_ver(18)
......
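The migration above replaces the fictitious 1990 date on the sentinel (empty-name) account with the Unix epoch. A minimal sketch of the same idempotent insert, assuming a SQLAlchemy engine and the hive_accounts table from this diff (the connection URL is a placeholder):

```python
import sqlalchemy as sa

# Placeholder connection URL; point this at your own database.
engine = sa.create_engine("postgresql:///hive")

EPOCH = "1970-01-01T00:00:00"  # the "no meaningful date" sentinel used throughout this MR

with engine.begin() as conn:
    # ON CONFLICT DO NOTHING makes the migration safe to re-run:
    # the row is inserted once and silently skipped on later runs.
    conn.execute(
        sa.text("INSERT INTO hive_accounts (name, created_at) "
                "VALUES ('', :ts) ON CONFLICT (name) DO NOTHING"),
        {"ts": EPOCH})
```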
@@ -92,8 +92,8 @@ def build_metadata():
         # core stats/indexes
         sa.Column('payout', sa.types.DECIMAL(10, 3), nullable=False, server_default='0'),
         sa.Column('pending_payout', sa.types.DECIMAL(10, 3), nullable=False, server_default='0'),
-        sa.Column('payout_at', sa.DateTime, nullable=False, server_default='1990-01-01'),
-        sa.Column('updated_at', sa.DateTime, nullable=False, server_default='1990-01-01'),
+        sa.Column('payout_at', sa.DateTime, nullable=False, server_default='1970-01-01'),
+        sa.Column('updated_at', sa.DateTime, nullable=False, server_default='1970-01-01'),
         sa.Column('is_paidout', BOOLEAN, nullable=False, server_default='0'),
         # ui flags/filters
@@ -119,7 +119,6 @@ def build_metadata():
         sa.Column('vote_rshares', sa.BigInteger, nullable=False, server_default='0'),
         sa.Column('net_votes', sa.Integer, nullable=False, server_default='0'),
         sa.Column('active', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'),
-        sa.Column('last_payout', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'),
         sa.Column('cashout_time', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'),
         sa.Column('max_cashout_time', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'),
         sa.Column('percent_hbd', sa.Integer, nullable=False, server_default='10000'),
@@ -392,7 +391,7 @@ def setup(db):
         "INSERT INTO hive_permlink_data (id, permlink) VALUES (0, '')",
         "INSERT INTO hive_category_data (id, category) VALUES (0, '')",
-        "INSERT INTO hive_accounts (id, name, created_at) VALUES (0, '', '1990-01-01T00:00:00')",
+        "INSERT INTO hive_accounts (id, name, created_at) VALUES (0, '', '1970-01-01T00:00:00')",
         "INSERT INTO hive_accounts (name, created_at) VALUES ('miners', '2016-03-24 16:05:00')",
         "INSERT INTO hive_accounts (name, created_at) VALUES ('null', '2016-03-24 16:05:00')",
......
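All schema defaults now agree on the epoch rather than a mix of 1970 and 1990. A sketch of how such a server-side default looks in SQLAlchemy, using a hypothetical table (only the server_default strings are taken from the diff):

```python
import sqlalchemy as sa

metadata = sa.MetaData()

# Hypothetical table; the server fills in '1970-01-01' whenever
# no explicit timestamp is supplied on INSERT.
example_posts = sa.Table(
    'example_posts', metadata,
    sa.Column('id', sa.Integer, primary_key=True),
    sa.Column('payout_at', sa.DateTime, nullable=False, server_default='1970-01-01'),
    sa.Column('updated_at', sa.DateTime, nullable=False, server_default='1970-01-01'),
)
```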
@@ -23,14 +23,17 @@ class Blocks:
     """Processes blocks, dispatches work, manages `hive_blocks` table."""
     blocks_to_flush = []
     ops_stats = {}
-    _head_block_date = None
+    _head_block_date = None     # timestamp of the last fully processed block ("previous block")
+    _current_block_date = None  # timestamp of the block currently being processed ("current block")

     def __init__(cls):
         head_date = cls.head_date()
         if(head_date == ''):
             cls._head_block_date = None
+            cls._current_block_date = None
         else:
             cls._head_block_date = head_date
+            cls._current_block_date = head_date

     @staticmethod
     def merge_ops_stats(od1, od2):
@@ -157,12 +160,15 @@ class Blocks:
         """Process a single block. Assumes a trx is open."""
         #pylint: disable=too-many-branches
         num = cls._push(block)
-        block_date = block['timestamp']
+        cls._current_block_date = block['timestamp']

+        # The head block date must point to the last imported block (not the current one) to conform to hived's
+        # behavior: operations processed by the node are included in the block currently being produced, so their
+        # processing time equals that of the last produced block. Unfortunately this does not hold for all
+        # operations, most likely for dates that used to come from FatNode, which supplemented them with its
+        # then-current head block; since that happened after block processing, it saw the later block (equal to
+        # _current_block_date here).
         if cls._head_block_date is None:
-            cls._head_block_date = block_date
+            cls._head_block_date = cls._current_block_date

         json_ops = []
         update_comment_pending_payouts = []
@@ -231,10 +237,10 @@ class Blocks:
         if is_initial_sync:
             if num in virtual_operations:
-                (vote_ops, comment_payout_stats) = Blocks.prepare_vops(Posts.comment_payout_ops, virtual_operations[num], cls._head_block_date)
+                (vote_ops, comment_payout_stats) = Blocks.prepare_vops(Posts.comment_payout_ops, virtual_operations[num], cls._current_block_date)
         else:
             vops = hived.get_virtual_operations(num)
-            (vote_ops, comment_payout_stats) = Blocks.prepare_vops(Posts.comment_payout_ops, vops, cls._head_block_date)
+            (vote_ops, comment_payout_stats) = Blocks.prepare_vops(Posts.comment_payout_ops, vops, cls._current_block_date)

         if vote_ops is not None:
             for k, v in vote_ops.items():
@@ -243,7 +249,7 @@ class Blocks:
         if Posts.comment_payout_ops:
             cls.ops_stats = Blocks.merge_ops_stats(cls.ops_stats, comment_payout_stats)

-        cls._head_block_date = block_date
+        cls._head_block_date = cls._current_block_date

         return num
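Taken together, these hunks introduce a two-timestamp protocol: regular operations are dated with the previous (head) block, virtual operations with the current one. A standalone sketch of that pattern, with hypothetical names and assuming blocks arrive as dicts with a 'timestamp' key:

```python
class BlockDates:
    """Hypothetical illustration of the two-timestamp pattern from this diff."""
    _head_block_date = None     # last fully processed block
    _current_block_date = None  # block being processed right now

    @classmethod
    def process(cls, block):
        cls._current_block_date = block['timestamp']
        if cls._head_block_date is None:
            # first block ever seen: no "previous" block exists yet
            cls._head_block_date = cls._current_block_date

        # regular operations are dated with the previous block's timestamp
        # (hived includes them in the block still being produced) ...
        regular_op_date = cls._head_block_date
        # ... while virtual operations carry the current block's timestamp
        virtual_op_date = cls._current_block_date

        # once the block is fully processed, it becomes the new head
        cls._head_block_date = cls._current_block_date
        return regular_op_date, virtual_op_date
```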
@@ -365,7 +371,7 @@ class Blocks:
         if post_ids:
             DB.query("DELETE FROM hive_post_tags WHERE post_id IN :ids", ids=post_ids)
             DB.query("DELETE FROM hive_posts WHERE id IN :ids", ids=post_ids)
-            DB.query("DELETE FROM hive_posts_data WHERE id IN :ids", ids=post_ids)
+            DB.query("DELETE FROM hive_post_data WHERE id IN :ids", ids=post_ids)

         DB.query("DELETE FROM hive_payments WHERE block_num = :num", num=num)
         DB.query("DELETE FROM hive_blocks WHERE num = :num", num=num)
......
@@ -161,7 +161,6 @@ class Posts:
           payout = COALESCE( CAST( data_source.payout as DECIMAL ), ihp.payout ),
           pending_payout = COALESCE( CAST( data_source.pending_payout as DECIMAL ), ihp.pending_payout ),
           payout_at = COALESCE( CAST( data_source.payout_at as TIMESTAMP ), ihp.payout_at ),
-          last_payout = COALESCE( CAST( data_source.last_payout as TIMESTAMP ), ihp.last_payout ),
           cashout_time = COALESCE( CAST( data_source.cashout_time as TIMESTAMP ), ihp.cashout_time ),
           is_paidout = COALESCE( CAST( data_source.is_paidout as BOOLEAN ), ihp.is_paidout )
         FROM
@@ -176,7 +175,6 @@ class Posts:
             t.payout,
             t.pending_payout,
             t.payout_at,
-            t.last_payout,
             t.cashout_time,
             t.is_paidout
           from
@@ -194,7 +192,6 @@ class Posts:
             payout,
             pending_payout,
             payout_at,
-            last_payout,
             cashout_time,
             is_paidout)
         INNER JOIN hive_accounts ha_a ON ha_a.name = t.author
@@ -241,7 +238,6 @@ class Posts:
         pending_payout = None
         payout_at = None
-        last_payout = None
         cashout_time = None
         is_paidout = None
@@ -300,14 +296,12 @@ class Posts:
         # Calculations of all dates
         if ( is_paidout is not None ):
             payout_at = date
-            last_payout = date
             cashout_time = "1969-12-31T23:59:59"
         else:
             if ( total_payout_value is not None ):
                 payout_at = date # Here should be `cashout_time`
-                last_payout = date

-        cls._comment_payout_ops.append("('{}', '{}', {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {})".format(
+        cls._comment_payout_ops.append("('{}', '{}', {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {})".format(
             author,
             permlink,
             "NULL" if ( total_payout_value is None ) else ( "'{}'".format( legacy_amount(total_payout_value) ) ),
@@ -320,7 +314,6 @@ class Posts:
             "NULL" if ( pending_payout is None ) else pending_payout,
             "NULL" if ( payout_at is None ) else ( "'{}'::timestamp".format( payout_at ) ),
-            "NULL" if ( last_payout is None ) else ( "'{}'::timestamp".format( last_payout ) ),
             "NULL" if ( cashout_time is None ) else ( "'{}'::timestamp".format( cashout_time ) ),
             "NULL" if ( is_paidout is None ) else is_paidout ))
......
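With last_payout gone from this path, the date handling in the payout ops reduces to two fields. A hypothetical condensation of the remaining logic (names borrowed from the diff; not the module's actual API):

```python
def calc_payout_dates(date, is_paidout, total_payout_value):
    """Sketch of the date logic left after removing last_payout."""
    payout_at = None
    cashout_time = None
    if is_paidout is not None:
        payout_at = date
        # sentinel meaning "no future cashout": one second before the epoch
        cashout_time = "1969-12-31T23:59:59"
    elif total_payout_value is not None:
        payout_at = date  # per the in-diff note, arguably should be cashout_time
    return payout_at, cashout_time
```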
@@ -44,7 +44,6 @@ def prepare_vops(vops_by_block):
     for blockNum, blockDict in vops_by_block.items():
         vopsList = blockDict['ops']
-        date = blockDict['timestamp']
         preparedVops[blockNum] = vopsList

     return preparedVops
......
# generated by setup.py
# contents will be overwritten
VERSION = '0.0.1'
-GIT_REVISION = '5c0b832'
+GIT_REVISION = '0dfcad4'
@@ -28,7 +28,7 @@ INSERT INTO hive_db_version (version, notes) VALUES ('1.0', 'https://gitlab.sync
 -- add special author value, empty author to accounts table
 -- RAISE NOTICE 'add special author value, empty author to accounts table';
-INSERT INTO hive_accounts (name, created_at) VALUES ('', '1990-01-01T00:00:00');
+INSERT INTO hive_accounts (name, created_at) VALUES ('', '1970-01-01T00:00:00');

 -- Table to hold permlink dictionary, permlink is unique
 -- RAISE NOTICE 'Table to hold permlink dictionary, permlink is unique';
@@ -117,7 +117,6 @@ CREATE TABLE IF NOT EXISTS hive_posts_new (
     vote_rshares BIGINT DEFAULT '0',
     net_votes INT DEFAULT '0',
     active TIMESTAMP DEFAULT '1970-01-01T00:00:00',
-    last_payout TIMESTAMP DEFAULT '1970-01-01T00:00:00',
     cashout_time TIMESTAMP DEFAULT '1970-01-01T00:00:00',
     max_cashout_time TIMESTAMP DEFAULT '1970-01-01T00:00:00',
     reward_weight INT DEFAULT '0',
......
@@ -7,8 +7,8 @@ def client():
     return SteemClient(url='https://api.hive.blog')

 def test_list_comments_by_cashout_time(client):
-    reference_data = await client.list_comments({"start":["1990-01-01T00:00:00","steemit","firstpost"],"limit":10,"order":"by_cashout_time"})
-    test_data = await list_comments(["1990-01-01T00:00:00","steemit","firstpost"],10,"by_cashout_time")
+    reference_data = await client.list_comments({"start":["1970-01-01T00:00:00","steemit","firstpost"],"limit":10,"order":"by_cashout_time"})
+    test_data = await list_comments(["1970-01-01T00:00:00","steemit","firstpost"],10,"by_cashout_time")
     assert reference_data
     assert test_data
     assert len(reference_data) == len(test_data)
......
-Subproject commit 855f525d497119092d24bec91c1d29648fc04c2f
+Subproject commit 3cca13717750c6032f2a1efbaae6f1bdf0d151c8