Commit 8e310a8b authored by Andrzej Lisak

[ABW]: merge with latest develop

parents d90efed9 b8adfacb
2 merge requests: !456 Release candidate v1 24, !410 more small changes, get_discussions_by_blog reusing bridge SQL function
@@ -586,7 +586,6 @@ def setup(db):
    "condenser_api_post_ex_type.sql",
    "condenser_get_blog.sql",
    "condenser_get_content.sql",
-   "condenser_get_discussions_by_blog.sql",
    "condenser_tags.sql",
    "condenser_follows.sql",
    "hot_and_trends.sql",
......
@@ -4,19 +4,20 @@ CREATE OR REPLACE FUNCTION bridge_get_account_posts_by_blog(
  in _account VARCHAR,
  in _author VARCHAR,
  in _permlink VARCHAR,
- in _limit INTEGER
+ in _limit INTEGER,
+ in _bridge_api BOOLEAN
)
RETURNS SETOF bridge_api_post
AS
$function$
DECLARE
- __post_id INTEGER := 0;
- __account_id INTEGER := find_account_id( _account, True );
+ __post_id INTEGER;
+ __account_id INTEGER;
  __created_at TIMESTAMP;
BEGIN
- IF _permlink <> '' THEN
-   __post_id = find_comment_id( _author, _permlink, True );
+ __account_id = find_account_id( _account, True );
+ __post_id = find_comment_id( _author, _permlink, True );
  IF __post_id <> 0 THEN
    SELECT hfc.created_at INTO __created_at
    FROM hive_feed_cache hfc
    WHERE hfc.account_id = __account_id AND hfc.post_id = __post_id;
@@ -65,16 +66,18 @@ BEGIN
  (
    SELECT hfc.post_id, hfc.created_at
    FROM hive_feed_cache hfc
-   WHERE hfc.account_id = __account_id AND (__post_id = 0 OR hfc.created_at <= __created_at)
-   AND NOT EXISTS (SELECT NULL FROM hive_posts hp
-                   WHERE hp.id = hfc.post_id AND hp.counter_deleted = 0 AND hp.depth = 0 AND hp.community_id IS NOT NULL
-                   AND NOT EXISTS (SELECT NULL FROM hive_reblogs hr WHERE hr.blogger_id = __account_id AND hr.post_id = hp.id)
+   WHERE hfc.account_id = __account_id AND ( __post_id = 0 OR hfc.created_at < __created_at OR ( hfc.created_at = __created_at AND hfc.post_id < __post_id ) )
+   AND ( NOT _bridge_api OR
+         NOT EXISTS (SELECT NULL FROM hive_posts hp1
+                     WHERE hp1.id = hfc.post_id AND hp1.counter_deleted = 0 AND hp1.depth = 0 AND hp1.community_id IS NOT NULL
+                     AND NOT EXISTS (SELECT NULL FROM hive_reblogs hr WHERE hr.blogger_id = __account_id AND hr.post_id = hp1.id)
+         )
   )
-   ORDER BY created_at DESC, post_id DESC
+   ORDER BY hfc.created_at DESC, hfc.post_id DESC
    LIMIT _limit
- )T ON hp.id = T.post_id
- ORDER BY T.created_at DESC, T.post_id DESC
- ;
+ ) blog ON hp.id = blog.post_id
+ ORDER BY blog.created_at DESC, blog.post_id DESC
+ LIMIT _limit;
END
$function$
language plpgsql STABLE;
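
Review note: the paging bound is now a strict keyset over (created_at, post_id), matching the ORDER BY ... DESC, ... DESC pair, so posts sharing a created_at timestamp are no longer duplicated or skipped across pages; the community/reblog filter only applies when _bridge_api is true. A minimal sketch of driving the reworked function from psql (account and permlink values are hypothetical):

    -- first page: empty author/permlink leaves __post_id = 0, so no upper bound applies
    SELECT * FROM bridge_get_account_posts_by_blog( 'alice', '', '', 20, True );

    -- next page: pass the author/permlink of the last row returned; the keyset
    -- resumes strictly after that row in (created_at, post_id) order
    SELECT * FROM bridge_get_account_posts_by_blog( 'alice', 'alice', 'last-permlink', 20, True );
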
DROP TYPE IF EXISTS get_discussions_post CASCADE;
CREATE TYPE get_discussions_post AS (
id INT,
community_id INT,
author VARCHAR(16),
permlink VARCHAR(255),
author_rep BIGINT,
title VARCHAR(512),
body TEXT,
category VARCHAR(255),
depth SMALLINT,
promoted DECIMAL(10, 3),
payout DECIMAL(10, 3),
payout_at TIMESTAMP,
pending_payout DECIMAL(10, 3),
is_paidout BOOLEAN,
children INT,
votes INT,
active_votes INT,
created_at TIMESTAMP,
updated_at TIMESTAMP,
rshares NUMERIC,
json TEXT,
is_hidden BOOLEAN,
is_grayed BOOLEAN,
total_votes BIGINT,
parent_author VARCHAR(16),
parent_permlink_or_category VARCHAR(255),
curator_payout_value VARCHAR(30),
root_author VARCHAR(16),
root_permlink VARCHAR(255),
max_accepted_payout VARCHAR(30),
percent_hbd INT,
allow_replies BOOLEAN,
allow_votes BOOLEAN,
allow_curation_rewards BOOLEAN,
beneficiaries JSON,
url TEXT,
root_title VARCHAR(512)
);
DROP FUNCTION IF EXISTS get_created_at_for_post;
CREATE OR REPLACE FUNCTION get_created_at_for_post(
in _author hive_accounts.name%TYPE,
in _permlink hive_permlink_data.permlink%TYPE
)
RETURNS TIMESTAMP
AS
$function$
DECLARE
__post_id INT;
__timestamp TIMESTAMP;
BEGIN
__post_id = find_comment_id(_author, _permlink, False);
IF __post_id = 0 THEN
RETURN current_timestamp;
END IF;
SELECT INTO __timestamp
created_at
FROM
hive_posts hp
WHERE
hp.id = __post_id;
RETURN __timestamp;
END
$function$
language 'plpgsql';
DROP FUNCTION IF EXISTS get_discussions_by_blog;
CREATE OR REPLACE FUNCTION get_discussions_by_blog(
in _tag hive_accounts.name%TYPE,
in _start_author hive_accounts.name%TYPE,
in _start_permlink hive_permlink_data.permlink%TYPE,
in _limit INT
)
RETURNS SETOF get_discussions_post
AS
$function$
DECLARE
__created_at TIMESTAMP;
BEGIN
__created_at = get_created_at_for_post(_start_author, _start_permlink);
RETURN QUERY SELECT
hp.id,
hp.community_id,
hp.author,
hp.permlink,
hp.author_rep,
hp.title,
hp.body,
hp.category,
hp.depth,
hp.promoted,
hp.payout,
hp.payout_at,
hp.pending_payout,
hp.is_paidout,
hp.children,
hp.votes,
hp.active_votes,
hp.created_at,
hp.updated_at,
hp.rshares,
hp.json,
hp.is_hidden,
hp.is_grayed,
hp.total_votes,
hp.parent_author,
hp.parent_permlink_or_category,
hp.curator_payout_value,
hp.root_author,
hp.root_permlink,
hp.max_accepted_payout,
hp.percent_hbd,
hp.allow_replies,
hp.allow_votes,
hp.allow_curation_rewards,
hp.beneficiaries,
hp.url,
hp.root_title
FROM hive_posts_view hp
INNER JOIN
(
SELECT
post_id
FROM
hive_feed_cache hfc
INNER JOIN hive_accounts hfc_ha ON hfc.account_id = hfc_ha.id
INNER JOIN hive_posts hfc_hp ON hfc.post_id = hfc_hp.id
WHERE
hfc_ha.name = _tag
AND hfc_hp.created_at <= __created_at
ORDER BY
hfc_hp.created_at DESC
LIMIT _limit
) ds on ds.post_id = hp.id
ORDER BY hp.created_at DESC;
END
$function$
language 'plpgsql';
\ No newline at end of file
@@ -44,7 +44,6 @@ for sql in postgres_handle_view_changes.sql \
    condenser_api_post_ex_type.sql \
    condenser_get_blog.sql \
    condenser_get_content.sql \
-   condenser_get_discussions_by_blog.sql \
    condenser_tags.sql \
    condenser_follows.sql \
    hot_and_trends.sql \
......
@@ -280,7 +280,7 @@ async def get_account_posts(context, sort:str, account:str, start_author:str='',
    sql = None
    account_posts = True # set when only posts (or reblogs) of given account are supposed to be in results
    if sort == 'blog':
-       sql = "SELECT * FROM bridge_get_account_posts_by_blog( (:account)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::INTEGER )"
+       sql = "SELECT * FROM bridge_get_account_posts_by_blog( (:account)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::INTEGER, True )"
    elif sort == 'feed':
        sql = "SELECT * FROM bridge_get_by_feed_with_reblog((:account)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::INTEGER)"
    elif sort == 'posts':
......
@@ -291,12 +291,10 @@ async def get_discussions_by_blog(context, tag: str = None, start_author: str =
    valid_permlink(start_permlink, allow_empty=True)
    valid_limit(limit, 100, 20)
-   sql = """
-       SELECT * FROM get_discussions_by_blog(:author, :start_author, :start_permlink, :limit)
-   """
+   sql = "SELECT * FROM bridge_get_account_posts_by_blog( (:account)::VARCHAR, (:author)::VARCHAR, (:permlink)::VARCHAR, (:limit)::INTEGER, False )"
    db = context['db']
-   result = await db.query_all(sql, author=tag, start_author=start_author, start_permlink=start_permlink, limit=limit)
+   result = await db.query_all(sql, account=tag, author=start_author, permlink=start_permlink, limit=limit)
    posts_by_id = []
    for row in result:
......
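
Review note: condenser_api.get_discussions_by_blog now reuses the bridge SQL function with _bridge_api = False, retiring the separate get_discussions_by_blog SQL path. For illustration, a call such as get_discussions_by_blog(tag='funny', start_author='', start_permlink='', limit=20) (values hypothetical) resolves to:

    -- tag binds to _account; start_author/start_permlink bind to _author/_permlink
    SELECT * FROM bridge_get_account_posts_by_blog( 'funny', '', '', 20, False );
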
@@ -22,7 +22,7 @@ STRINGS = {
    # personal
    NotifyType.error: 'error: <payload>',
-   NotifyType.reblog: '<src> resteemed your post',
+   NotifyType.reblog: '<src> reblogged your post',
    NotifyType.follow: '<src> followed you',
    NotifyType.reply: '<src> replied to your post',
    NotifyType.reply_comment: '<src> replied to your comment',
......
@@ -3,7 +3,6 @@ from hive.server.common.helpers import (
    return_error_info,
    valid_account,
    valid_permlink)
-from hive.server.database_api.methods import find_votes_impl, VotesPresentation

@return_error_info
async def get_discussion(context, author: str, permlink: str, observer=None):
......
***block 4999999***
custom_json_operation("[\"reblog\",{\"account\":\"funny\",\"author\":\"steemit\",\"permlink\":\"firstpost\"}]") - very old post
custom_json_operation("[\"reblog\",{\"account\":\"funny\",\"author\":\"steak\",\"permlink\":\"streak-test\"}]") - deleted post (should not be reblogged)
custom_json_operation("[\"reblog\",{\"account\":\"funny\",\"author\":\"dollarvigilante\",\"permlink\":\"another-billionaire-warns-of-catastrophic-depths-not-seen-in-5-000-years-and-emphasizes-gold\"}]") - fresh post
\ No newline at end of file
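
A possible post-sync sanity check for this mock data; the expected outcome is an assumption based on the notes above, and the hive_posts.author_id/permlink_id column names are assumed from the normalized schema (hive_accounts.name, hive_permlink_data.permlink, and hive_reblogs.blogger_id/post_id appear elsewhere in this diff):

    -- reblogs recorded for account 'funny' after replaying the mocked custom_json ops;
    -- expect steemit/firstpost and the dollarvigilante post, but not the
    -- deleted steak/streak-test post
    SELECT author_ha.name AS author, hpd.permlink
    FROM hive_reblogs hr
    JOIN hive_accounts blogger_ha ON blogger_ha.id = hr.blogger_id
    JOIN hive_posts hp ON hp.id = hr.post_id
    JOIN hive_accounts author_ha ON author_ha.id = hp.author_id
    JOIN hive_permlink_data hpd ON hpd.id = hp.permlink_id
    WHERE blogger_ha.name = 'funny';
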
{
"4999999": {
"previous": "004c4b3e03ea2eac2494790786bfb9e41a8669d9",
"timestamp": "2016-09-15T19:47:18",
"witness": "",
"transaction_merkle_root": "",
"extensions": [],
"witness_signature": "",
"transactions": [],
"block_id": "004c4b3fc6a8735b4ab5433d59f4526e4a042644",
"signing_key": "",
"transaction_ids": []
},
"5000000": {
"previous": "004c4b3fc6a8735b4ab5433d59f4526e4a042644",
"timestamp": "2016-09-15T19:47:21",
"witness": "initminer",
"transaction_merkle_root": "",
"extensions": [],
"witness_signature": "",
"transactions": [
{
"ref_block_num": 100001,
"ref_block_prefix": 1,
"expiration": "2020-03-23T12:17:00",
"operations": [
{
"type": "custom_json_operation",
"value": {
"required_auths": [],
"required_posting_auths": [
"funny"
],
"id": "follow",
"json": "[\"reblog\",{\"account\":\"funny\",\"author\":\"steemit\",\"permlink\":\"firstpost\"}]"
}
},
{
"type": "custom_json_operation",
"value": {
"required_auths": [],
"required_posting_auths": [
"funny"
],
"id": "follow",
"json": "[\"reblog\",{\"account\":\"funny\",\"author\":\"steak\",\"permlink\":\"streak-test\"}]"
}
},
{
"type": "custom_json_operation",
"value": {
"required_auths": [],
"required_posting_auths": [
"funny"
],
"id": "follow",
"json": "[\"reblog\",{\"account\":\"funny\",\"author\":\"dollarvigilante\",\"permlink\":\"another-billionaire-warns-of-catastrophic-depths-not-seen-in-5-000-years-and-emphasizes-gold\"}]"
}
}
]
}
],
"block_id": "004c4b40245ffb07380a393fb2b3d841b76cdaec",
"signing_key": "",
"transaction_ids": []
},
"5000001": {
"previous": "004c4b40245ffb07380a393fb2b3d841b76cdaec",
"timestamp": "2016-09-15T19:47:24",
"witness": "initminer",
"transaction_merkle_root": "",
"extensions": [],
"witness_signature": "",
"transactions": [],
"block_id": "004c4b4100000000000000000000000000000000",
"signing_key": "",
"transaction_ids": []
},
"5000002": {
"previous": "004c4b4100000000000000000000000000000000",
"timestamp": "2016-09-15T19:47:27",
"witness": "initminer",
"transaction_merkle_root": "0000000000000000000000000000000000000000",
"extensions": [],
"witness_signature": "",
"transactions": [],
"block_id": "004c4b4200000000000000000000000000000000",
"signing_key": "",
"transaction_ids": []
},
"5000003": {
"previous": "004c4b4200000000000000000000000000000000",
"timestamp": "2016-09-15T19:47:30",
"witness": "initminer",
"transaction_merkle_root": "0000000000000000000000000000000000000000",
"extensions": [],
"witness_signature": "",
"transactions": [],
"block_id": "004c4b4300000000000000000000000000000000",
"signing_key": "",
"transaction_ids": []
},
"5000004": {
"previous": "004c4b4300000000000000000000000000000000",
"timestamp": "2016-09-15T19:47:33",
"witness": "initminer",
"transaction_merkle_root": "0000000000000000000000000000000000000000",
"extensions": [],
"witness_signature": "",
"transactions": [],
"block_id": "004c4b4400000000000000000000000000000000",
"signing_key": "",
"transaction_ids": []
},
"5000005": {
"previous": "004c4b4400000000000000000000000000000000",
"timestamp": "2016-09-15T19:47:36",
"witness": "initminer",
"transaction_merkle_root": "0000000000000000000000000000000000000000",
"extensions": [],
"witness_signature": "",
"transactions": [],
"block_id": "004c4b4500000000000000000000000000000000",
"signing_key": "",
"transaction_ids": []
},
"5000006": {
"previous": "004c4b4500000000000000000000000000000000",
"timestamp": "2016-09-15T19:47:39",
"witness": "initminer",
"transaction_merkle_root": "0000000000000000000000000000000000000000",
"extensions": [],
"witness_signature": "",
"transactions": [],
"block_id": "004c4b4600000000000000000000000000000000",
"signing_key": "",
"transaction_ids": []
},
"5000007": {
"previous": "004c4b4600000000000000000000000000000000",
"timestamp": "2016-09-15T19:47:42",
"witness": "initminer",
"transaction_merkle_root": "0000000000000000000000000000000000000000",
"extensions": [],
"witness_signature": "",
"transactions": [],
"block_id": "004c4b4700000000000000000000000000000000",
"signing_key": "",
"transaction_ids": []
},
"5000008": {
"previous": "004c4b4700000000000000000000000000000000",
"timestamp": "2016-09-15T19:47:45",
"witness": "initminer",
"transaction_merkle_root": "0000000000000000000000000000000000000000",
"extensions": [],
"witness_signature": "",
"transactions": [],
"block_id": "004c4b4800000000000000000000000000000000",
"signing_key": "",
"transaction_ids": []
},
"5000009": {
"previous": "004c4b4800000000000000000000000000000000",
"timestamp": "2016-09-15T19:47:48",
"witness": "initminer",
"transaction_merkle_root": "0000000000000000000000000000000000000000",
"extensions": [],
"witness_signature": "",
"transactions": [],
"block_id": "004c4b4900000000000000000000000000000000",
"signing_key": "",
"transaction_ids": []
},
"5000010": {
"previous": "004c4b4900000000000000000000000000000000",
"timestamp": "2016-09-15T19:47:51",
"witness": "initminer",
"transaction_merkle_root": "0000000000000000000000000000000000000000",
"extensions": [],
"witness_signature": "",
"transactions": [],
"block_id": "004c4b4a00000000000000000000000000000000",
"signing_key": "",
"transaction_ids": []
},
"5000011": {
"previous": "004c4b4a00000000000000000000000000000000",
"timestamp": "2016-09-15T19:47:54",
"witness": "initminer",
"transaction_merkle_root": "0000000000000000000000000000000000000000",
"extensions": [],
"witness_signature": "",
"transactions": [],
"block_id": "004c4b4b00000000000000000000000000000000",
"signing_key": "",
"transaction_ids": []
},
"5000012": {
"previous": "004c4b4b00000000000000000000000000000000",
"timestamp": "2016-09-15T19:47:57",
"witness": "initminer",
"transaction_merkle_root": "0000000000000000000000000000000000000000",
"extensions": [],
"witness_signature": "",
"transactions": [],
"block_id": "004c4b4c00000000000000000000000000000000",
"signing_key": "",
"transaction_ids": []
},
"5000013": {
"previous": "004c4b4c00000000000000000000000000000000",
"timestamp": "2016-09-15T19:48:00",
"witness": "initminer",
"transaction_merkle_root": "0000000000000000000000000000000000000000",
"extensions": [],
"witness_signature": "",
"transactions": [],
"block_id": "004c4b4d00000000000000000000000000000000",
"signing_key": "",
"transaction_ids": []
},
"5000014": {
"previous": "004c4b4d00000000000000000000000000000000",
"timestamp": "2016-09-15T19:48:03",
"witness": "initminer",
"transaction_merkle_root": "0000000000000000000000000000000000000000",
"extensions": [],
"witness_signature": "",
"transactions": [],
"block_id": "004c4b4e00000000000000000000000000000000",
"signing_key": "",
"transaction_ids": []
},
"5000015": {
"previous": "004c4b4e00000000000000000000000000000000",
"timestamp": "2016-09-15T19:48:06",
"witness": "initminer",
"transaction_merkle_root": "0000000000000000000000000000000000000000",
"extensions": [],
"witness_signature": "",
"transactions": [],
"block_id": "004c4b4f00000000000000000000000000000000",
"signing_key": "",
"transaction_ids": []
},
"5000016": {
"previous": "004c4b4f00000000000000000000000000000000",
"timestamp": "2016-09-15T19:48:09",
"witness": "initminer",
"transaction_merkle_root": "0000000000000000000000000000000000000000",
"extensions": [],
"witness_signature": "",
"transactions": [],
"block_id": "004c4b5000000000000000000000000000000000",
"signing_key": "",
"transaction_ids": []
},
"5000017": {
"previous": "004c4b5000000000000000000000000000000000",
"timestamp": "2016-09-15T19:48:12",
"witness": "initminer",
"transaction_merkle_root": "0000000000000000000000000000000000000000",
"extensions": [],
"witness_signature": "",
"transactions": [],
"block_id": "004c4b5100000000000000000000000000000000",
"signing_key": "",
"transaction_ids": []
}
}
\ No newline at end of file
#!/bin/bash

set -euo pipefail

# For debug only!
# RUNNER_HIVEMIND_SYNC_MAX_BLOCK=10000
# RUNNER_HIVED_URL='{"default":"http://hived-node:8091"}'
# RUNNER_HIVED_URL='{"default":"http://172.17.0.1:8091"}'

hive_sync() {
    # Start hive sync process
    cat << EOF
Starting hive sync using hived url: ${RUNNER_HIVED_URL}.
Max sync block is: ${RUNNER_HIVEMIND_SYNC_MAX_BLOCK}.
EOF

    USER=${RUNNER_POSTGRES_APP_USER}:${RUNNER_POSTGRES_APP_USER_PASSWORD}
    OPTIONS="host=${RUNNER_POSTGRES_HOST}&port=${RUNNER_POSTGRES_PORT}"
    DATABASE_URL="postgresql://${USER}@/${HIVEMIND_DB_NAME}?${OPTIONS}"

    hive sync \
        --log-mask-sensitive-data \
        --pid-file hive_sync.pid \
        --test-max-block=${RUNNER_HIVEMIND_SYNC_MAX_BLOCK} \
        --exit-after-sync \
        --test-profile=False \
        --steemd-url "${RUNNER_HIVED_URL}" \
        --prometheus-port 11011 \
        --database-url "${DATABASE_URL}" \
-       --mock-block-data-path mock_data/block_data/follow_op/mock_block_data_follow.json mock_data/block_data/community_op/mock_block_data_community.json \
+       --mock-block-data-path mock_data/block_data/follow_op/mock_block_data_follow.json mock_data/block_data/community_op/mock_block_data_community.json mock_data/block_data/reblog_op/mock_block_data_reblog.json \
        --community-start-block 4999998 \
        2>&1 | tee -i hivemind-sync.log
}

hive_sync
#!/bin/bash

set -e
set -o pipefail

HIVEMIND_DB_NAME=$1
HIVEMIND_POSTGRESQL_CONNECTION_STRING=$2
HIVEMIND_SOURCE_HIVED_URL=$3
HIVEMIND_MAX_BLOCK=$4
HIVEMIND_HTTP_PORT=$5
HIVEMIND_ENABLE_DB_MONITORING=${6:-yes}

PYTHONUSERBASE=./local-site

DB_NAME=${HIVEMIND_DB_NAME//-/_}
DB_NAME=${DB_NAME//\[/_}
DB_NAME=${DB_NAME//]/_}
DB_URL=$HIVEMIND_POSTGRESQL_CONNECTION_STRING/$DB_NAME

echo Corrected db name $DB_NAME
echo Corrected db url $DB_URL

# Reuse DB_NAME as name of symbolic link pointing local hive "binary".
HIVE_NAME=$DB_NAME

if [ -f hive_sync.pid ]; then
    kill -SIGINT `cat hive_sync.pid` || true;
    rm hive_sync.pid;
fi

kill -SIGINT `pgrep -f "$HIVE_NAME sync"` || true;
sleep 5
kill -9 `pgrep -f "$HIVE_NAME sync"` || true;

kill -SIGINT `pgrep -f "$HIVE_NAME server"` || true;
sleep 5
kill -9 `pgrep -f "$HIVE_NAME server"` || true;

fuser $HIVEMIND_HTTP_PORT/tcp -k -INT || true
sleep 5
fuser $HIVEMIND_HTTP_PORT/tcp -k -KILL || true
sleep 5

ls -l dist/*
rm -rf ./local-site
mkdir -p `python3 -m site --user-site`
python3 setup.py install --user --force
ln -sf ./local-site/bin/hive $HIVE_NAME
./$HIVE_NAME -h

echo Attempting to recreate database $DB_NAME
psql -U $POSTGRES_USER -h localhost -d postgres -c "DROP DATABASE IF EXISTS $DB_NAME;"
if [ "$HIVEMIND_ENABLE_DB_MONITORING" = "yes" ]; then
    psql -U $POSTGRES_USER -h localhost -d postgres -c "CREATE DATABASE $DB_NAME TEMPLATE template_monitoring;"
else
    psql -U $POSTGRES_USER -h localhost -d postgres -c "CREATE DATABASE $DB_NAME;"
fi

echo Attempting to start hive sync using hived node: $HIVEMIND_SOURCE_HIVED_URL . Max sync block is: $HIVEMIND_MAX_BLOCK
echo Attempting to access database $DB_URL

-./$HIVE_NAME sync --pid-file hive_sync.pid --test-max-block=$HIVEMIND_MAX_BLOCK --exit-after-sync --test-profile=False --steemd-url "$HIVEMIND_SOURCE_HIVED_URL" --prometheus-port 11011 --database-url $DB_URL --mock-block-data-path mock_data/block_data/follow_op/mock_block_data_follow.json mock_data/block_data/community_op/mock_block_data_community.json --community-start-block 4999998 2>&1 | tee -i hivemind-sync.log
+./$HIVE_NAME sync --pid-file hive_sync.pid --test-max-block=$HIVEMIND_MAX_BLOCK --exit-after-sync --test-profile=False --steemd-url "$HIVEMIND_SOURCE_HIVED_URL" --prometheus-port 11011 --database-url $DB_URL --mock-block-data-path mock_data/block_data/follow_op/mock_block_data_follow.json mock_data/block_data/community_op/mock_block_data_community.json mock_data/block_data/reblog_op/mock_block_data_reblog.json --community-start-block 4999998 2>&1 | tee -i hivemind-sync.log

rm hive_sync.pid
-Subproject commit eb454fc8250a988c1c5a4440a3d138b8f3941232
+Subproject commit 88c50bc23cece0e6ba9a80018d689a3eeea27e18