
Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (13)
Showing 540 additions and 173 deletions
......@@ -407,7 +407,7 @@ replay_with_keyauths:
variables:
PG_ACCESS: "host all all 127.0.0.1/32 trust"
script:
- /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=$CI_PROJECT_DIR/scripts/maintenance-scripts/keyauth.sh
- /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=$CI_PROJECT_DIR/tests/integration/state_provider/run_replay_with_keyauth.sh
artifacts:
paths:
- "replay_with_keyauths.log"
......@@ -417,6 +417,27 @@ replay_with_keyauths:
- public-runner-docker
- hived-for-tests
replay_with_json_metadata:
extends: .job-defaults
image:
name: $HAF_IMAGE_NAME
entrypoint: [""]
stage: build_and_test_phase_2
needs:
- job: haf_image_build
artifacts: true
variables:
PG_ACCESS: "host all all 127.0.0.1/32 trust"
script:
- /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=$CI_PROJECT_DIR/tests/integration/state_provider/run_replay_with_json_metadata.sh
artifacts:
paths:
- "replay_with_json_metadata.log"
- "node_logs.log"
- "node_logs1.log"
tags:
- public-runner-docker
- hived-for-tests
replay_with_app:
extends: .job-defaults
......
Subproject commit 662544878f1aafaec4e603e2c5b9d195a23fddd4
Subproject commit 2760a93ce272d2364677db0baacf1c5afe41403d
include:
- project: 'hive/hive'
ref: 706141b2ee0991c43cb8f544ec5192016adbc1d6 #develop
ref: 2760a93ce272d2364677db0baacf1c5afe41403d #develop
file: '/scripts/ci-helpers/prepare_data_image_job.yml'
.prepare_haf_image:
......
#!/bin/bash
set -e
set -o pipefail
SCRIPTDIR="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
export DB_NAME=haf_block_log
export DB_ADMIN="haf_admin"
process_blocks() {
psql -w -d $DB_NAME -v ON_ERROR_STOP=on -U $DB_ADMIN -f "${SCRIPTDIR}/keyauth_live_schema.sql"
psql -w -d $DB_NAME -v ON_ERROR_STOP=on -U $DB_ADMIN -c "CALL keyauth_live.main('keyauth_live', 0, 5000000, 500000);"
}
process_blocks $PROCESS_BLOCK_LIMIT
\ No newline at end of file
......@@ -39,6 +39,16 @@ CREATE OR REPLACE FUNCTION hive.get_hf09_keyauths_wrapper()
RETURNS SETOF hive.keyauth_c_record_type
AS 'MODULE_PATHNAME', 'get_hf09_keyauths_wrapped' LANGUAGE C;
DROP FUNCTION IF EXISTS hive.get_hf21_keyauths_wrapper;
CREATE OR REPLACE FUNCTION hive.get_hf21_keyauths_wrapper()
RETURNS SETOF hive.keyauth_c_record_type
AS 'MODULE_PATHNAME', 'get_hf21_keyauths_wrapped' LANGUAGE C;
DROP FUNCTION IF EXISTS hive.get_hf24_keyauths_wrapper;
CREATE OR REPLACE FUNCTION hive.get_hf24_keyauths_wrapper()
RETURNS SETOF hive.keyauth_c_record_type
AS 'MODULE_PATHNAME', 'get_hf24_keyauths_wrapped' LANGUAGE C;
DROP FUNCTION IF EXISTS hive.key_type_c_int_to_enum;
CREATE OR REPLACE FUNCTION hive.key_type_c_int_to_enum(IN _pos integer)
RETURNS hive.key_type
......@@ -114,6 +124,42 @@ BEGIN
END
$$;
DROP FUNCTION IF EXISTS hive.get_hf21_keyauths;
CREATE OR REPLACE FUNCTION hive.get_hf21_keyauths()
RETURNS SETOF hive.keyauth_record_type
LANGUAGE plpgsql
IMMUTABLE
AS
$$
BEGIN
RETURN QUERY SELECT
account_name,
hive.key_type_c_int_to_enum(authority_c_kind),
key_auth,
account_auth,
weight_threshold,
w
FROM hive.get_hf21_keyauths_wrapper();
END
$$;
DROP FUNCTION IF EXISTS hive.get_hf24_keyauths;
CREATE OR REPLACE FUNCTION hive.get_hf24_keyauths()
RETURNS SETOF hive.keyauth_record_type
LANGUAGE plpgsql
IMMUTABLE
AS
$$
BEGIN
RETURN QUERY SELECT
account_name,
hive.key_type_c_int_to_enum(authority_c_kind),
key_auth,
account_auth,
weight_threshold,
w
FROM hive.get_hf24_keyauths_wrapper();
END
$$;
DROP FUNCTION IF EXISTS hive.is_keyauths_operation;
......@@ -7,7 +7,7 @@ CREATE TYPE hive.metadata_record_type AS
);
DROP FUNCTION IF EXISTS hive.get_metadata;
CREATE OR REPLACE FUNCTION hive.get_metadata(IN _operation_body hive.operation)
CREATE OR REPLACE FUNCTION hive.get_metadata(IN _operation_body hive.operation, IN _is_hf21 bool)
RETURNS SETOF hive.metadata_record_type
AS 'MODULE_PATHNAME', 'get_metadata' LANGUAGE C;
......@@ -80,9 +80,19 @@ collected_keyauth_collection_t collect_hf09_keyauths()
return hive::app::operation_get_hf09_keyauths();
}
collected_metadata_collection_t collect_metadata(const hive::protocol::operation& op)
collected_keyauth_collection_t collect_hf21_keyauths()
{
return hive::app::operation_get_metadata(op);
return hive::app::operation_get_hf21_keyauths();
}
collected_keyauth_collection_t collect_hf24_keyauths()
{
return hive::app::operation_get_hf24_keyauths();
}
collected_metadata_collection_t collect_metadata(const hive::protocol::operation& op, const bool is_hf21)
{
return hive::app::operation_get_metadata(op, is_hf21);
}
} // namespace
......@@ -571,8 +581,8 @@ Datum get_impacted_balances(PG_FUNCTION_ARGS)
fill_return_tuples(collected_keyauths, fcinfo,
[] (const auto& collected_item) { return make_datum_pair(CStringGetTextDatum(collected_item.account_name.c_str()));},
[] (const auto& collected_item) { return make_datum_pair(Int32GetDatum(collected_item.key_kind));},
[] (const auto& collected_item) { return make_datum_pair(public_key_data_to_bytea_datum(collected_item.key_auth), !collected_item.keyauth_variant);},
[] (const auto& collected_item) { return make_datum_pair(CStringGetTextDatum(collected_item.account_auth.c_str()), collected_item.keyauth_variant);},
[] (const auto& collected_item) { return make_datum_pair(public_key_data_to_bytea_datum(collected_item.key_auth), collected_item.allow_null_in_key_auth());},
[] (const auto& collected_item) { return make_datum_pair(CStringGetTextDatum(collected_item.account_auth.c_str()), collected_item.allow_null_in_account_auth());},
[] (const auto& collected_item) { return make_datum_pair(Int32GetDatum(collected_item.weight_threshold));},
[] (const auto& collected_item) { return make_datum_pair(Int32GetDatum(collected_item.w));}
);
......@@ -650,7 +660,31 @@ Datum get_impacted_balances(PG_FUNCTION_ARGS)
return (Datum)0;
}
PG_FUNCTION_INFO_V1(get_hf21_keyauths_wrapped);
Datum get_hf21_keyauths_wrapped(PG_FUNCTION_ARGS)
{
collected_keyauth_collection_t collected_keyauths;
collected_keyauths = collect_hf21_keyauths();
fill_and_return_keyauths(collected_keyauths, fcinfo);
return (Datum)0;
}
PG_FUNCTION_INFO_V1(get_hf24_keyauths_wrapped);
Datum get_hf24_keyauths_wrapped(PG_FUNCTION_ARGS)
{
collected_keyauth_collection_t collected_keyauths;
collected_keyauths = collect_hf24_keyauths();
fill_and_return_keyauths(collected_keyauths, fcinfo);
return (Datum)0;
}
PG_FUNCTION_INFO_V1(get_metadata);
......@@ -670,14 +704,15 @@ Datum get_impacted_balances(PG_FUNCTION_ARGS)
Datum get_metadata(PG_FUNCTION_ARGS)
{
_operation* operation_body = PG_GETARG_HIVE_OPERATION_PP( 0 );
const bool is_hf21 = PG_GETARG_BOOL( 1 );
collected_metadata_collection_t collected_metadata;
colect_operation_data_and_fill_returned_recordset(
[=, &collected_metadata](const hive::protocol::operation& op)
[=, &collected_metadata, &is_hf21](const hive::protocol::operation& op)
{
collected_metadata = collect_metadata(op);
collected_metadata = collect_metadata(op, is_hf21);
},
......
......@@ -116,8 +116,10 @@ BEGIN
DECLARE
__account_ae_count INT;
__key_ae_count INT;
__HARDFROK_9_block_num INT := 3202773;
__op_serial_id_dummy INT := 5036543;
__HARDFORK_9_block_num INT := 3202773;
__HARDFORK_21_block_num INT := 35921786;
__HARDFORK_24_block_num INT := 47797680;
__op_serial_id_dummy INT := 5036543;
BEGIN
......@@ -140,27 +142,62 @@ BEGIN
1
FROM hive.get_genesis_keyauths() as g
WHERE _first_block <= 1 AND 1 <= _last_block
)
,
),
-- Hard fork 9 fixes some accounts that were compromised
HARDFROK_9_fixed_auth_records AS MATERIALIZED
HARDFORK_9_fixed_auth_records AS MATERIALIZED
(
SELECT
(SELECT a.id FROM hive.%1$s_accounts_view a WHERE a.name = h.account_name) as account_id,
*,
__op_serial_id_dummy as op_serial_id,
__HARDFROK_9_block_num as block_num,
(SELECT b.created_at FROM hive.blocks b WHERE b.num = __HARDFROK_9_block_num) as timestamp,
__HARDFORK_9_block_num as block_num,
(SELECT b.created_at FROM hive.blocks b WHERE b.num = __HARDFORK_9_block_num) as timestamp,
hive.calculate_operation_stable_id
(
__HARDFROK_9_block_num,
(SELECT MAX(o.trx_in_block) FROM hive.operations o WHERE o.block_num = __HARDFROK_9_block_num),
__HARDFORK_9_block_num,
(SELECT MAX(o.trx_in_block) FROM hive.operations o WHERE o.block_num = __HARDFORK_9_block_num),
0
) as op_stable_id
FROM hive.get_hf09_keyauths() h
WHERE _first_block <= __HARDFROK_9_block_num AND __HARDFROK_9_block_num <= _last_block
)
,
WHERE _first_block <= __HARDFORK_9_block_num AND __HARDFORK_9_block_num <= _last_block
),
HARDFORK_21_fixed_auth_records AS MATERIALIZED
(
SELECT
(SELECT a.id FROM hive.%1$s_accounts_view a WHERE a.name = h.account_name) as account_id,
*,
__op_serial_id_dummy as op_serial_id,
__HARDFORK_21_block_num as block_num,
(SELECT b.created_at FROM hive.blocks b WHERE b.num = __HARDFORK_21_block_num) as timestamp,
hive.calculate_operation_stable_id
(
__HARDFORK_21_block_num,
(SELECT MAX(o.trx_in_block) FROM hive.operations o WHERE o.block_num = __HARDFORK_21_block_num),
0
) as op_stable_id
FROM hive.get_hf21_keyauths() h
WHERE _first_block <= __HARDFORK_21_block_num AND __HARDFORK_21_block_num <= _last_block
),
HARDFORK_24_fixed_auth_records AS MATERIALIZED
(
SELECT
(SELECT a.id FROM hive.%1$s_accounts_view a WHERE a.name = h.account_name) as account_id,
*,
__op_serial_id_dummy as op_serial_id,
__HARDFORK_24_block_num as block_num,
(SELECT b.created_at FROM hive.blocks b WHERE b.num = __HARDFORK_24_block_num) as timestamp,
hive.calculate_operation_stable_id
(
__HARDFORK_24_block_num,
(SELECT MAX(o.trx_in_block) FROM hive.operations o WHERE o.block_num = __HARDFORK_24_block_num),
0
) as op_stable_id
FROM hive.get_hf24_keyauths() h
WHERE _first_block <= __HARDFORK_24_block_num AND __HARDFORK_24_block_num <= _last_block
),
-- Handle 'pow' operation:
-- 1. Distinguish between existing accounts and new account creation.
......@@ -345,61 +382,77 @@ BEGIN
timestamp,
op_stable_id
FROM
HARDFROK_9_fixed_auth_records
HARDFORK_9_fixed_auth_records
UNION ALL
SELECT
account_id,
account_name,
key_kind,
key_auth,
account_auth,
weight_threshold,
w,
op_serial_id,
block_num,
timestamp,
op_stable_id
FROM
HARDFORK_21_fixed_auth_records
UNION ALL
SELECT
account_id,
account_name,
key_kind,
key_auth,
account_auth,
weight_threshold,
w,
op_serial_id,
block_num,
timestamp,
op_stable_id
FROM
HARDFORK_24_fixed_auth_records
UNION ALL
SELECT *
FROM
genesis_auth_records
),
effective_key_auth_records as materialized
effective_key_or_account_auth_records as materialized
(
with effective_tuple_ids as materialized
(
select s.account_id, s.key_kind, max(s.op_stable_id) as op_stable_id
from extended_auth_records s
where s.key_auth IS NOT NULL
from extended_auth_records s
group by s.account_id, s.key_kind
)
select s1.*
from extended_auth_records s1
join effective_tuple_ids e ON e.account_id = s1.account_id and e.key_kind = s1.key_kind and e.op_stable_id = s1.op_stable_id
where s1.key_auth IS NOT NULL
),
effective_account_auth_records as materialized
(
with effective_tuple_ids as materialized
(
select s.account_id, s.key_kind, max(s.op_stable_id) as op_stable_id
from extended_auth_records s
where s.key_auth IS NULL
group by s.account_id, s.key_kind
)
select s1.*
from extended_auth_records s1
join effective_tuple_ids e ON e.account_id = s1.account_id and e.key_kind = s1.key_kind and e.op_stable_id = s1.op_stable_id
where s1.key_auth IS NULL
),
--- PROCESSING OF KEY BASED AUTHORITIES ---
supplement_key_dictionary as materialized
(
insert into hive.%1$s_keyauth_k as dict (key)
SELECT DISTINCT s.key_auth
FROM effective_key_auth_records s
FROM effective_key_or_account_auth_records s
where s.key_auth IS NOT NULL
on conflict (key) do update set key = EXCLUDED.key -- the only way to always get key-id (even if it is already in dict)
returning (xmax = 0) as is_new_key, dict.key_id, dict.key
),
extended_key_auth_records as materialized
(
select s.*, kd.key_id
from effective_key_auth_records s
from effective_key_or_account_auth_records s
join supplement_key_dictionary kd on kd.key = s.key_auth
where s.key_auth IS NOT NULL
),
changed_key_authorities as materialized
(
select distinct s.account_id as changed_account_id, s.key_kind as changed_key_kind
from extended_key_auth_records s
from effective_key_or_account_auth_records s
)
,delete_obsolete_key_auth_records as materialized (
DELETE FROM hive.%1$s_keyauth_a as ea
......@@ -437,28 +490,19 @@ BEGIN
SELECT (select a.id FROM hive.%1$s_accounts_view a
where a.name = s.account_auth) as account_auth_id,
s.*
FROM effective_account_auth_records s
FROM effective_key_or_account_auth_records s
) ds
WHERE ds.account_auth_id IS NOT NULL
),
changed_account_authorities as materialized
(
select distinct s.account_id as changed_account_id, s.key_kind as changed_key_kind
from extended_account_auth_records s
from effective_key_or_account_auth_records s
),
combined_keys AS (
SELECT changed_account_id, changed_key_kind
FROM changed_account_authorities
UNION ALL
SELECT changed_account_id, changed_key_kind
FROM changed_key_authorities
)
,
delete_obsolete_account_auth_records as materialized
(
DELETE FROM hive.%1$s_accountauth_a as ae
using combined_keys s
using changed_account_authorities s
where account_id = s.changed_account_id and key_kind = s.changed_key_kind
RETURNING account_id as cleaned_account_id, key_kind as cleaned_key_kind, account_auth_id as cleaned_account_auth_id
)
......
......@@ -9,17 +9,6 @@ BEGIN
END
$BODY$;
CREATE OR REPLACE FUNCTION hive.get_metadata_posting_function_name( _context hive.context_name )
RETURNS TEXT
LANGUAGE plpgsql
IMMUTABLE
AS
$BODY$
BEGIN
RETURN format( 'hive.%I_metadata_update_posting', _context );
END
$BODY$;
CREATE OR REPLACE FUNCTION hive.start_provider_metadata( _context hive.context_name )
RETURNS TEXT[]
LANGUAGE plpgsql
......@@ -54,11 +43,21 @@ BEGIN
VOLATILE
AS
$$
DECLARE
__state INT := 0;
BEGIN
IF COALESCE( ( SELECT _blockFrom > block_num FROM hive.applied_hardforks WHERE hardfork_num = 21 ), FALSE ) THEN
__state := 1;
ELSIF COALESCE( ( SELECT _blockTo <= block_num FROM hive.applied_hardforks WHERE hardfork_num = 21 ), FALSE ) THEN
__state := -1;
END IF;
WITH select_metadata AS MATERIALIZED (
SELECT
(hive.get_metadata(ov.body_binary)).*,
ov.id
ov.body_binary,
ov.id,
ov.block_num
FROM
hive.%s_operations_view ov
WHERE
......@@ -70,78 +69,79 @@ BEGIN
''hive::protocol::account_create_with_delegation_operation'',
''hive::protocol::account_update2_operation''))
AND ov.block_num BETWEEN _blockFrom AND _blockTo
)
INSERT INTO
hive.%s_metadata(account_id, json_metadata)
), calculated_metadata AS MATERIALIZED
(
SELECT
(hive.get_metadata
(
sm.body_binary,
CASE __state
WHEN 1 THEN TRUE
WHEN 0 THEN COALESCE( ( SELECT block_num < sm.block_num FROM hive.applied_hardforks WHERE hardfork_num = 21 ), FALSE )
WHEN -1 THEN FALSE
END
)).*,
sm.id
FROM select_metadata sm
),
prepare_accounts AS MATERIALIZED
(
SELECT
accounts_view.id,
json_metadata
FROM
(
SELECT
DISTINCT ON (metadata.account_name) metadata.account_name,
metadata.json_metadata
FROM select_metadata as metadata
WHERE metadata.json_metadata != ''''
ORDER BY
metadata.account_name,
metadata.id DESC
) as t
JOIN hive.accounts_view accounts_view ON accounts_view.name = account_name
ON CONFLICT (account_id) DO UPDATE
SET
json_metadata = EXCLUDED.json_metadata;
END
$$;'
, hive.get_metadata_update_function_name( _context ), _context, _context
);
EXECUTE format('
CREATE OR REPLACE FUNCTION %I(_blockFrom INT, _blockTo INT )
RETURNS void
LANGUAGE plpgsql
VOLATILE
AS
$$
BEGIN
WITH select_metadata AS MATERIALIZED (
metadata.account_name
FROM calculated_metadata as metadata
GROUP BY metadata.account_name
),
select_json_metadata AS MATERIALIZED
(
SELECT
(hive.get_metadata(ov.body_binary)).*,
ov.id
FROM
hive.%s_operations_view ov
WHERE
ov.op_type_id in (
SELECT id FROM hive.operation_types WHERE name IN
(''hive::protocol::account_create_operation'',
''hive::protocol::account_update_operation'',
''hive::protocol::create_claimed_account_operation'',
''hive::protocol::account_create_with_delegation_operation'',
''hive::protocol::account_update2_operation''))
AND ov.block_num BETWEEN _blockFrom AND _blockTo
DISTINCT ON (metadata.account_name) metadata.account_name,
metadata.json_metadata
FROM calculated_metadata as metadata
WHERE metadata.json_metadata != ''''
ORDER BY
metadata.account_name,
metadata.id DESC
),
select_posting_json_metadata AS MATERIALIZED
(
SELECT
DISTINCT ON (metadata.account_name) metadata.account_name,
metadata.posting_json_metadata
FROM calculated_metadata as metadata
WHERE metadata.posting_json_metadata != ''''
ORDER BY
metadata.account_name,
metadata.id DESC
)
INSERT INTO
hive.%s_metadata(account_id, posting_json_metadata)
hive.%s_metadata(account_id, json_metadata, posting_json_metadata)
SELECT
accounts_view.id,
posting_json_metadata
FROM
(
SELECT
DISTINCT ON (metadata.account_name) metadata.account_name,
metadata.posting_json_metadata
FROM select_metadata as metadata
WHERE metadata.posting_json_metadata != ''''
ORDER BY
metadata.account_name,
metadata.id DESC
) as t
JOIN hive.accounts_view accounts_view ON accounts_view.name = account_name
av.id,
COALESCE(sjm.json_metadata, ''''),
COALESCE(pjm.posting_json_metadata, '''')
FROM prepare_accounts pa
LEFT JOIN select_json_metadata sjm ON sjm.account_name = pa.account_name
LEFT JOIN select_posting_json_metadata pjm ON pjm.account_name = pa.account_name
JOIN hive.accounts_view av ON av.name = pa.account_name
ON CONFLICT (account_id) DO UPDATE
SET posting_json_metadata = EXCLUDED.posting_json_metadata;
SET
json_metadata =
(
CASE EXCLUDED.json_metadata
WHEN '''' THEN hive.%s_metadata.json_metadata
ELSE EXCLUDED.json_metadata
END
),
posting_json_metadata =
(
CASE EXCLUDED.posting_json_metadata
WHEN '''' THEN hive.%s_metadata.posting_json_metadata
ELSE EXCLUDED.posting_json_metadata
END
);
END
$$;'
, hive.get_metadata_posting_function_name( _context ), _context, _context
, hive.get_metadata_update_function_name( _context ), _context, _context, _context, _context
);
RETURN ARRAY[ __table_name ];
......@@ -174,13 +174,6 @@ BEGIN
, _first_block
, _last_block
);
EXECUTE format(
'SELECT %I(%s, %s);'
, hive.get_metadata_posting_function_name( _context )
, _first_block
, _last_block
);
END
$BODY$
;
......@@ -206,11 +199,6 @@ BEGIN
'DROP FUNCTION IF EXISTS %I'
, hive.get_metadata_update_function_name( _context )
);
EXECUTE format(
'DROP FUNCTION IF EXISTS %I'
, hive.get_metadata_posting_function_name( _context )
);
END;
$BODY$
;
......@@ -96,7 +96,7 @@ BEGIN
"value": {
"account": "howo",
"json_metadata": "",
"posting_json_metadata": "",
"posting_json_metadata": "{}",
"extensions": []
}
}'::jsonb::hive.operation
......@@ -581,7 +581,7 @@ AS
$BODY$
BEGIN
PERFORM ASSERT_METADATA_VALUES(6 /*'test-safari'*/ , '','{"profile":{"name":"Leonardo Da VinciXX","about":"Renaissance man, vegetarian, inventor of the helicopter in 1512 and painter of the Mona Lisa..","website":"http://www.davincilife.com/","location":"Florence","cover_image":"https://ichef.bbci.co.uk/news/912/cpsprodpb/CE63/production/_106653825_be212f00-f8c5-43d2-b4ad-f649e6dc4c1e.jpg","profile_image":"https://www.parhlo.com/wp-content/uploads/2016/01/tmp617041537745813506.jpg"}}');
PERFORM ASSERT_METADATA_VALUES(7 /*'howo'*/ , '{}', '""');
PERFORM ASSERT_METADATA_VALUES(7 /*'howo'*/ , '{}', '{}');
PERFORM ASSERT_METADATA_VALUES(8 /*'bassman077'*/ , '{"maleficiaries":[{"name":"oracle-d","weight":100,"label":"creator"},{"name":"hiveonboard","weight":100,"label":"provider"},{"name":"spk.beneficiary","label":"referrer","weight":300}]}', '');
PERFORM ASSERT_METADATA_VALUES(9 /*'spscontest'*/ ,'','""');
PERFORM ASSERT_METADATA_VALUES(10 /*'xenomorphosis'*/,'','{}');
......@@ -591,7 +591,7 @@ BEGIN
PERFORM ASSERT_METADATA_VALUES(14 /*'margemnlpz08'*/ ,'{"profile":{"about":"This account was instantly created via @hivewallet.app - available for iOS and Android!","website":"https://hivewallet.app"}}','');
PERFORM ASSERT_METADATA_VALUES(15 /*'steem.kit'*/ ,'{"owner":"genievot"}','');
PERFORM ASSERT_METADATA_VALUES(16 /*'jte1023'*/ ,'{"profile":{"name":"Jeremy","about":" ","cover_image":"https://files.peakd.com/file/peakd-hive/jte1023/7C47EDD4-517A-414B-8222-4DD365FB301A.jpeg","profile_image":"https://files.peakd.com/file/peakd-hive/jte1023/1029B838-2E4B-4892-9E3A-964B9ABB168A.jpeg","website":" ","location":"NC, USA","pinned":"","version":2,"portfolio":"enabled","trail":true,"collections":"enabled"}}', '{"profile":{"name":"Jeremy","about":" ","cover_image":"https://files.peakd.com/file/peakd-hive/jte1023/7C47EDD4-517A-414B-8222-4DD365FB301A.jpeg","profile_image":"https://files.peakd.com/file/peakd-hive/jte1023/1029B838-2E4B-4892-9E3A-964B9ABB168A.jpeg","website":" ","location":"NC, USA","pinned":"","version":2,"portfolio":"enabled","trail":true,"collections":"enabled"}}');
PERFORM ASSERT_METADATA_VALUES(17 /*'adedayoolumide'*/,'{"beneficiaries":[{"name":"threespeak","weight":100,"label":"creator"},{"name":"hiveonboard","weight":100,"label":"provider"}]}','');
PERFORM ASSERT_METADATA_VALUES(17 /*'adedayoolumide'*/,'{"beneficiaries":[{"name":"threespeak","weight":100,"label":"creator"},{"name":"hiveonboard","weight":100,"label":"provider"}]}','{"beneficiaries":[{"name":"threespeak","weight":100,"label":"creator"},{"name":"hiveonboard","weight":100,"label":"provider"}]}');
PERFORM ASSERT_METADATA_VALUES(18 /*'eos-polska'*/ ,'{"beneficiaries":[{"name":"fractalnode","weight":300,"label":"referrer"},{"name":"ocdb","weight":100,"label":"creator"},{"name":"hiveonboard","weight":100,"label":"provider"}]}','');
END;
......@@ -604,7 +604,7 @@ AS
$BODY$
BEGIN
PERFORM ASSERT_METADATA_VALUES(6 /*'test-safari'*/ , '','{"profile":{"name":"Leonardo Da VinciXX","about":"Renaissance man, vegetarian, inventor of the helicopter in 1512 and painter of the Mona Lisa..","website":"http://www.davincilife.com/","location":"Florence","cover_image":"https://ichef.bbci.co.uk/news/912/cpsprodpb/CE63/production/_106653825_be212f00-f8c5-43d2-b4ad-f649e6dc4c1e.jpg","profile_image":"https://www.parhlo.com/wp-content/uploads/2016/01/tmp617041537745813506.jpg"}}');
PERFORM ASSERT_METADATA_VALUES(7 /*'howo'*/ , '{}', '""');
PERFORM ASSERT_METADATA_VALUES(7 /*'howo'*/ , '{}', '{}');
PERFORM ASSERT_METADATA_VALUES(8 /*'bassman077'*/ , '{"maleficiaries":[{"name":"oracle-d","weight":100,"label":"creator"},{"name":"hiveonboard","weight":100,"label":"provider"},{"name":"spk.beneficiary","label":"referrer","weight":300}]}', '');
PERFORM ASSERT_METADATA_VALUES(9 /*'spscontest'*/ ,'','""');
PERFORM ASSERT_METADATA_VALUES(10 /*'xenomorphosis'*/,'','{}');
......@@ -614,7 +614,7 @@ BEGIN
PERFORM ASSERT_METADATA_VALUES(14 /*'margemnlpz08'*/ ,'{"profile":{"about":"This account was instantly created via @hivewallet.app - available for iOS and Android!","website":"https://hivewallet.app"}}','');
PERFORM ASSERT_METADATA_VALUES(15 /*'steem.kit'*/ ,'{"owner":"genievot"}','');
PERFORM ASSERT_METADATA_VALUES(16 /*'jte1023'*/ ,'{"profile":{"name":"Jeremy","about":" ","cover_image":"https://files.peakd.com/file/peakd-hive/jte1023/7C47EDD4-517A-414B-8222-4DD365FB301A.jpeg","profile_image":"https://files.peakd.com/file/peakd-hive/jte1023/1029B838-2E4B-4892-9E3A-964B9ABB168A.jpeg","website":" ","location":"NC, USA","pinned":"","version":2,"portfolio":"enabled","trail":true,"collections":"enabled"}}', '{"profile":{"name":"Jeremy","about":" ","cover_image":"https://files.peakd.com/file/peakd-hive/jte1023/7C47EDD4-517A-414B-8222-4DD365FB301A.jpeg","profile_image":"https://files.peakd.com/file/peakd-hive/jte1023/1029B838-2E4B-4892-9E3A-964B9ABB168A.jpeg","website":" ","location":"NC, USA","pinned":"","version":2,"portfolio":"enabled","trail":true,"collections":"enabled"}}');
PERFORM ASSERT_METADATA_VALUES(17 /*'adedayoolumide'*/,'{"beneficiaries":[{"name":"threespeak","weight":100,"label":"creator"},{"name":"hiveonboard","weight":100,"label":"provider"}]}','');
PERFORM ASSERT_METADATA_VALUES(17 /*'adedayoolumide'*/,'{"beneficiaries":[{"name":"threespeak","weight":100,"label":"creator"},{"name":"hiveonboard","weight":100,"label":"provider"}]}','{"beneficiaries":[{"name":"threespeak","weight":100,"label":"creator"},{"name":"hiveonboard","weight":100,"label":"provider"}]}');
PERFORM ASSERT_METADATA_VALUES(18 /*'eos-polska'*/ ,'{"beneficiaries":[{"name":"fractalnode","weight":300,"label":"referrer"},{"name":"ocdb","weight":100,"label":"creator"},{"name":"hiveonboard","weight":100,"label":"provider"}]}','');
END;
$BODY$
......@@ -626,7 +626,7 @@ AS
$BODY$
BEGIN
PERFORM ASSERT_METADATA_VALUES(6 /*'test-safari'*/ , '','{"profile":{"name":"Leonardo Da VinciXX","about":"Renaissance man, vegetarian, inventor of the helicopter in 1512 and painter of the Mona Lisa..","website":"http://www.davincilife.com/","location":"Florence","cover_image":"https://ichef.bbci.co.uk/news/912/cpsprodpb/CE63/production/_106653825_be212f00-f8c5-43d2-b4ad-f649e6dc4c1e.jpg","profile_image":"https://www.parhlo.com/wp-content/uploads/2016/01/tmp617041537745813506.jpg"}}');
PERFORM ASSERT_METADATA_VALUES(7 /*'howo'*/ , '{}', '""');
PERFORM ASSERT_METADATA_VALUES(7 /*'howo'*/ , '{}', '{}');
PERFORM ASSERT_METADATA_VALUES(8 /*'bassman077'*/ , '{"maleficiaries":[{"name":"oracle-d","weight":100,"label":"creator"},{"name":"hiveonboard","weight":100,"label":"provider"},{"name":"spk.beneficiary","label":"referrer","weight":300}]}', '');
PERFORM ASSERT_METADATA_VALUES(9 /*'spscontest'*/ ,'','""');
PERFORM ASSERT_METADATA_VALUES(10 /*'xenomorphosis'*/,'','{}');
......@@ -636,7 +636,7 @@ BEGIN
PERFORM ASSERT_METADATA_VALUES(14 /*'margemnlpz08'*/ ,'{"profile":{"about":"This account was instantly created via @hivewallet.app - available for iOS and Android!","website":"https://hivewallet.app"}}','');
PERFORM ASSERT_METADATA_VALUES(15 /*'steem.kit'*/ ,'{"owner":"genievot"}','');
PERFORM ASSERT_METADATA_VALUES(16 /*'jte1023'*/ ,'{"profile":{"name":"Jeremy","about":" ","cover_image":"https://files.peakd.com/file/peakd-hive/jte1023/7C47EDD4-517A-414B-8222-4DD365FB301A.jpeg","profile_image":"https://files.peakd.com/file/peakd-hive/jte1023/1029B838-2E4B-4892-9E3A-964B9ABB168A.jpeg","website":" ","location":"NC, USA","pinned":"","version":2,"portfolio":"enabled","trail":true,"collections":"enabled"}}', '{"profile":{"name":"Jeremy","about":" ","cover_image":"https://files.peakd.com/file/peakd-hive/jte1023/7C47EDD4-517A-414B-8222-4DD365FB301A.jpeg","profile_image":"https://files.peakd.com/file/peakd-hive/jte1023/1029B838-2E4B-4892-9E3A-964B9ABB168A.jpeg","website":" ","location":"NC, USA","pinned":"","version":2,"portfolio":"enabled","trail":true,"collections":"enabled"}}');
PERFORM ASSERT_METADATA_VALUES(17 /*'adedayoolumide'*/,'{"beneficiaries":[{"name":"threespeak","weight":100,"label":"creator"},{"name":"hiveonboard","weight":100,"label":"provider"}]}','');
PERFORM ASSERT_METADATA_VALUES(17 /*'adedayoolumide'*/,'{"beneficiaries":[{"name":"threespeak","weight":100,"label":"creator"},{"name":"hiveonboard","weight":100,"label":"provider"}]}','{"beneficiaries":[{"name":"threespeak","weight":100,"label":"creator"},{"name":"hiveonboard","weight":100,"label":"provider"}]}');
PERFORM ASSERT_METADATA_VALUES(18 /*'eos-polska'*/ ,'{"beneficiaries":[{"name":"fractalnode","weight":300,"label":"referrer"},{"name":"ocdb","weight":100,"label":"creator"},{"name":"hiveonboard","weight":100,"label":"provider"}]}','');
END;
$BODY$
......
log-appender = {"appender":"stderr","stream":"std_error"} {"appender":"p2p","file":"logs/p2p/p2p.log"}
log-logger = {"name":"default","level":"info","appender":"stderr"} {"name":"user","level":"debug","appender":"stderr"} {"name":"p2p","level":"warn","appender":"p2p"}
backtrace = yes
plugin = sql_serializer
history-disable-pruning = 0
account-history-rocksdb-path = "blockchain/account-history-rocksdb-storage"
block-data-export-file = NONE
block-log-info-print-interval-seconds = 86400
block-log-info-print-irreversible = 1
block-log-info-print-file = ILOG
shared-file-dir = "blockchain"
shared-file-size = 8G
shared-file-full-threshold = 0
shared-file-scale-rate = 0
follow-max-feed-size = 500
follow-start-feeds = 0
market-history-bucket-size = [15,60,300,3600,86400]
market-history-buckets-per-size = 5760
p2p-endpoint = 0.0.0.0:0
rc-skip-reject-not-enough-rc = 0
rc-compute-historical-rc = 0
rc-start-at-block = 0
snapshot-root-dir = "snapshot"
statsd-batchsize = 1
tags-start-promoted = 0
tags-skip-startup-update = 0
transaction-status-block-depth = 64000
transaction-status-track-after-block = 0
webserver-http-endpoint = 0.0.0.0:0
webserver-ws-endpoint = 0.0.0.0:0
webserver-thread-pool-size = 32
enable-stale-production = 0
required-participation = 33
witness-skip-enforce-bandwidth = 1
......@@ -5,13 +5,14 @@ import argparse
def main():
parser = argparse.ArgumentParser(description="Script to process JSON data and interact with PostgreSQL")
parser.add_argument("script_dir", help="Path to the directory containing 'accounts_dump.json'")
parser.add_argument("--script_dir", help="Path to the directory containing 'accounts_dump.json'")
parser.add_argument("--host", default="docker", help="PostgreSQL host (default: docker)")
parser.add_argument("--port", type=int, default=5432, help="PostgreSQL port (default: 5432)")
parser.add_argument("--database", default="haf_block_log", help="PostgreSQL database name (default: haf_block_log)")
parser.add_argument("--user", default="haf_admin", help="PostgreSQL user (default: haf_admin)")
parser.add_argument("--password", default="", help="PostgreSQL password (default: empty)")
parser.add_argument("--debug", action="store_true", help="Run in debug mode (default: false)")
parser.add_argument("--data_type", default="", help="Possible values: `keyauth` or `metadata`")
args = parser.parse_args()
......@@ -48,7 +49,12 @@ def main():
cursor = connection.cursor()
query = "SELECT keyauth_live.dump_current_account_stats(%s)"
if args.data_type == 'keyauth':
query = "SELECT keyauth_live.dump_current_account_stats(%s)"
elif args.data_type == 'metadata':
query = "SELECT metadata_live.dump_current_account_stats(%s)"
else:
raise Exception("Incorrect type of data. Possible values: `keyauth` or `metadata`")
# Iterate over objects inside 'accounts[]'
for account in accounts:
......
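For reference, the reworked data_insertion.py above is driven from the replay wrapper further down in this diff; a typical invocation (a sketch, assuming the dump archive sits in the account_data/ directory that the wrapper references) looks like:
# Illustrative local run of the dump-comparison script; paths are examples only.
python3 tests/integration/state_provider/data_insertion.py \
  --script_dir=tests/integration/state_provider/account_data \
  --host=/var/run/postgresql \
  --data_type=metadata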
CREATE SCHEMA metadata_live;
CREATE OR REPLACE PROCEDURE metadata_live.main(
IN _appcontext character varying,
IN _from integer,
IN _to integer,
IN _step integer)
LANGUAGE 'plpgsql'
AS $BODY$
DECLARE
_last_block INT;
BEGIN
RAISE NOTICE 'Entering massive processing of block range: <%, %>...', _from, _to;
RAISE NOTICE 'Detaching HAF application context...';
PERFORM hive.app_context_detach(_appContext);
FOR b IN _from .. _to BY _step LOOP
_last_block := b + _step - 1;
IF _last_block > _to THEN
_last_block := _to;
END IF;
RAISE NOTICE 'Attempting to process a block range: <%, %>', b, _last_block;
PERFORM hive.app_state_providers_update(b, _last_block, _appContext);
RAISE NOTICE 'Block range: <%, %> processed successfully.', b, _last_block;
END LOOP;
RAISE NOTICE 'Leaving massive processing of block range: <%, %>...', _from, _to;
END
$BODY$;
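A usage sketch for the procedure above, mirroring the CALL issued by the replay script later in this diff (block range and step match the 5M-block CI replay; database name and role are the CI defaults):
# Process the first 5,000,000 blocks in 500,000-block batches for the metadata_live context.
psql -w -d haf_block_log -v ON_ERROR_STOP=on -U haf_admin \
  -c "CALL metadata_live.main('metadata_live', 0, 5000000, 500000);"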
CREATE TABLE IF NOT EXISTS metadata_live.jsons (
account text,
json_metadata text DEFAULT '',
posting_json_metadata text DEFAULT '',
CONSTRAINT pk_json_metadata_comparison PRIMARY KEY (account)
);
CREATE TABLE IF NOT EXISTS metadata_live.differing_accounts (
account TEXT
);
CREATE OR REPLACE FUNCTION metadata_live.current_state(_account text)
RETURNS SETOF metadata_live.jsons
LANGUAGE 'plpgsql' STABLE
AS
$$
BEGIN
RETURN QUERY
SELECT
_account,
m.json_metadata,
m.posting_json_metadata
FROM
hive.metadata_live_metadata m JOIN hive.accounts_view av ON m.account_id = av.id
WHERE av.name = _account;
END
$$;
CREATE OR REPLACE FUNCTION metadata_live.dump_current_account_stats(account_data jsonb)
RETURNS void
LANGUAGE 'plpgsql'
VOLATILE
AS
$$
BEGIN
INSERT INTO metadata_live.jsons
SELECT
account_data->>'name' as account,
account_data->>'json_metadata' as json_metadata,
account_data->>'posting_json_metadata' as posting_json_metadata;
END
$$;
CREATE OR REPLACE FUNCTION metadata_live.compare_accounts()
RETURNS void
LANGUAGE 'plpgsql'
VOLATILE
AS
$$
BEGIN
WITH jsons AS (
SELECT *
FROM metadata_live.jsons
)
INSERT INTO metadata_live.differing_accounts
SELECT jsons.account
FROM jsons
JOIN metadata_live.current_state(jsons.account) AS current_stats ON current_stats.account = jsons.account
WHERE
jsons.json_metadata != current_stats.json_metadata OR
jsons.posting_json_metadata != current_stats.posting_json_metadata;
END
$$;
#! /bin/bash
set -euo pipefail
CURRENT_PROJECT_DIR="$CI_PROJECT_DIR/tests/integration/state_provider"
source "${CURRENT_PROJECT_DIR}/state_provider_common_run.sh" metadata
#! /bin/bash
set -euo pipefail
CURRENT_PROJECT_DIR="$CI_PROJECT_DIR/tests/integration/state_provider"
source "${CURRENT_PROJECT_DIR}/state_provider_common_run.sh" keyauth
#! /bin/bash
set -xeuo pipefail
export REPO_DIR="$CI_PROJECT_DIR"
# container must have a /blockchain directory mounted, containing a block_log with at least the first 5000000 blocks
export BLOCK_LOG_SOURCE_DIR_5M="/blockchain/block_log_5m"
export DATADIR="$CI_PROJECT_DIR/datadir"
export REPLAY="--replay-blockchain --stop-replay-at-block 5000000"
export HIVED_PATH="/home/hived/bin/hived"
export COMPRESS_BLOCK_LOG_PATH="/home/hived/bin/compress_block_log"
export DB_NAME=haf_block_log
export DB_ADMIN="haf_admin"
export SETUP_SCRIPTS_PATH="/home/haf_admin/haf/scripts"
test_start() {
pushd "$REPO_DIR"
echo "Will use tests from commit $(git rev-parse HEAD)"
exec > >(tee -i "${LOG_FILE}") 2>&1
}
test_end() {
echo done
}
......@@ -3,48 +3,45 @@
set -euo pipefail
SCRIPTPATH="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
SCRIPTSDIR="$SCRIPTPATH/.."
LOG_FILE=replay_with_keyauths.log
NAME=$1
source "$SCRIPTSDIR/maintenance-scripts/ci_common.sh"
if [ ${NAME} = "keyauth" ]; then
TYPE="KEYAUTH"
TABLE_NAME="keys"
else
TYPE="METADATA"
TABLE_NAME="jsons"
fi
test_start
CURRENT_PROJECT_DIR="$CI_PROJECT_DIR/tests/integration/state_provider"
LOG_FILE=replay_with_${NAME}.log
# container must have a /blockchain directory mounted, containing a block_log with at least the first 5000000 blocks
export BLOCK_LOG_SOURCE_DIR_5M="/blockchain/block_log_5m"
export PATTERNS_PATH="${REPO_DIR}/tests/integration/replay/patterns/no_filter"
export DATADIR="${REPO_DIR}/datadir"
export REPLAY="--replay-blockchain --stop-replay-at-block 5000000"
export HIVED_PATH="/home/hived/bin/hived"
export COMPRESS_BLOCK_LOG_PATH="/home/hived/bin/compress_block_log"
export DB_NAME=haf_block_log
export DB_ADMIN="haf_admin"
export SETUP_SCRIPTS_PATH="/home/haf_admin/haf/scripts"
source "$CURRENT_PROJECT_DIR/state_provider_common.sh"
test_start
echo -e "\e[0Ksection_start:$(date +%s):replay[collapsed=true]\r\e[0KExecuting replay..."
test -n "$PATTERNS_PATH"
mkdir $DATADIR/blockchain -p
cp "$PATTERNS_PATH"/* "$DATADIR" -r
cp ${BLOCK_LOG_SOURCE_DIR_5M}/block_log $DATADIR/blockchain
cp ${CURRENT_PROJECT_DIR}/config.ini $DATADIR
$HIVED_PATH --data-dir "$DATADIR" $REPLAY --exit-before-sync --psql-url "postgresql:///$DB_NAME" 2>&1 | tee -i node_logs.log
echo -e "\e[0Ksection_end:$(date +%s):replay\r\e[0K"
psql -w -d $DB_NAME -v ON_ERROR_STOP=on -U $DB_ADMIN -c "SELECT hive.app_create_context('${NAME}_live');"
psql -w -d $DB_NAME -v ON_ERROR_STOP=on -U $DB_ADMIN -c "SELECT hive.app_state_provider_import('${TYPE}', '${NAME}_live');"
psql -w -d $DB_NAME -v ON_ERROR_STOP=on -U $DB_ADMIN -c "SELECT hive.app_create_context('keyauth_live');"
psql -w -d $DB_NAME -v ON_ERROR_STOP=on -U $DB_ADMIN -c "SELECT hive.app_state_provider_import('KEYAUTH', 'keyauth_live');"
echo "Replay of keyauths..."
bash "${SCRIPTPATH}/keyauths_comparison/process_klive.sh"
echo "Replay of ${NAME}..."
psql -w -d $DB_NAME -v ON_ERROR_STOP=on -U $DB_ADMIN -f "${CURRENT_PROJECT_DIR}/${NAME}/${NAME}_live_schema.sql"
psql -w -d $DB_NAME -v ON_ERROR_STOP=on -U $DB_ADMIN -c "CALL ${NAME}_live.main('${NAME}_live', 0, 5000000, 500000);"
echo "Clearing tables..."
psql -w -d $DB_NAME -v ON_ERROR_STOP=on -U $DB_ADMIN -c "TRUNCATE keyauth_live.keys;"
psql -w -d $DB_NAME -v ON_ERROR_STOP=on -U $DB_ADMIN -c "TRUNCATE keyauth_live.differing_accounts;"
psql -w -d $DB_NAME -v ON_ERROR_STOP=on -U $DB_ADMIN -c "TRUNCATE ${NAME}_live.${TABLE_NAME};"
psql -w -d $DB_NAME -v ON_ERROR_STOP=on -U $DB_ADMIN -c "TRUNCATE ${NAME}_live.differing_accounts;"
echo "Installing dependecies..."
echo "Installing dependencies..."
pip install psycopg2-binary
rm -f "${SCRIPTPATH}/keyauths_comparison/accounts_dump.json"
rm -f "${CURRENT_PROJECT_DIR}/account_data/accounts_dump.json"
# The line below is somewhat problematic. Gunzip by default deletes the gz file after decompression,
# but the '-k' parameter, which prevents that from happening, is not supported on some of its versions.
#
......@@ -52,23 +49,23 @@ rm -f "${SCRIPTPATH}/keyauths_comparison/accounts_dump.json"
# gunzip -c "${SCRIPTDIR}/accounts_dump.json.gz" > "${SCRIPTDIR}/accounts_dump.json"
# gzcat "${SCRIPTDIR}/accounts_dump.json.gz" > "${SCRIPTDIR}/accounts_dump.json"
# zcat "${SCRIPTDIR}/accounts_dump.json.gz" > "${SCRIPTDIR}/accounts_dump.json"
gunzip -k "${SCRIPTPATH}/keyauths_comparison/accounts_dump.json.gz"
gunzip -k "${CURRENT_PROJECT_DIR}/account_data/accounts_dump.json.gz"
echo "Starting data_insertion_script.py..."
python3 $SCRIPTPATH/keyauths_comparison/data_insertion_script.py "$SCRIPTPATH"/keyauths_comparison --host="/var/run/postgresql" #--debug
python3 ${CURRENT_PROJECT_DIR}/data_insertion.py --script_dir="${CURRENT_PROJECT_DIR}/account_data" --host="/var/run/postgresql" --data_type="${NAME}" #--debug
echo "Looking for diffrences between hived node and keyauths..."
psql -w -d $DB_NAME -v ON_ERROR_STOP=on -U $DB_ADMIN -c "SELECT keyauth_live.compare_accounts();"
echo "Looking for differences between hived node and ${NAME}..."
psql -w -d $DB_NAME -v ON_ERROR_STOP=on -U $DB_ADMIN -c "SELECT ${NAME}_live.compare_accounts();"
DIFFERING_ACCOUNTS=$(psql -w -d $DB_NAME -v ON_ERROR_STOP=on -U $DB_ADMIN -t -A -c "SELECT * FROM keyauth_live.differing_accounts;")
DIFFERING_ACCOUNTS=$(psql -w -d $DB_NAME -v ON_ERROR_STOP=on -U $DB_ADMIN -t -A -c "SELECT * FROM ${NAME}_live.differing_accounts;")
if [ -z "$DIFFERING_ACCOUNTS" ]; then
echo "keyauths are correct!"
echo "Result for ${NAME}: correct!"
exit 0
else
echo "keyauths are incorrect..."
psql -w -d $DB_NAME -v ON_ERROR_STOP=on -U $DB_ADMIN -c "SELECT * FROM keyauth_live.differing_accounts;"
echo "Result for ${NAME}: differences found. Incorrect."
psql -w -d $DB_NAME -v ON_ERROR_STOP=on -U $DB_ADMIN -c "SELECT * FROM ${NAME}_live.differing_accounts;"
exit 3
fi
test_end
\ No newline at end of file
test_end