Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • hive/haf
  • dan/haf
2 results
Show changes
Commits on Source (12)
Subproject commit 785aa74b7338c845294699d8457706909f28159e
Subproject commit 0eac44aab433807275931cd5f1c13df14ec4da1b
......@@ -13,15 +13,13 @@ ADD_PSQL_EXTENSION(
types/operation/operation_casts.sql
irreversible_blocks.sql
reversible_blocks.sql
block_views_for_head_block.sql
block_day_stats_view.sql
block_day_stats_all_op_view.sql
state_provider.sql
hived_connections.sql
hived_api_impl_indexes.sql
hived_api.sql
DEPLOY_SOURCES trigger_switch/trigger_off.sql
context_rewind/sink_id_functions.sql
context_rewind/names.sql
context_rewind/triggers.sql
context_rewind/event_triggers.sql
......@@ -33,6 +31,9 @@ ADD_PSQL_EXTENSION(
# This file must be also specified here, as it contains functions which definitions should be always overwritten.
types/operation/operation_flow.sql
tools.sql
block_views_for_head_block.sql
block_day_stats_view.sql
block_day_stats_all_op_view.sql
blocks_views_for_contexts.sql
get_keyauths.sql
get_metadata.sql
......
......@@ -7,39 +7,6 @@ CREATE TYPE hive.state_providers AS ENUM( 'ACCOUNTS', 'KEYAUTH' , 'METADATA' );
CREATE TYPE hive.event_type AS ENUM( 'BACK_FROM_FORK', 'NEW_BLOCK', 'NEW_IRREVERSIBLE', 'MASSIVE_SYNC' );
-- Returns a sentinel event id: the largest possible BIGINT, denoting an
-- event id that can never be reached.
-- Rewritten as LANGUAGE sql (was plpgsql): a constant-returning SQL function
-- behaves identically but can be inlined by the planner, avoiding plpgsql
-- call overhead. Interface (name, no args, BIGINT return) is unchanged.
CREATE OR REPLACE FUNCTION hive.unreachable_event_id()
    RETURNS BIGINT
    LANGUAGE sql
    IMMUTABLE
AS
$$
    SELECT 9223372036854775807::BIGINT; -- MAX BIGINT
$$;
-- Returns the reserved block number for the "sink" (constant 0).
-- NOTE(review): exact sink semantics are defined elsewhere in the project —
-- this function only pins the constant.
-- Rewritten as LANGUAGE sql (was plpgsql): identical behavior, but a plain
-- SQL constant function can be inlined by the planner.
CREATE OR REPLACE FUNCTION hive.block_sink_num()
    RETURNS INT
    LANGUAGE sql
    IMMUTABLE
AS
$$
    SELECT 0;
$$;
-- Returns the reserved account id for the "sink" (constant -1).
-- NOTE(review): exact sink semantics are defined elsewhere in the project —
-- this function only pins the constant.
-- Rewritten as LANGUAGE sql (was plpgsql): identical behavior, but a plain
-- SQL constant function can be inlined by the planner.
CREATE OR REPLACE FUNCTION hive.account_sink_id()
    RETURNS INT
    LANGUAGE sql
    IMMUTABLE
AS
$$
    SELECT -1;
$$;
CREATE TABLE IF NOT EXISTS hive.contexts(
id SERIAL NOT NULL,
name hive.context_name NOT NULL,
......
-- NOTE(review): the three functions below duplicate identically-named
-- definitions that appear earlier in this revision (context_rewind file).
-- Presumably this copy lives in the extension update path and must be kept
-- byte-for-byte in sync with the originals — confirm and consider a single
-- shared source file to avoid drift.

-- Sentinel event id: the largest possible BIGINT, i.e. an event id that can
-- never be reached.
CREATE OR REPLACE FUNCTION hive.unreachable_event_id()
RETURNS BIGINT
LANGUAGE plpgsql
IMMUTABLE
AS
$$
BEGIN
RETURN 9223372036854775807; -- MAX BIGINT
END
$$;
-- Reserved block number for the "sink" (constant 0). Sink semantics are
-- defined elsewhere in the project.
CREATE OR REPLACE FUNCTION hive.block_sink_num()
RETURNS INT
LANGUAGE plpgsql
IMMUTABLE
AS
$$
BEGIN
RETURN 0;
END
$$;
-- Reserved account id for the "sink" (constant -1). Sink semantics are
-- defined elsewhere in the project.
CREATE OR REPLACE FUNCTION hive.account_sink_id()
RETURNS INT
LANGUAGE plpgsql
IMMUTABLE
AS
$$
BEGIN
RETURN -1;
END
$$;
......@@ -78,3 +78,4 @@ CREATE OR REPLACE FUNCTION hive.get_keyauths_operations()
RETURNS SETOF hive.get_operations_type
AS 'MODULE_PATHNAME', 'get_keyauths_operations' LANGUAGE C;
DROP FUNCTION IF EXISTS hive.is_keyauths_operation;
......@@ -21,3 +21,5 @@ DROP FUNCTION IF EXISTS hive.get_metadata_operations;
CREATE OR REPLACE FUNCTION hive.get_metadata_operations()
RETURNS SETOF hive.get_metadata_operations_type
AS 'MODULE_PATHNAME', 'get_metadata_operations' LANGUAGE C;
DROP FUNCTION IF EXISTS hive.is_metadata_operation;
......@@ -91,22 +91,22 @@ verify_table_schema() {
# Hash of the 'hive' schema in the original database.
BEFORE_UPDATE=$(sudo -Enu "$DB_ADMIN" psql -w $POSTGRES_ACCESS -d "$DB_NAME" -v ON_ERROR_STOP=on -U "$DB_ADMIN" -t -A -c "SELECT schema_hash FROM hive.create_database_hash('hive')")
# Hash of the 'hive' schema in the temporary post-update database.
AFTER_UPDATE=$(sudo -Enu "$DB_ADMIN" psql -w $POSTGRES_ACCESS -d "$DB_NAME_AFTER_UPDATE" -v ON_ERROR_STOP=on -U "$DB_ADMIN" -t -A -c "SELECT schema_hash FROM hive.create_database_hash('hive')")
# Matching hashes mean the update produced an identical table schema.
if [ "$BEFORE_UPDATE" = "$AFTER_UPDATE" ]; then
echo "The table schema is correct, verification completed."
echo "Dropping temporary database"
sudo -Enu "$DB_ADMIN" psql -aw $POSTGRES_ACCESS -d postgres -v ON_ERROR_STOP=on -U "$DB_ADMIN" -c "DROP DATABASE IF EXISTS $DB_NAME_AFTER_UPDATE;"
# NOTE(review): the three lines below repeat the three lines above verbatim.
# This looks like diff-rendering residue (old/new sides shown without +/-
# markers) — confirm against the repository that the branch body contains
# them only once.
echo "The table schema is correct, verification completed."
echo "Dropping temporary database"
sudo -Enu "$DB_ADMIN" psql -aw $POSTGRES_ACCESS -d postgres -v ON_ERROR_STOP=on -U "$DB_ADMIN" -c "DROP DATABASE IF EXISTS $DB_NAME_AFTER_UPDATE;"
else
# Hashes differ: dump the schema and show what changed, then fail.
save_table_schema
echo "Table schema is inconsistent"
echo "COLUMNS"
diff --suppress-common-lines before_update_columns.txt after_update_columns.txt || true
echo "CONSTRAINTS"
# NOTE(review): 'after_update_constraings.txt' is likely a typo for
# 'constraints' — diff would fail on the missing file, silently masked
# by '|| true'. Verify against save_table_schema's output filenames.
diff --suppress-common-lines before_update_constraints.txt after_update_constraings.txt || true
echo "INDEXES"
diff --suppress-common-lines before_update_indexes.txt after_update_indexes.txt || true
echo "Dropping temporary database"
sudo -Enu "$DB_ADMIN" psql -aw $POSTGRES_ACCESS -d postgres -v ON_ERROR_STOP=on -U "$DB_ADMIN" -c "DROP DATABASE IF EXISTS $DB_NAME_AFTER_UPDATE;"
# NOTE(review): this 'find' discards all output and its result is unused —
# it appears to be dead code; confirm whether a cleanup (e.g. rm) was meant.
find -type f -name '*.txt' > /dev/null 2>&1
exit 1
# NOTE(review): the block below repeats the failure branch above verbatim —
# same diff-rendering residue as noted earlier; confirm against the repo.
save_table_schema
echo "Table schema is inconsistent"
echo "COLUMNS"
diff --suppress-common-lines before_update_columns.txt after_update_columns.txt || true
echo "CONSTRAINTS"
diff --suppress-common-lines before_update_constraints.txt after_update_constraings.txt || true
echo "INDEXES"
diff --suppress-common-lines before_update_indexes.txt after_update_indexes.txt || true
echo "Dropping temporary database"
sudo -Enu "$DB_ADMIN" psql -aw $POSTGRES_ACCESS -d postgres -v ON_ERROR_STOP=on -U "$DB_ADMIN" -c "DROP DATABASE IF EXISTS $DB_NAME_AFTER_UPDATE;"
find -type f -name '*.txt' > /dev/null 2>&1
exit 1
fi
}
......@@ -137,12 +137,52 @@ make_update() {
sudo -Enu "$DB_ADMIN" psql -w $POSTGRES_ACCESS -d "$DB_NAME" -v ON_ERROR_STOP=on -U "$DB_ADMIN" -c "ALTER EXTENSION hive_fork_manager UPDATE"
}
# Fails (exit 4) if any non-hive table has a column whose type is declared in
# the 'hive' schema — such columns would be dropped when the extension's types
# are dropped during upgrade. Prints one line per offending column.
# Requires: DB_ADMIN, POSTGRES_ACCESS, DB_NAME set by the surrounding script.
check_tables_dont_reference_haf_types() {
echo
echo "Checking that none table references HAF type..."
# hive.operation is explicitly ignored, because we never drop this type, so it's safe as a column.
query="
SELECT table_schema,table_name,column_name,udt_schema,udt_name
FROM information_schema.columns
WHERE udt_schema='hive' AND table_schema<>'hive' AND udt_name<>'operation'"
# awk both formats the report and sets the exit status: NR>0 means at least
# one offending row was printed, so exit 4; otherwise exit 0.
sudo -Enu "$DB_ADMIN" psql -w $POSTGRES_ACCESS -d "$DB_NAME" -v ON_ERROR_STOP=on -U "$DB_ADMIN" -q -t -A -c "$query" | \
awk -F'|' '{print($1"."$2, "contains column", $3, "of type", $4"."$5, "which would be dropped on upgrade")} END{exit NR > 0 ? 4 : 0}'
}
# Fails (exit 4) if any non-hive table has a column using a DOMAIN declared in
# the 'hive' schema — mirrors check_tables_dont_reference_haf_types but for
# domains (information_schema reports domains separately from udt types).
# Requires: DB_ADMIN, POSTGRES_ACCESS, DB_NAME set by the surrounding script.
check_tables_dont_reference_haf_domains() {
echo
echo "Checking that none table references HAF domain..."
query="SELECT table_schema,table_name,column_name,domain_schema,domain_name FROM information_schema.columns WHERE domain_schema='hive' AND table_schema<>'hive'"
# awk prints one report line per offending column and exits 4 iff any exist.
sudo -Enu "$DB_ADMIN" psql -w $POSTGRES_ACCESS -d "$DB_NAME" -v ON_ERROR_STOP=on -U "$DB_ADMIN" -q -t -A -c "$query" | \
awk -F'|' '{print($1"."$2, "contains column", $3, "of type", $4"."$5, "which would be dropped on upgrade")} END{exit NR > 0 ? 4 : 0}'
}
# Fails (exit 3) if any C-language function in the 'hive' schema still points
# at a shared object whose filename does not end with "-$COMMIT_NEW_ID.so",
# i.e. the function was not rebound to the freshly installed library.
# Requires: COMMIT_NEW_ID, DB_ADMIN, POSTGRES_ACCESS, DB_NAME set by the
# surrounding script (COMMIT_NEW_ID is interpolated into the SQL below).
check_functions_were_updated() {
echo
echo "Checking that all C functions were properly updated..."
query="
SELECT p.proname,p.prosrc,p.probin
FROM pg_catalog.pg_proc AS p
JOIN pg_catalog.pg_namespace AS n ON p.pronamespace=n.oid
JOIN pg_catalog.pg_language AS l ON p.prolang=l.oid
WHERE n.nspname='hive' AND l.lanname='c' AND p.probin NOT LIKE '%-$COMMIT_NEW_ID.so'"
# awk prints one report line per stale function and exits 3 iff any exist.
sudo -Enu "$DB_ADMIN" psql -w $POSTGRES_ACCESS -d "$DB_NAME" -v ON_ERROR_STOP=on -U "$DB_ADMIN" -q -t -A -c "$query" | \
awk -v "HASH=$COMMIT_NEW_ID" -F'|' '{print("Function", $1, "references", $2, "in", $3 ", but", HASH, "was expected")} END{exit NR > 0 ? 3 : 0}'
}
# Main sequence of the update script: verify the schema round-trips, then run
# the pre-update reference checks, perform the update, and verify C functions.
verify_table_schema
get_deployed_version
generate_final_update_script
# NOTE(review): make_update appears twice below. This is most likely
# diff-rendering residue (the old call site shown next to the new one) —
# confirm against the repository; running the extension update twice is
# probably not intended.
make_update
check_tables_dont_reference_haf_types
check_tables_dont_reference_haf_domains
make_update
check_functions_were_updated
......@@ -767,7 +767,7 @@ sql_serializer_plugin::~sql_serializer_plugin() {}
void sql_serializer_plugin::set_program_options(appbase::options_description &cli, appbase::options_description &cfg)
{
cfg.add_options()("psql-url", boost::program_options::value<string>(), "postgres connection string")
("psql-index-threshold", appbase::bpo::value<uint32_t>()->default_value( 1'000'000 ), "indexes/constraints will be recreated if `psql_block_number + psql_index_threshold >= head_block_number`")
("psql-index-threshold", appbase::bpo::value<uint32_t>()->default_value( 20'000'000 ), "indexes/constraints will be recreated if `psql_block_number + psql_index_threshold >= head_block_number`")
("psql-operations-threads-number", appbase::bpo::value<uint32_t>()->default_value( 5 ), "number of threads which dump operations to database during reindexing")
("psql-transactions-threads-number", appbase::bpo::value<uint32_t>()->default_value( 2 ), "number of threads which dump transactions to database during reindexing")
("psql-account-operations-threads-number", appbase::bpo::value<uint32_t>()->default_value( 2 ), "number of threads which dump account operations to database during reindexing")
......
......@@ -14,6 +14,11 @@ MACRO( ADD_EXAMPLES_FUNCTIONAL_TESTS example_test_path)
MESSAGE( STATUS "Added functional examples tests '${test_target}'" )
ENDMACRO()
ADD_TEST( NAME test_update_script
COMMAND ${PROJECT_SOURCE_DIR}/tests/integration/functional/hive_fork_manager/test_update_script.sh --haf_binaries_dir=${CMAKE_BINARY_DIR}
WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
)
ADD_SQL_FUNCTIONAL_TEST( context_rewind/schema_test.sql )
ADD_SQL_FUNCTIONAL_TEST( context_rewind/register_table_test.sql )
......
#!/usr/bin/env bash
#
# Check that hive_fork_manager_update_script_generator.sh fails when it's supposed to fail
#
set -eu -o pipefail
SCRIPTPATH="$(cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P)"
SCRIPTS_DIR="$SCRIPTPATH/../../../../scripts"
UPDATE_DB_NAME=update-db-test
POSTGRES_VERSION=14
export PGUSER="haf_admin"
export PGHOST="/var/run/postgresql"
export PGDATABASE="$UPDATE_DB_NAME"
# Print the usage/help text for this test script to stdout.
# $0 expands to the script path, matching the original output exactly.
print_help () {
    printf '%s\n' \
        "Usage: $0 [OPTION[=VALUE]]..." \
        "" \
        "OPTIONS:" \
        "  --haf_binaries_dir=NAME" \
        "  --help Display this help screen and exit" \
        ""
}
# failswith EXPECTED_CODE CMD [ARGS...]
# Run CMD and verify it exits with EXPECTED_CODE; return 2 (after printing a
# diagnostic) on mismatch. The body runs inside a subshell "(...)" so that
# `set +e` is local and the checked command's failure does not abort the
# caller's `set -e` script.
failswith() {(
    set +e
    expected_exit_code="$1"
    shift
    "$@"
    actual_exit_code="$?"
    # Guard-style: success path returns immediately.
    [ "$actual_exit_code" -eq "$expected_exit_code" ] && return 0
    echo "TEST FAILED: expected to exit with $expected_exit_code, but exited with $actual_exit_code"
    return 2
)}
# Parse command-line options; only --haf_binaries_dir=... and --help are valid.
HAF_DIR=""
while [ $# -gt 0 ]; do
case "$1" in
--haf_binaries_dir=*)
HAF_DIR="${1#*=}" # strip the "--haf_binaries_dir=" prefix, keep the value
;;
--help)
print_help
exit 0
;;
-*)
# Unknown option: show help and fail.
echo "ERROR: '$1' is not a valid option"
echo
print_help
exit 1
;;
*)
# Positional arguments are not accepted at all.
echo "ERROR: '$1' is not a valid argument"
echo
print_help
exit 1
;;
esac
shift
done
# --haf_binaries_dir is mandatory; bail out early if it was not provided.
if [ "$HAF_DIR" = "" ]; then
echo "ERROR: --haf_binaries_dir is required option"
exit 1
fi
# Each test below recreates a fresh HAF database via setup_db.sh, optionally
# plants an object that should (or should not) break the upgrade, then runs
# the update script generator. Expected failure codes (3, 4) are asserted via
# failswith; they correspond to the C-function check (3) and the HAF
# type/domain reference checks (4) in the update script.
printf "\nTEST: Trying to upgrade from current database. It should pass, as nothing needs to be done.\n"
"$SCRIPTS_DIR/setup_db.sh" --haf-db-name="$UPDATE_DB_NAME"
sudo "$HAF_DIR/extensions/hive_fork_manager/hive_fork_manager_update_script_generator.sh" --haf-db-name="$UPDATE_DB_NAME"
# hive.operation is the one HAF type explicitly allowed as a column type.
printf "\nTEST: Creating table referencing hive.operation. This is allowed and should succeed.\n"
"$SCRIPTS_DIR/setup_db.sh" --haf-db-name="$UPDATE_DB_NAME"
sudo -Enu "$PGUSER" psql -w -d "$UPDATE_DB_NAME" -v ON_ERROR_STOP=on -q -t -A -c "create table good_table(id int, op hive.operation)"
sudo "$HAF_DIR/extensions/hive_fork_manager/hive_fork_manager_update_script_generator.sh" --haf-db-name="$UPDATE_DB_NAME"
# Any other hive-schema type as a column must abort the upgrade with code 4.
printf "\nTEST: Creating table referencing disallowed HAF type. Upgrade should fail.\n"
"$SCRIPTS_DIR/setup_db.sh" --haf-db-name="$UPDATE_DB_NAME"
sudo -Enu "$PGUSER" psql -w -d "$UPDATE_DB_NAME" -v ON_ERROR_STOP=on -q -t -A -c "create table bad_table(id int, comment hive.comment_operation)"
failswith 4 sudo "$HAF_DIR/extensions/hive_fork_manager/hive_fork_manager_update_script_generator.sh" --haf-db-name="$UPDATE_DB_NAME"
# Same rule for hive-schema domains: expect failure code 4.
printf "\nTEST: Creating table referencing disallowed HAF domain. Upgrade should fail.\n"
"$SCRIPTS_DIR/setup_db.sh" --haf-db-name="$UPDATE_DB_NAME"
sudo -Enu "$PGUSER" psql -w -d "$UPDATE_DB_NAME" -v ON_ERROR_STOP=on -q -t -A -c "create table bad_table(id int, account hive.account_name_type)"
failswith 4 sudo "$HAF_DIR/extensions/hive_fork_manager/hive_fork_manager_update_script_generator.sh" --haf-db-name="$UPDATE_DB_NAME"
# A C function in the hive schema bound to a foreign .so (here tablefunc from
# the hardcoded POSTGRES_VERSION=14 install path) must fail the upgrade with
# code 3.
printf "\nTEST: Check that function defined in hive namespace that doesn't reference current commit hash fails the upgrade.\n"
"$SCRIPTS_DIR/setup_db.sh" --haf-db-name="$UPDATE_DB_NAME"
sudo -Enu "$PGUSER" psql -w -d "$UPDATE_DB_NAME" -v ON_ERROR_STOP=on -q -t -A -c "CREATE FUNCTION hive.bad_function() RETURNS VOID VOLATILE AS '/lib/postgresql/${POSTGRES_VERSION}/lib/tablefunc.so', 'crosstab' language c;"
failswith 3 sudo "$HAF_DIR/extensions/hive_fork_manager/hive_fork_manager_update_script_generator.sh" --haf-db-name="$UPDATE_DB_NAME"
echo "Succeeded"