Commits on Source (7)
......@@ -277,6 +277,8 @@ COPY --from=build --chown=postgres:postgres "${HAF_SOURCE_DIR}/docker/pg_hba.con
COPY --from=build --chown=haf_admin:users "${HAF_SOURCE_DIR}/docker/cron_jobs.sql" .
# Reset HIVE_SUBDIR after copying files from build stage
ENV HIVE_SUBDIR=.
ENV DATADIR=/home/hived/datadir
+ # Use the default location (inside datadir) for the shm file. If SHM should be placed on a different device, set this to the mapped volume `/home/hived/shm_dir` and map that volume in docker run
+ ENV SHM_DIR=${DATADIR}/blockchain
......
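For the new SHM_DIR default above: if the shm file should live on a separate device, the run command might look roughly like this (the host paths and the image placeholder are illustrative, not taken from this diff):

# Hypothetical run: override SHM_DIR and map a fast host disk into the container.
docker run \
  --env SHM_DIR=/home/hived/shm_dir \
  --volume /mnt/nvme/shm:/home/hived/shm_dir \
  --volume /srv/haf/datadir:/home/hived/datadir \
  <haf-image>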
......@@ -254,7 +254,7 @@ if sudo --user=postgres -n [ ! -d "$PGDATA" -o ! -f "$PGDATA/PG_VERSION" ]; then
echo "Attempting to setup postgres instance: running setup_postgres.sh..."
sudo -n "/home/haf_admin/source/${HIVE_SUBDIR}/scripts/setup_postgres.sh" --haf-admin-account=haf_admin --haf-binaries-dir="/home/haf_admin/build" --haf-database-store="/home/hived/datadir/haf_db_store/tablespace" --install-extension=${HAF_INSTALL_EXTENSION:-yes}
sudo -n "/home/haf_admin/source/${HIVE_SUBDIR}/scripts/setup_postgres.sh" --haf-admin-account=haf_admin --haf-binaries-dir="/home/haf_admin/build" --haf-database-store="/home/hived/datadir/haf_db_store/tablespace" --install-extension="${HAF_INSTALL_EXTENSION:-"yes"}"
echo "Postgres instance setup completed."
......@@ -265,7 +265,7 @@ else
echo "Attempting to setup postgres instance already containing HAF database..."
# in case the container is restarted over an already existing (and potentially filled) data directory, we need to be sure that the docker-internal postgres has the HFM extension deployed
- sudo -n "/home/haf_admin/source/${HIVE_SUBDIR}/scripts/setup_postgres.sh" --haf-admin-account=haf_admin --haf-binaries-dir="/home/haf_admin/build" --haf-database-store="/home/hived/datadir/haf_db_store/tablespace" --install-extension=${HAF_INSTALL_EXTENSION:-yes}
+ sudo -n "/home/haf_admin/source/${HIVE_SUBDIR}/scripts/setup_postgres.sh" --haf-admin-account=haf_admin --haf-binaries-dir="/home/haf_admin/build" --haf-database-store="/home/hived/datadir/haf_db_store/tablespace" --install-extension="${HAF_INSTALL_EXTENSION:-"yes"}"
sudo -n "/usr/share/postgresql/${POSTGRES_VERSION}/extension/hive_fork_manager_update_script_generator.sh" --haf-admin-account=haf_admin --haf-db-name=haf_block_log
echo "Postgres instance setup completed."
......
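The quoting change above guards the default expansion against word splitting; a minimal sketch of the behavior in plain bash:

# Without quotes, a value containing spaces splits into several arguments.
unset HAF_INSTALL_EXTENSION
echo --install-extension="${HAF_INSTALL_EXTENSION:-"yes"}"          # prints: --install-extension=yes
HAF_INSTALL_EXTENSION="if possible"
printf '<%s>\n' --install-extension=${HAF_INSTALL_EXTENSION:-yes}   # unquoted: two words
printf '<%s>\n' --install-extension="${HAF_INSTALL_EXTENSION:-yes}" # quoted: one word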
......@@ -119,7 +119,6 @@ max_connections = 100 # (change requires restart)
# - Memory -
shared_buffers = 128MB # min 128kB
# (change requires restart)
#huge_pages = try # on, off, or try
# (change requires restart)
......@@ -222,8 +221,8 @@ dynamic_shared_memory_type = posix # the default is the first option
# - Checkpoints -
#checkpoint_timeout = 5min # range 30s-1d
- max_wal_size = 1GB
- min_wal_size = 80MB
+ max_wal_size = 12GB
+ min_wal_size = 1GB
#checkpoint_completion_target = 0.5 # checkpoint target duration, 0.0 - 1.0
#checkpoint_flush_after = 256kB # measured in pages, 0 disables
#checkpoint_warning = 30s # 0 disables
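A quick way to confirm the new WAL sizing once the server restarts (the database name is taken from this repo's scripts; adjust the connection as needed):

# Both settings can be read back from a running server.
psql -d haf_block_log -c "SHOW max_wal_size;"   # expected: 12GB
psql -d haf_block_log -c "SHOW min_wal_size;"   # expected: 1GB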
......@@ -788,11 +787,11 @@ external_pid_file = '/var/run/postgresql/17-main.pid' # write an extra PID fil
idle_in_transaction_session_timeout=60000 #ms
shared_buffers = 16GB
- effective_cache_size = 8GB
+ effective_cache_size = 16GB
maintenance_work_mem = 4GB
- work_mem = 1024MB
+ work_mem = 64MB
- checkpoint_completion_target = 0.8
+ checkpoint_completion_target = 0.9
checkpoint_timeout = 60min
wal_buffers = 512MB
......@@ -807,6 +806,10 @@ max_wal_senders = 0
# Set to 4 to build indexes faster
max_parallel_maintenance_workers = 4
+ # these have to be set in postgresql.conf; they cannot be changed later
+ autovacuum_max_workers = 5
+ autovacuum_vacuum_cost_limit = 5000
# Allows overriding above configuration settings
# include files ending in '.conf' from
......
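For the autovacuum settings added above, pg_settings shows whether a parameter is fixed at server start; a hedged check (connection details assumed, as before):

# context = 'postmaster' means the value is fixed at startup,
# which is why it has to live in postgresql.conf.
psql -d haf_block_log -c "SELECT name, setting, context
  FROM pg_settings
  WHERE name IN ('autovacuum_max_workers', 'autovacuum_vacuum_cost_limit');"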
- Subproject commit 74eb54442330ace71c37a43b464aee6b1bd4dae2
+ Subproject commit a74bad9a396b3e773a554a847dc3b72c3f7ee98d
include:
- project: 'hive/hive'
- ref: 2afff0e42851ad0b58428abd690af9e648bf4745 #develop
+ ref: b2f0703551880c36797da6382fb1ee227877dedd #develop
file: '/scripts/ci-helpers/prepare_data_image_job.yml'
.prepare_haf_image:
......
......@@ -7,7 +7,6 @@ source "$SCRIPTPATH/common.sh"
log_exec_params "$@"
print_help () {
echo "Usage: $0 [OPTION[=VALUE]]..."
echo
......@@ -16,7 +15,8 @@ print_help () {
echo " --host=VALUE Specify postgreSQL host location (defaults to /var/run/postgresql)."
echo " --port=NUMBER Specify a postgreSQL operating port (defaults to 5432)."
echo " --postgres-url=URL Specify postgreSQL connection url directly."
echo " --haf-app-account=NAME Specify an account name to be added to the 'hive_applications_owner_group' group."
echo " --haf-app-account=NAME Specify an account name to be added to the base group."
echo " --base-group=GROUP Specify the base group (defaults to hive_applications_owner_group)."
echo " --public Enable query_supervisor limiting for the haf_app_account."
echo " --help Display this help screen and exit."
echo
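An illustrative invocation of the new option (the script path, account, and group names here are hypothetical):

# Create an app role inside a custom owner group instead of the default one.
./scripts/create_haf_app_account.sh \
  --haf-app-account=my_app \
  --base-group=my_owner_group \
  --public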
......@@ -27,7 +27,7 @@ create_haf_app_account() {
local haf_app_account="$2"
local is_public="$3"
local base_group="hive_applications_owner_group"
local base_group="$BASE_GROUP"
local alter_to_public=""
$is_public && alter_to_public="ALTER ROLE ${haf_app_account} SET query_supervisor.limits_enabled TO true;"
......@@ -35,7 +35,7 @@ create_haf_app_account() {
DO \$$
BEGIN
BEGIN
- CREATE ROLE $haf_app_account WITH LOGIN INHERIT IN ROLE hive_applications_owner_group;
+ CREATE ROLE $haf_app_account WITH LOGIN INHERIT IN ROLE ${base_group};
EXCEPTION WHEN DUPLICATE_OBJECT THEN
RAISE NOTICE '$haf_app_account role already exists';
END;
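To verify the role really landed in the chosen base group, a membership query along these lines could be run afterwards (the role name is hypothetical):

# List the groups the new role belongs to (pg_auth_members maps members to groups).
psql -c "SELECT r.rolname AS role, g.rolname AS member_of
         FROM pg_auth_members m
         JOIN pg_roles r ON r.oid = m.member
         JOIN pg_roles g ON g.oid = m.roleid
         WHERE r.rolname = 'my_app';"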
......@@ -47,13 +47,15 @@ EOF
}
# Default values for variables
HAF_APP_ACCOUNT=""
POSTGRES_HOST="/var/run/postgresql"
POSTGRES_PORT=5432
POSTGRES_URL=""
PUBLIC=false
+ BASE_GROUP="hive_applications_owner_group"
# Parse command line arguments
while [ $# -gt 0 ]; do
case "$1" in
--host=*)
......@@ -68,6 +70,9 @@ while [ $# -gt 0 ]; do
--haf-app-account=*)
HAF_APP_ACCOUNT="${1#*=}"
;;
+ --base-group=*)
+ BASE_GROUP="${1#*=}"
+ ;;
--public)
PUBLIC=true
;;
......@@ -87,8 +92,8 @@ while [ $# -gt 0 ]; do
print_help
exit 2
;;
- esac
- shift
+ esac
+ shift
done
if [ -z "$POSTGRES_URL" ]; then
......@@ -97,9 +102,9 @@ else
POSTGRES_ACCESS=$POSTGRES_URL
fi
- _TST_HAF_APP_ACCOUNT=${HAF_APP_ACCOUNT:? "Missing application account name - it should be specified by using `--haf-app-account=name` option"}
+ # Ensure that the haf app account is specified
+ _TST_HAF_APP_ACCOUNT=${HAF_APP_ACCOUNT:? "Missing application account name - it should be specified by using '--haf-app-account=name' option"}
echo $POSTGRES_ACCESS
create_haf_app_account "$POSTGRES_ACCESS" "$HAF_APP_ACCOUNT" ${PUBLIC}
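The ${HAF_APP_ACCOUNT:? ...} guard above is plain POSIX parameter expansion; a minimal sketch of what it does:

# ${VAR:?message} aborts the script with the message when VAR is unset or empty.
unset HAF_APP_ACCOUNT
: "${HAF_APP_ACCOUNT:?Missing application account name}"   # exits non-zero, printing the message
echo "never reached"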
......@@ -565,15 +565,3 @@ END;
$BODY$
;
- CREATE OR REPLACE FUNCTION hive.test()
- RETURNS VOID
- LANGUAGE plpgsql
- IMMUTABLE
- AS
- $BODY$
- BEGIN
- RETURN;
- END;
- $BODY$
- ;
- #! /bin/bash
+ #! /bin/bash -x
set -euo pipefail
......@@ -64,6 +64,13 @@ done
test_extension_update() {
# copy sources to a build directory; they will be modified there to create a real new version of the hfm extension
+ COPY_SRC_PATH="${HAF_DIR}/src_copy"
+ COPY_BUILD_PATH="${HAF_DIR}/src_copy/build"
+ rm -rf ${COPY_SRC_PATH}
+ mkdir -p ${COPY_SRC_PATH}
+ mkdir -p ${COPY_BUILD_PATH}
+ cp -a ${DIR}/. ${COPY_SRC_PATH}
POSTGRES_VERSION=17
echo "Add function testfun to schema hive"
......@@ -72,18 +79,17 @@ test_extension_update() {
# the old libhfm has to be removed so that, in case of a corrupted haf setup, the old libhfm won't be used
sudo rm -rf /usr/lib/postgresql/${POSTGRES_VERSION}/lib/libhfm-*
- # modify the hived_api.sql file
- echo -e "CREATE OR REPLACE FUNCTION hive.test() \n RETURNS void \n LANGUAGE plpgsql \n VOLATILE AS \n\$BODY\$ \nBEGIN \nRAISE NOTICE 'test'; \nEND; \n\$BODY\$;" >> $DIR/src/hive_fork_manager/hived_api.sql
+ # modify the hived_api.sql file; a new function test is added to the new version of hfm
+ echo -e "CREATE OR REPLACE FUNCTION hive.test() \n RETURNS void \n LANGUAGE plpgsql \n VOLATILE AS \n\$BODY\$ \nBEGIN \nRAISE NOTICE 'test'; \nEND; \n\$BODY\$;" >> "${COPY_SRC_PATH}/src/hive_fork_manager/hived_api.sql"
# commit changes to make a new hash
- git -C $DIR config --global user.name "abc"
- git -C $DIR config --global user.email "abc@example.com"
- git -C $DIR config --global --add safe.directory /builds/hive/haf
- git -C $DIR add src/hive_fork_manager/hived_api.sql
- git -C $DIR commit -m "test"
- # rebuild haf
- test -n "$HAF_DIR" && rm "$HAF_DIR"/* -rf
- $SETUP_DIR/build.sh --cmake-arg="-DHIVE_LINT=OFF" --haf-source-dir="$DIR" --haf-binaries-dir="$HAF_DIR" extension.hive_fork_manager
- (cd $HAF_DIR; sudo ninja install)
+ git -C ${COPY_BUILD_PATH} config --global user.name "abc"
+ git -C ${COPY_BUILD_PATH} config --global user.email "abc@example.com"
+ git -C ${COPY_BUILD_PATH} config --global --add safe.directory /builds/hive/haf
+ git -C ${COPY_BUILD_PATH} add "${COPY_SRC_PATH}/src/hive_fork_manager/hived_api.sql"
+ git -C ${COPY_BUILD_PATH} commit -m "test"
+ # rebuild copy of haf
+ ${COPY_SRC_PATH}/scripts/build.sh --cmake-arg="-DHIVE_LINT=OFF" --haf-source-dir="${COPY_SRC_PATH}" --haf-binaries-dir="${COPY_BUILD_PATH}" extension.hive_fork_manager
+ (cd ${COPY_BUILD_PATH}; sudo ninja install)
# run generator script
sudo /usr/share/postgresql/${POSTGRES_VERSION}/extension/hive_fork_manager_update_script_generator.sh
......@@ -101,7 +107,19 @@ test_extension_update() {
END
\$\$ LANGUAGE plpgsql;"
+ # check if the function test added in the new hfm version exists
+ sudo -Enu "$DB_ADMIN" psql -d "$DB_NAME" -v ON_ERROR_STOP=on -U "$DB_ADMIN" -c "
+ DO \$\$
+ BEGIN
+ ASSERT EXISTS (
+ SELECT 1
+ FROM pg_proc
+ JOIN pg_namespace ON pg_proc.pronamespace = pg_namespace.oid
+ WHERE pg_proc.proname = 'test'
+ AND pg_namespace.nspname = 'hive'
+ ), 'Function hive.test() does not exist when it should.';
+ END
+ \$\$ LANGUAGE plpgsql;"
}
test_extension_update
......