issue #223: correctly detect if reversible data have to be removed after hived restart

Closed
Marcin requested to merge mi_investigate_restart_with_app into develop

1 file changed: +606 -0
@@ -54,7 +54,71 @@ haf_image_build:
BINARY_CACHE_PATH: "$CI_PROJECT_DIR/haf-binaries"
HIVE_NETWORK_TYPE: mainnet
haf_image_build_testnet:
extends: .haf_image_build
variables:
BINARY_CACHE_PATH: "$CI_PROJECT_DIR/haf-testnet-binaries"
HIVE_NETWORK_TYPE: testnet
haf_image_build_mirrornet:
extends: .haf_image_build
variables:
BINARY_CACHE_PATH: "$CI_PROJECT_DIR/haf-mirrornet-binaries"
HIVE_NETWORK_TYPE: mirrornet
.haf-service: &haf-service
name: $HAF_IMAGE_NAME
alias: haf-instance
variables:
# Allow access from any network to eliminate CI IP addressing problems when hfm runs as a service
PG_ACCESS: |
"host all haf_admin 0.0.0.0/0 trust"
DATA_SOURCE: "${DATA_CACHE_HAF_PREFIX}_${HAF_COMMIT}"
LOG_FILE: $CI_JOB_NAME.log
command: ["--replay-blockchain", "--stop-replay-at-block=5000000"]
.hfm-only-service: &hfm-only-service
name: $HAF_IMAGE_NAME
alias: hfm-only-instance
variables:
# Allow access from any network to eliminate CI IP addressing problems when hfm runs as a service
PG_ACCESS: |
"host all haf_admin 0.0.0.0/0 trust"
"host all test_app_owner 0.0.0.0/0 trust"
"host all hived 0.0.0.0/0 trust"
"host all all 0.0.0.0/0 scram-sha-256"
command: [ "--execute-maintenance-script=${HAF_SOURCE_DIR}/scripts/maintenance-scripts/sleep_infinity.sh" ]
.hfm_functional_tests:
extends: .job-defaults
image:
name: $HAF_IMAGE_NAME
entrypoint: [""]
stage: build_and_test_phase_1
variables:
PG_ACCESS: "host all all 127.0.0.1/32 trust"
script:
- /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=$CI_PROJECT_DIR/scripts/maintenance-scripts/run_hfm_functional_tests.sh
artifacts:
paths:
- "**/*.log"
interruptible: true
tags:
- public-runner-docker
- hived
hfm_functional_tests:
extends: .hfm_functional_tests
needs:
- job: haf_image_build
artifacts: true
hfm_functional_tests_testnet:
extends: .hfm_functional_tests
needs:
- job: haf_image_build_testnet
artifacts: true
.pytest_based:
extends: .job-defaults
@@ -66,7 +130,337 @@ haf_image_build:
(cd $CI_PROJECT_DIR/tests/integration/haf-local-tools && poetry install)
echo -e "\e[0Ksection_end:$(date +%s):python_venv\r\e[0K"
haf_system_tests:
extends: .job-defaults
stage: build_and_test_phase_2
needs:
- job: haf_image_build_testnet
artifacts: true
image:
name: $HAF_IMAGE_NAME
entrypoint: [""]
variables:
PG_ACCESS: "host all all 127.0.0.1/32 trust"
script:
- /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=$CI_PROJECT_DIR/scripts/maintenance-scripts/run_haf_system_tests.sh
artifacts:
paths:
- "haf_system_tests.log"
- "**/generated_during_*"
- "**/generated_by_package_fixtures"
exclude:
- "**/generated_during_*/**/block_log"
- "**/generated_during_*/**/block_log.artifacts"
- "**/generated_during_*/**/shared_memory.bin"
- "**/generated_during_*/**/*.sst"
reports:
junit: tests/integration/system/haf/report.xml
when: always
expire_in: 1 week
interruptible: true
tags:
- public-runner-docker
dump_snapshot_5m_mirrornet:
extends: .job-defaults
stage: build_and_test_phase_1
needs:
- job: haf_image_build_mirrornet
artifacts: true
image: "$CI_REGISTRY_IMAGE/ci-base-image$BUILDER_IMAGE_TAG"
variables:
MIRRORNET_WORKING_DIR: "$CI_PROJECT_DIR/mirrornet_witness_node"
BINARY_CACHE_PATH: "$CI_PROJECT_DIR/haf-mirrornet-binaries"
HIVED_PATH: "$BINARY_CACHE_PATH/hived"
script:
#Prepare environment for hived run
- cd $CI_PROJECT_DIR/docker
- mkdir $MIRRORNET_WORKING_DIR
- cd $MIRRORNET_WORKING_DIR
- mkdir blockchain
- cd blockchain
- cp $BLOCK_LOG_SOURCE_DIR_MIRRORNET_5M/block_log .
#Prepare snapshot storage
- mkdir $SNAPSHOTS_PATH
- cd $SNAPSHOTS_PATH
- mkdir 5m_mirrornet
#Prepare snapshot
- $HIVED_PATH -d $MIRRORNET_WORKING_DIR --exit-before-sync --replay
- echo "plugin = state_snapshot" >> $MIRRORNET_WORKING_DIR/config.ini
- $HIVED_PATH -d $MIRRORNET_WORKING_DIR --dump-snapshot=snapshot --exit-before-sync
#Store snapshot in cache
- mv $MIRRORNET_WORKING_DIR/blockchain $SNAPSHOTS_PATH/5m_mirrornet
- mv $MIRRORNET_WORKING_DIR/snapshot/snapshot $SNAPSHOTS_PATH/5m_mirrornet
tags:
- data-cache-storage
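The snapshot dumped above is cached under $SNAPSHOTS_PATH/5m_mirrornet and consumed by haf_system_tests_mirrornet below through pytest's --snapshot-path option. As a rough sketch of the reverse operation, assuming hived's state_snapshot plugin with its --load-snapshot option and the data-dir layout used in the dump step (this job does not exist in the pipeline):

load_snapshot_example:                 # hypothetical job, for illustration only
  script:
    - mkdir -p $MIRRORNET_WORKING_DIR
    - cp -r $SNAPSHOTS_PATH/5m_mirrornet/blockchain $MIRRORNET_WORKING_DIR/
    - mkdir -p $MIRRORNET_WORKING_DIR/snapshot
    - cp -r $SNAPSHOTS_PATH/5m_mirrornet/snapshot $MIRRORNET_WORKING_DIR/snapshot/
    - echo "plugin = state_snapshot" >> $MIRRORNET_WORKING_DIR/config.ini
    - $HIVED_PATH -d $MIRRORNET_WORKING_DIR --load-snapshot=snapshot --exit-before-sync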
haf_system_tests_mirrornet:
stage: build_and_test_phase_2
extends: .pytest_based
timeout: 2h
needs:
- job: haf_image_build_mirrornet
artifacts: true
- job: dump_snapshot_5m_mirrornet
artifacts: false
image: "$CI_REGISTRY_IMAGE/ci-base-image$BUILDER_IMAGE_TAG"
services:
- *hfm-only-service
variables:
BINARY_CACHE_PATH: "$CI_PROJECT_DIR/haf-mirrornet-binaries"
HIVED_PATH: "$$BINARY_CACHE_PATH/hived"
COMPRESS_BLOCK_LOG_PATH: "$BINARY_CACHE_PATH/compress_block_log"
BLOCK_LOG_UTIL_PATH: "$BINARY_CACHE_PATH/block_log_util"
GET_DEV_KEY_PATH: "$BINARY_CACHE_PATH/get_dev_key"
CLI_WALLET_PATH: "$BINARY_CACHE_PATH/cli_wallet"
DB_NAME: haf_block_log
DB_URL: "postgresql://haf_admin@hfm-only-instance:5432/$DB_NAME"
script:
# check that postgres service is ready
- psql "$DB_URL" -c "SELECT 1"
- mkdir $CI_PROJECT_DIR/tests/integration/system/haf/mirrornet_tests/tmp_block_log
# This cp and the call to compress_block_log are here to keep backward compatibility with pipelines on the master branch (which still carries the 1.27.X line until the next HF is applied),
# whose nodes do not support the new format of the block_log.artifacts file;
# Issue: https://gitlab.syncad.com/hive/haf/-/issues/151
# copy block_log to tmp location
- cp $BLOCK_LOG_SOURCE_DIR_MIRRORNET_5M/block_log $CI_PROJECT_DIR/tests/integration/system/haf/mirrornet_tests/tmp_block_log
# copy block_log and generate new artifacts with compress_block_log util
- time $COMPRESS_BLOCK_LOG_PATH --input-block-log $CI_PROJECT_DIR/tests/integration/system/haf/mirrornet_tests/tmp_block_log/block_log --output-block-log $CI_PROJECT_DIR/tests/integration/system/haf/mirrornet_tests/block_log --decompress
# drop tmp location
- rm -r $CI_PROJECT_DIR/tests/integration/system/haf/mirrornet_tests/tmp_block_log
# prepare environment and run tests
- cd $CI_PROJECT_DIR/tests/integration/system/haf/mirrornet_tests
- pytest --junitxml report.xml --timeout=3600 --block-log-path=$CI_PROJECT_DIR/tests/integration/system/haf/mirrornet_tests/block_log --snapshot-path=$SNAPSHOTS_PATH/5m_mirrornet/snapshot -n ${PYTEST_NUMBER_OF_PROCESSES} -m mirrornet
artifacts:
paths:
- "**/generated_during_*"
- "**/generated_by_package_fixtures"
exclude:
- "**/generated_during_*/**/block_log"
- "**/generated_during_*/**/block_log.artifacts"
- "**/generated_during_*/**/shared_memory.bin"
- "**/generated_during_*/**/*.sst"
reports:
junit: tests/integration/system/haf/mirrornet_tests/report.xml
when: always
expire_in: 1 week
interruptible: true
tags:
- data-cache-storage
applications_system_tests:
extends: .job-defaults
stage: build_and_test_phase_2
needs:
- job: haf_image_build_testnet
artifacts: true
image:
name: $HAF_IMAGE_NAME
entrypoint: [""]
variables:
PG_ACCESS: "host all all 127.0.0.1/32 trust"
script:
- /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=$CI_PROJECT_DIR/scripts/maintenance-scripts/run_application_system_tests.sh
artifacts:
paths:
- "applications_system_tests.log"
- "**/generated_during_*"
- "**/generated_by_package_fixtures"
exclude:
- "**/generated_during_*/**/block_log"
- "**/generated_during_*/**/block_log.artifacts"
- "**/generated_during_*/**/shared_memory.bin"
- "**/generated_during_*/**/*.sst"
reports:
junit: tests/integration/system/applications/report.xml
when: always
expire_in: 1 week
interruptible: true
tags:
- public-runner-docker
.replay_step:
extends: .job-defaults
stage: build_and_test_phase_2
needs:
- job: haf_image_build
artifacts: true
image: "$CI_REGISTRY_IMAGE/ci-base-image$BUILDER_IMAGE_TAG"
services:
- *hfm-only-service
variables:
PATTERNS_PATH: "" #Should be overriden in derived jobs
RO_BLOCK_LOG_DIRECTORY: "/blockchain/block_log_5m/"
BINARY_CACHE_PATH: "$CI_PROJECT_DIR/haf-testnet-binaries"
HIVED_PATH: "$CI_PROJECT_DIR/haf-binaries/hived"
DB_NAME: haf_block_log
DB_ADMIN: "haf_admin"
POSTGRES_HOST: "hfm-only-instance"
POSTGRES_PORT: "5432"
DB_URL: "postgresql://haf_admin@hfm-only-instance:5432/haf_block_log"
REPLAY: "--force-replay"
before_script:
- !reference [.pytest_based, before_script]
- ls -lath $CI_PROJECT_DIR
# replay
- |
echo -e "\e[0Ksection_start:$(date +%s):replay[collapsed=true]\r\e[0KExecuting replay..."
test -n "$PATTERNS_PATH"
mkdir -vp "${PATTERNS_PATH}/blockchain"
cp "${RO_BLOCK_LOG_DIRECTORY}/block_log" "${PATTERNS_PATH}/blockchain/"
cd $CI_PROJECT_DIR/tests/integration/replay
$HIVED_PATH --data-dir $PATTERNS_PATH $REPLAY --exit-before-sync --psql-url $DB_URL 2>&1 | tee -i node_logs.log
echo -e "\e[0Ksection_end:$(date +%s):replay\r\e[0K"
script:
- pytest --junitxml report.xml
artifacts:
paths:
- "**/node_logs.log"
- "**/node_logs1.log"
- "**/generated_during_*"
- "**/generated_by_package_fixtures"
- "**/*.out.csv"
reports:
junit: tests/integration/replay/report.xml
when: always
expire_in: 1 week
interruptible: true
tags:
- public-runner-docker
- hived-for-tests
replay_with_haf:
extends: .replay_step
variables:
PATTERNS_PATH: "$CI_PROJECT_DIR/tests/integration/replay/patterns/no_filter"
replay_with_haf_from_4_9m:
extends: .replay_step
variables:
PATTERNS_PATH: "$CI_PROJECT_DIR/tests/integration/replay/patterns_4_9m/no_filter"
replay_accounts_filtered_with_haf_from_4_9m:
extends: .replay_step
variables:
PATTERNS_PATH: "$CI_PROJECT_DIR/tests/integration/replay/patterns_4_9m/accounts_filtered"
replay_accounts_filtered_with_haf:
extends: .replay_step
variables:
PATTERNS_PATH: "$CI_PROJECT_DIR/tests/integration/replay/patterns/accounts_filtered"
replay_accounts_operations_filtered_with_haf:
extends: .replay_step
variables:
PATTERNS_PATH: "$CI_PROJECT_DIR/tests/integration/replay/patterns/accounts_operations_filtered"
replay_virtual_operations_filtered_with_haf:
extends: .replay_step
variables:
PATTERNS_PATH: "$CI_PROJECT_DIR/tests/integration/replay/patterns/virtual_operations_filtered"
replay_operations_filtered_with_haf:
extends: .replay_step
variables:
PATTERNS_PATH: "$CI_PROJECT_DIR/tests/integration/replay/patterns/operations_filtered"
replay_body_operations_filtered_with_haf:
extends: .replay_step
variables:
PATTERNS_PATH: "$CI_PROJECT_DIR/tests/integration/replay/patterns/body_operations_filtered"
replay_accounts_body_operations_filtered_with_haf:
extends: .replay_step
variables:
PATTERNS_PATH: "$CI_PROJECT_DIR/tests/integration/replay/patterns/accounts_body_operations_filtered"
replay_with_update:
extends: .job-defaults
image:
name: $HAF_IMAGE_NAME
entrypoint: [""]
stage: build_and_test_phase_2
needs:
- job: haf_image_build
artifacts: true
variables:
PG_ACCESS: "host all all 127.0.0.1/32 trust"
script:
- /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=$CI_PROJECT_DIR/scripts/maintenance-scripts/run_replay_with_update.sh
artifacts:
paths:
- "replay_with_update.log"
- "node_logs.log"
- "node_logs1.log"
tags:
- public-runner-docker
- hived-for-tests
replay_with_keyauths:
extends: .job-defaults
image:
name: $HAF_IMAGE_NAME
entrypoint: [""]
stage: build_and_test_phase_2
needs:
- job: haf_image_build
artifacts: true
variables:
PG_ACCESS: "host all all 127.0.0.1/32 trust"
script:
- /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=$CI_PROJECT_DIR/tests/integration/state_provider/run_replay_with_keyauth.sh
artifacts:
paths:
- "replay_with_keyauths.log"
- "node_logs.log"
- "node_logs1.log"
tags:
- public-runner-docker
- hived-for-tests
replay_with_json_metadata:
extends: .job-defaults
image:
name: $HAF_IMAGE_NAME
entrypoint: [""]
stage: build_and_test_phase_2
needs:
- job: haf_image_build
artifacts: true
variables:
PG_ACCESS: "host all all 127.0.0.1/32 trust"
script:
- /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=$CI_PROJECT_DIR/tests/integration/state_provider/run_replay_with_json_metadata.sh
artifacts:
paths:
- "replay_with_json_metadata.log"
- "node_logs.log"
- "node_logs1.log"
tags:
- public-runner-docker
- hived-for-tests
replay_with_app:
extends: .job-defaults
image:
name: $HAF_IMAGE_NAME
entrypoint: [""]
stage: build_and_test_phase_2
needs:
- job: haf_image_build
artifacts: true
variables:
PG_ACCESS: "host all all 127.0.0.1/32 trust"
script:
- /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=$CI_PROJECT_DIR/scripts/maintenance-scripts/run_live_replay_with_app.sh
artifacts:
paths:
- "replay_with_app.log"
- "node_logs.log"
- "node_logs1.log"
tags:
- public-runner-docker
- hived-for-tests
replay_restarts_with_app:
extends: .job-defaults
@@ -88,7 +482,165 @@ replay_restarts_with_app:
- public-runner-docker
- hived-for-tests
replay_with_restart:
extends: .job-defaults
image:
name: $HAF_IMAGE_NAME
entrypoint: [""]
stage: build_and_test_phase_2
needs:
- job: haf_image_build
artifacts: true
variables:
PG_ACCESS: "host all all 127.0.0.1/32 trust"
script:
- /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=$CI_PROJECT_DIR/scripts/maintenance-scripts/run_replay_with_breaks.sh
artifacts:
paths:
- "node_logs.log"
tags:
- public-runner-docker
- hived-for-tests
update_with_wrong_table_schema:
extends: .job-defaults
image:
name: $HAF_IMAGE_NAME
entrypoint: [""]
stage: build_and_test_phase_2
needs:
- job: haf_image_build
artifacts: true
variables:
PG_ACCESS: "host all all 127.0.0.1/32 trust"
script:
- /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=$CI_PROJECT_DIR/scripts/maintenance-scripts/run_update_with_wrong_table_schema.sh
artifacts:
paths:
- "update_with_wrong_table_schema.log"
- "node_logs.log"
tags:
- public-runner-docker
- hived-for-tests
block_api_tests:
extends: .replay_step
image: $CI_REGISTRY_IMAGE/ci-base-image:ubuntu22.04-8-jmeter
needs:
- job: haf_image_build
artifacts: true
variables:
FF_NETWORK_PER_BUILD: 1
PATTERNS_PATH: "$CI_PROJECT_DIR/tests/integration/replay/patterns/accounts_body_operations_filtered"
BENCHMARK_DIR: "$CI_PROJECT_DIR/hive/tests/python/hive-local-tools/tests_api/benchmarks"
script:
# setup
- |
echo -e "\e[0Ksection_start:$(date +%s):blocks_api_test_setup[collapsed=true]\r\e[0KSetting up blocks api tests..."
psql $DB_URL -c "CREATE ROLE bench LOGIN PASSWORD 'mark' INHERIT IN ROLE hived_group;"
export BENCHMARK_DB_URL="postgresql://bench:mark@hfm-only-instance:5432/$DB_NAME"
echo -e "\e[0Ksection_end:$(date +%s):blocks_api_test_setup\r\e[0K"
# run pattern tests
- |
echo -e "\e[0Ksection_start:$(date +%s):blocks_api_test[collapsed=true]\r\e[0KRunning blocks api tests..."
cd "$BENCHMARK_DIR"
python3 benchmark.py --loops 200 --threads 5 -n blocks_api -p 5432 -c perf_5M_light.csv --skip-version-check -d wdir --postgres $BENCHMARK_DB_URL --call-style postgres 2>&1 | tee -i $CI_PROJECT_DIR/python_benchmark.log
echo -e "\e[0Ksection_end:$(date +%s):blocks_api_test\r\e[0K"
# generate JUnit report file
- |
echo -e "\e[0Ksection_start:$(date +%s):report[collapsed=true]\r\e[0KGenerating Junit report..."
m2u --input "$BENCHMARK_DIR/wdir/raw_jmeter_report.xml" --output $CI_PROJECT_DIR/report.junit
echo -e "\e[0Ksection_end:$(date +%s):report\r\e[0K"
artifacts:
name: "$CI_JOB_NAME-$CI_COMMIT_REF_NAME"
reports:
junit: $CI_PROJECT_DIR/report.junit
paths:
- $BENCHMARK_DIR/wdir
- $CI_PROJECT_DIR/python_benchmark.log
when: always
expire_in: 1 week
tags:
- public-runner-docker
- hived-for-tests
prepare_haf_data:
extends: .prepare_haf_data_5m
needs:
- job: haf_image_build
artifacts: true
stage: build_and_test_phase_1
variables:
HIVE_NETWORK_TYPE: mainnet
BLOCK_LOG_SOURCE_DIR: "$BLOCK_LOG_SOURCE_DIR_5M"
CONFIG_INI_SOURCE: "$CI_PROJECT_DIR/docker/config_5M.ini"
tags:
- data-cache-storage
dead_app_auto_detach:
extends: .job-defaults
stage: build_and_test_phase_2
image: "$CI_REGISTRY_IMAGE/ci-base-image$BUILDER_IMAGE_TAG"
needs:
- job: prepare_haf_data
artifacts: true
services:
- name: $HAF_IMAGE_NAME
alias: haf-instance
variables:
# Allow access from any network to eliminate CI IP addressing problems when hfm runs as a service
PG_ACCESS: |
"host all haf_admin 0.0.0.0/0 trust"
"host all hived 0.0.0.0/0 trust"
"host all test_app_owner 0.0.0.0/0 trust"
DATA_SOURCE: "${DATA_CACHE_HAF_PREFIX}_${HAF_COMMIT}"
LOG_FILE: $CI_JOB_NAME.log
command: ["--replay-blockchain", "--stop-replay-at-block=1000000"]
variables:
HIVED_UID: $HIVED_UID
HAF_COMMIT: $HAF_COMMIT
script:
- $CI_PROJECT_DIR/tests/integration/system/applications/auto_detaching/scenario1.sh "haf-instance"
artifacts:
paths:
- scenario*.log
tags:
- data-cache-storage
start_haf_as_service:
extends: .job-defaults
stage: build_and_test_phase_2
image: "$CI_REGISTRY_IMAGE/ci-base-image$BUILDER_IMAGE_TAG"
needs:
- job: prepare_haf_data
artifacts: true
services:
- *haf-service
variables:
HAF_POSTGRES_URL: postgresql://haf_admin@haf-instance:5432/haf_block_log
HIVED_UID: $HIVED_UID
HAF_COMMIT: $HAF_COMMIT
script:
- curl -I haf-instance:8091 || (echo "error connecting to service haf-instance" && false)
- |
curl -XPOST -d '{
"jsonrpc": "2.0",
"method": "database_api.get_dynamic_global_properties",
"params": {
},
"id": 2
}' haf-instance:8091
after_script:
- rm docker_entrypoint.log -f
- cp "${DATA_CACHE_HAF_PREFIX}_${HAF_COMMIT}/datadir/$CI_JOB_NAME.log" "$CI_PROJECT_DIR/docker_entrypoint.log"
artifacts:
paths:
- docker_entrypoint.log
tags:
- data-cache-storage
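For reference, when the service is healthy the JSON-RPC call above returns a standard envelope; roughly of the form below (field list abridged, values elided, shape assumed from the public database_api rather than taken from this pipeline):

{
  "jsonrpc": "2.0",
  "result": {
    "head_block_number": ...,
    "head_block_id": "...",
    "time": "...",
    "current_witness": "..."
  },
  "id": 2
}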
cleanup_haf_cache_manual:
extends: .cleanup_cache_manual_template
@@ -114,3 +666,57 @@ cleanup_old_haf_cache:
tags:
- data-cache-storage
build_and_publish_image:
stage: publish
extends: .publish_docker_image_template
before_script:
- !reference [.publish_docker_image_template, before_script]
script:
- scripts/ci-helpers/build_and_publish_instance.sh
tags:
- public-runner-docker
- hived-for-tests
double_haf_replay_tests:
extends: .docker_image_builder_job
stage: build_and_test_phase_2
variables:
CONFIG_INI_SOURCE: "$CI_PROJECT_DIR/docker/config_5M.ini"
needs:
- job: haf_image_build
artifacts: true
script:
- docker container ps -a
- docker images
- docker context ls
tags:
- public-runner-docker
- hived-for-tests
op_body_filter_tests:
extends: .job-defaults
stage: build_and_test_phase_2
needs:
- job: haf_image_build_testnet
artifacts: true
image:
name: $HAF_IMAGE_NAME
entrypoint: [""]
script:
- /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=$CI_PROJECT_DIR/scripts/maintenance-scripts/run_op_body_filter_tests.sh
artifacts:
paths:
- "op_body_filter_tests.log"
- "**/generated_during_*"
- "**/generated_by_package_fixtures"
exclude:
- "**/generated_during_*/**/block_log"
- "**/generated_during_*/**/block_log.artifacts"
- "**/generated_during_*/**/*.sst"
reports:
junit: tests/integration/tools/op_body_filter/report.xml
when: always
expire_in: 1 week
interruptible: true
tags:
- public-runner-docker