test execution does not modify sources

Merged Marcin requested to merge mi/problem_with_test_modified_sql into develop
1 file changed: +2, -569
@@ -57,17 +57,7 @@ haf_image_build:
    BINARY_CACHE_PATH: "$CI_PROJECT_DIR/haf-binaries"
    HIVE_NETWORK_TYPE: mainnet

haf_image_build_testnet:
  extends: .haf_image_build
  variables:
    BINARY_CACHE_PATH: "$CI_PROJECT_DIR/haf-testnet-binaries"
    HIVE_NETWORK_TYPE: testnet

haf_image_build_mirrornet:
  extends: .haf_image_build
  variables:
    BINARY_CACHE_PATH: "$CI_PROJECT_DIR/haf-mirrornet-binaries"
    HIVE_NETWORK_TYPE: mirrornet

.haf-service: &haf-service
  name: $HAF_IMAGE_NAME
@@ -111,17 +101,9 @@ haf_image_build_mirrornet:
    - public-runner-docker
    - hived

hfm_functional_tests:
  extends: .hfm_functional_tests
  needs:
    - job: haf_image_build
      artifacts: true

hfm_functional_tests_testnet:
  extends: .hfm_functional_tests
  needs:
    - job: haf_image_build_testnet
      artifacts: true

.pytest_based:
  extends: .job-defaults
@@ -133,155 +115,7 @@ hfm_functional_tests_testnet:
      (cd $CI_PROJECT_DIR/tests/integration/haf-local-tools && poetry install)
      echo -e "\e[0Ksection_end:$(date +%s):python_venv\r\e[0K"
haf_system_tests:
  extends: .job-defaults
  stage: build_and_test_phase_2
  needs:
    - job: haf_image_build_testnet
      artifacts: true
  image:
    name: $HAF_IMAGE_NAME
    entrypoint: [""]
  variables:
    PG_ACCESS: "host all all 127.0.0.1/32 trust"
  script:
    - /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=$CI_PROJECT_DIR/scripts/maintenance-scripts/run_haf_system_tests.sh
  artifacts:
    paths:
      - "haf_system_tests.log"
      - "**/generated_during_*"
      - "**/generated_by_package_fixtures"
    exclude:
      - "**/generated_during_*/**/block_log"
      - "**/generated_during_*/**/block_log.artifacts"
      - "**/generated_during_*/**/shared_memory.bin"
      - "**/generated_during_*/**/*.sst"
    reports:
      junit: tests/integration/system/haf/report.xml
    when: always
    expire_in: 1 week
  interruptible: true
  tags:
    - public-runner-docker
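For debugging outside CI, the same maintenance-script entrypoint can be exercised by hand; a minimal sketch, assuming a locally built HAF image and a repository checkout mounted at /src (the image tag and the mount point are illustrative assumptions; the entrypoint flag and PG_ACCESS value come from the job above):

# Run the job's maintenance script against a local checkout; haf/instance:local
# is a placeholder tag for whatever HAF image was built locally.
docker run --rm \
  -e CI_PROJECT_DIR=/src \
  -e PG_ACCESS="host all all 127.0.0.1/32 trust" \
  -v "$PWD:/src" \
  --entrypoint "" \
  haf/instance:local \
  /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=/src/scripts/maintenance-scripts/run_haf_system_tests.sh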
dump_snapshot_5m_mirrornet:
  extends: .job-defaults
  stage: build_and_test_phase_1
  needs:
    - job: haf_image_build_mirrornet
      artifacts: true
  image: "$CI_REGISTRY_IMAGE/ci-base-image$BUILDER_IMAGE_TAG"
  variables:
    MIRRORNET_WORKING_DIR: "$CI_PROJECT_DIR/mirrornet_witness_node"
    BINARY_CACHE_PATH: "$CI_PROJECT_DIR/haf-mirrornet-binaries"
    HIVED_PATH: "$BINARY_CACHE_PATH/hived"
  script:
    # Prepare environment for hived run
    - cd $CI_PROJECT_DIR/docker
    - mkdir -vp $MIRRORNET_WORKING_DIR
    - cd $MIRRORNET_WORKING_DIR
    - mkdir blockchain
    - cd blockchain
    - cp $BLOCK_LOG_SOURCE_DIR_MIRRORNET_5M/block_log .
    # Prepare snapshot storage
    - mkdir -vp "$SNAPSHOTS_PATH"
    - cd "$SNAPSHOTS_PATH"
    - mkdir -vp 5m_mirrornet/snapshot
    # Prepare snapshot
    - $HIVED_PATH -d $MIRRORNET_WORKING_DIR --exit-before-sync --replay --block-log-split=-1
    - echo "plugin = state_snapshot" >> $MIRRORNET_WORKING_DIR/config.ini
    - $HIVED_PATH -d $MIRRORNET_WORKING_DIR --dump-snapshot=snapshot --exit-before-sync --block-log-split=-1
    # Store snapshot in cache
    - mv $MIRRORNET_WORKING_DIR/blockchain "$SNAPSHOTS_PATH/5m_mirrornet"
    - mv $MIRRORNET_WORKING_DIR/snapshot/snapshot "$SNAPSHOTS_PATH/5m_mirrornet/snapshot"
  tags:
    - data-cache-storage
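The dumped snapshot can also be consumed directly instead of replaying the whole block_log; a minimal sketch, assuming hived's state_snapshot plugin and the cache layout produced above (the temporary working directory is illustrative):

# 5m_mirrornet now holds blockchain/ plus snapshot/snapshot, which matches the
# datadir layout hived expects for --load-snapshot.
WORKDIR=$(mktemp -d)
cp -a "$SNAPSHOTS_PATH/5m_mirrornet/." "$WORKDIR/"
echo "plugin = state_snapshot" >> "$WORKDIR/config.ini"
$HIVED_PATH -d "$WORKDIR" --load-snapshot=snapshot --exit-before-sync --block-log-split=-1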
haf_system_tests_mirrornet:
  stage: build_and_test_phase_2
  extends: .pytest_based
  timeout: 2h
  needs:
    - job: haf_image_build_mirrornet
      artifacts: true
    - job: dump_snapshot_5m_mirrornet
      artifacts: false
  image: "$CI_REGISTRY_IMAGE/ci-base-image$BUILDER_IMAGE_TAG"
  services:
    - *hfm-only-service
  variables:
    BINARY_CACHE_PATH: "$CI_PROJECT_DIR/haf-mirrornet-binaries"
    HIVED_PATH: "$BINARY_CACHE_PATH/hived"
    COMPRESS_BLOCK_LOG_PATH: "$BINARY_CACHE_PATH/compress_block_log"
    BLOCK_LOG_UTIL_PATH: "$BINARY_CACHE_PATH/block_log_util"
    GET_DEV_KEY_PATH: "$BINARY_CACHE_PATH/get_dev_key"
    CLI_WALLET_PATH: "$BINARY_CACHE_PATH/cli_wallet"
    DB_NAME: haf_block_log
    DB_URL: "postgresql://haf_admin@hfm-only-instance:5432/$DB_NAME"
  script:
    # Check that the Postgres service is ready
    - psql "$DB_URL" -c "SELECT 1"
    - mkdir $CI_PROJECT_DIR/tests/integration/system/haf/mirrornet_tests/tmp_block_log
    # The cp and the call to compress_block_log below keep backward compatibility with
    # pipelines on the master branch (still on the 1.27.x line until the next HF applies),
    # whose nodes do not support the new format of the block_log.artifacts file.
    # Issue: https://gitlab.syncad.com/hive/haf/-/issues/151
    # Copy block_log to a temporary location
    - cp $BLOCK_LOG_SOURCE_DIR_MIRRORNET_5M/block_log $CI_PROJECT_DIR/tests/integration/system/haf/mirrornet_tests/tmp_block_log
    # Copy block_log and generate new artifacts with the compress_block_log util
    - time $COMPRESS_BLOCK_LOG_PATH --input-block-log $CI_PROJECT_DIR/tests/integration/system/haf/mirrornet_tests/tmp_block_log/block_log --output-block-log $CI_PROJECT_DIR/tests/integration/system/haf/mirrornet_tests/block_log --decompress
    # Drop the temporary location
    - rm -r $CI_PROJECT_DIR/tests/integration/system/haf/mirrornet_tests/tmp_block_log
    # Prepare the environment and run the tests
    - cd $CI_PROJECT_DIR/tests/integration/system/haf/mirrornet_tests && ls -la
    - pytest -vvv --junitxml report.xml --timeout=3600 --block-log-dir-path=$CI_PROJECT_DIR/tests/integration/system/haf/mirrornet_tests --snapshot-path="$SNAPSHOTS_PATH/5m_mirrornet/snapshot/snapshot" -n ${PYTEST_NUMBER_OF_PROCESSES} -m mirrornet
    - du -Sah . | sort -rh
  artifacts:
    paths:
      - "**/generated_during_*"
      - "**/generated_by_package_fixtures"
    exclude:
      - "**/generated_during_*/**/block_log"
      - "**/generated_during_*/**/block_log.artifacts"
      - "**/generated_during_*/**/shared_memory.bin"
      - "**/generated_during_*/**/*.sst"
    reports:
      junit: tests/integration/system/haf/mirrornet_tests/report.xml
    when: always
    expire_in: 1 week
  interruptible: true
  tags:
    - data-cache-storage
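The same marker selection can be reproduced by hand against an already-prepared block log and snapshot; a sketch where the process count and snapshot location are illustrative, while the remaining flags are the job's own:

# Run only the mirrornet-marked system tests with 4 pytest-xdist workers.
cd tests/integration/system/haf/mirrornet_tests
pytest -vvv -m mirrornet -n 4 --timeout=3600 \
  --junitxml report.xml \
  --block-log-dir-path="$PWD" \
  --snapshot-path=/cache/snapshots/5m_mirrornet/snapshot/snapshot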
applications_system_tests:
  extends: .job-defaults
  stage: build_and_test_phase_2
  needs:
    - job: haf_image_build_testnet
      artifacts: true
  image:
    name: $HAF_IMAGE_NAME
    entrypoint: [""]
  variables:
    PG_ACCESS: "host all all 127.0.0.1/32 trust"
  script:
    - /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=$CI_PROJECT_DIR/scripts/maintenance-scripts/run_application_system_tests.sh
  artifacts:
    paths:
      - "applications_system_tests.log"
      - "**/generated_during_*"
      - "**/generated_by_package_fixtures"
    exclude:
      - "**/generated_during_*/**/block_log"
      - "**/generated_during_*/**/block_log.artifacts"
      - "**/generated_during_*/**/shared_memory.bin"
      - "**/generated_during_*/**/*.sst"
    reports:
      junit: tests/integration/system/applications/report.xml
    when: always
    expire_in: 1 week
  interruptible: true
  tags:
    - public-runner-docker
.replay_step:
  extends: .job-defaults

@@ -333,50 +167,6 @@ applications_system_tests:
    - public-runner-docker
    - hived-for-tests
replay_with_haf:
  extends: .replay_step
  variables:
    PATTERNS_PATH: "$CI_PROJECT_DIR/tests/integration/replay/patterns/no_filter"

replay_with_haf_from_4_9m:
  extends: .replay_step
  variables:
    PATTERNS_PATH: "$CI_PROJECT_DIR/tests/integration/replay/patterns_4_9m/no_filter"

replay_accounts_filtered_with_haf_from_4_9m:
  extends: .replay_step
  variables:
    PATTERNS_PATH: "$CI_PROJECT_DIR/tests/integration/replay/patterns_4_9m/accounts_filtered"

replay_accounts_filtered_with_haf:
  extends: .replay_step
  variables:
    PATTERNS_PATH: "$CI_PROJECT_DIR/tests/integration/replay/patterns/accounts_filtered"

replay_accounts_operations_filtered_with_haf:
  extends: .replay_step
  variables:
    PATTERNS_PATH: "$CI_PROJECT_DIR/tests/integration/replay/patterns/accounts_operations_filtered"

replay_virtual_operations_filtered_with_haf:
  extends: .replay_step
  variables:
    PATTERNS_PATH: "$CI_PROJECT_DIR/tests/integration/replay/patterns/virtual_operations_filtered"

replay_operations_filtered_with_haf:
  extends: .replay_step
  variables:
    PATTERNS_PATH: "$CI_PROJECT_DIR/tests/integration/replay/patterns/operations_filtered"

replay_body_operations_filtered_with_haf:
  extends: .replay_step
  variables:
    PATTERNS_PATH: "$CI_PROJECT_DIR/tests/integration/replay/patterns/body_operations_filtered"

replay_accounts_body_operations_filtered_with_haf:
  extends: .replay_step
  variables:
    PATTERNS_PATH: "$CI_PROJECT_DIR/tests/integration/replay/patterns/accounts_body_operations_filtered"
replay_with_update:
  extends: .job-defaults

@@ -400,361 +190,4 @@ replay_with_update:
    - public-runner-docker
    - hived-for-tests
replay_with_keyauths:
  extends: .job-defaults
  image:
    name: $HAF_IMAGE_NAME
    entrypoint: [""]
  stage: build_and_test_phase_2
  needs:
    - job: haf_image_build
      artifacts: true
  variables:
    PG_ACCESS: "host all all 127.0.0.1/32 trust"
  script:
    - /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=$CI_PROJECT_DIR/tests/integration/state_provider/run_replay_with_keyauth.sh
  artifacts:
    paths:
      - "replay_with_keyauths.log"
      - "node_logs.log"
      - "node_logs1.log"
  tags:
    - public-runner-docker
    - hived-for-tests
replay_with_json_metadata:
  extends: .job-defaults
  image:
    name: $HAF_IMAGE_NAME
    entrypoint: [""]
  stage: build_and_test_phase_2
  needs:
    - job: haf_image_build
      artifacts: true
  variables:
    PG_ACCESS: "host all all 127.0.0.1/32 trust"
  script:
    - /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=$CI_PROJECT_DIR/tests/integration/state_provider/run_replay_with_json_metadata.sh
  artifacts:
    paths:
      - "replay_with_json_metadata.log"
      - "node_logs.log"
      - "node_logs1.log"
  tags:
    - public-runner-docker
    - hived-for-tests
replay_with_app:
  extends: .job-defaults
  image:
    name: $HAF_IMAGE_NAME
    entrypoint: [""]
  stage: build_and_test_phase_2
  needs:
    - job: haf_image_build
      artifacts: true
  variables:
    PG_ACCESS: "host all all 127.0.0.1/32 trust"
  script:
    - /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=$CI_PROJECT_DIR/scripts/maintenance-scripts/run_live_replay_with_app.sh
  artifacts:
    paths:
      - "replay_with_app.log"
      - "node_logs.log"
      - "node_logs1.log"
  tags:
    - public-runner-docker
    - hived-for-tests
replay_restarts_with_app:
  extends: .job-defaults
  image:
    name: $HAF_IMAGE_NAME
    entrypoint: [""]
  stage: build_and_test_phase_2
  needs:
    - job: haf_image_build
      artifacts: true
  variables:
    PG_ACCESS: "host all all 127.0.0.1/32 trust"
  script:
    - /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=$CI_PROJECT_DIR/scripts/maintenance-scripts/run_live_replay_with_restarts_and_app.sh
  artifacts:
    paths:
      - "node_logs.log"
  tags:
    - public-runner-docker
    - hived-for-tests
replay_with_restart:
  extends: .job-defaults
  image:
    name: $HAF_IMAGE_NAME
    entrypoint: [""]
  stage: build_and_test_phase_2
  needs:
    - job: haf_image_build
      artifacts: true
  variables:
    PG_ACCESS: "host all all 127.0.0.1/32 trust"
  script:
    - /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=$CI_PROJECT_DIR/scripts/maintenance-scripts/run_replay_with_breaks.sh
  artifacts:
    paths:
      - "node_logs.log"
  tags:
    - public-runner-docker
    - hived-for-tests
update_with_wrong_table_schema:
  extends: .job-defaults
  image:
    name: $HAF_IMAGE_NAME
    entrypoint: [""]
  stage: build_and_test_phase_2
  needs:
    - job: haf_image_build
      artifacts: true
  variables:
    PG_ACCESS: "host all all 127.0.0.1/32 trust"
  script:
    - /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=$CI_PROJECT_DIR/scripts/maintenance-scripts/run_update_with_wrong_table_schema.sh
  artifacts:
    paths:
      - "update_with_wrong_table_schema.log"
      - "node_logs.log"
  tags:
    - public-runner-docker
    - hived-for-tests
block_api_tests:
  extends: .replay_step
  image: $CI_REGISTRY_IMAGE/ci-base-image:ubuntu22.04-8-jmeter
  needs:
    - job: haf_image_build
      artifacts: true
  variables:
    FF_NETWORK_PER_BUILD: 1
    PATTERNS_PATH: "$CI_PROJECT_DIR/tests/integration/replay/patterns/accounts_body_operations_filtered"
    BENCHMARK_DIR: "$CI_PROJECT_DIR/hive/tests/python/hive-local-tools/tests_api/benchmarks"
  script:
    # Setup
    - |
      echo -e "\e[0Ksection_start:$(date +%s):blocks_api_test_setup[collapsed=true]\r\e[0KSetting up blocks api tests..."
      psql $DB_URL -c "CREATE ROLE bench LOGIN PASSWORD 'mark' INHERIT IN ROLE hived_group;"
      export BENCHMARK_DB_URL="postgresql://bench:mark@hfm-only-instance:5432/$DB_NAME"
      echo -e "\e[0Ksection_end:$(date +%s):blocks_api_test_setup\r\e[0K"
    # Run pattern tests
    - |
      echo -e "\e[0Ksection_start:$(date +%s):blocks_api_test[collapsed=true]\r\e[0KRunning blocks api tests..."
      cd "$BENCHMARK_DIR"
      python3 benchmark.py --loops 200 --threads 5 -n blocks_api -p 5432 -c perf_5M_light.csv --skip-version-check -d wdir --postgres $BENCHMARK_DB_URL --call-style postgres 2>&1 | tee -i $CI_PROJECT_DIR/python_benchmark.log
      echo -e "\e[0Ksection_end:$(date +%s):blocks_api_test\r\e[0K"
    # Generate JUnit report file
    - |
      echo -e "\e[0Ksection_start:$(date +%s):report[collapsed=true]\r\e[0KGenerating Junit report..."
      m2u --input "$BENCHMARK_DIR/wdir/raw_jmeter_report.xml" --output $CI_PROJECT_DIR/report.junit
      echo -e "\e[0Ksection_end:$(date +%s):report\r\e[0K"
  artifacts:
    name: "$CI_JOB_NAME-$CI_COMMIT_REF_NAME"
    reports:
      junit: $CI_PROJECT_DIR/report.junit
    paths:
      - $BENCHMARK_DIR/wdir
      - $CI_PROJECT_DIR/python_benchmark.log
    when: always
    expire_in: 1 week
  tags:
    - public-runner-docker
    - hived-for-tests
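The benchmark invocation above also works by hand against any reachable HAF database; a sketch where the connection URL is an illustrative assumption and the flags are copied verbatim from the job:

# Point the benchmark at a local HAF database instead of the CI service alias.
export BENCHMARK_DB_URL="postgresql://bench:mark@localhost:5432/haf_block_log"
cd hive/tests/python/hive-local-tools/tests_api/benchmarks
python3 benchmark.py --loops 200 --threads 5 -n blocks_api -p 5432 \
  -c perf_5M_light.csv --skip-version-check -d wdir \
  --postgres "$BENCHMARK_DB_URL" --call-style postgres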
prepare_haf_data:
  extends: .prepare_haf_data_5m
  needs:
    - job: haf_image_build
      artifacts: true
  stage: build_and_test_phase_1
  variables:
    HIVE_NETWORK_TYPE: mainnet
    BLOCK_LOG_SOURCE_DIR: "$BLOCK_LOG_SOURCE_DIR_5M"
    CONFIG_INI_SOURCE: "$CI_PROJECT_DIR/docker/config_5M.ini"
  tags:
    - data-cache-storage
# Creates a temporary copy of the replay data for the exclusive use of the current pipeline
replay_data_copy:
  extends: .job-defaults
  image: "${CI_REGISTRY_IMAGE}/ci-base-image${BUILDER_IMAGE_TAG}"
  stage: build_and_test_phase_1
  needs:
    - prepare_haf_data
    - haf_image_build
  script:
    - |
      set -e
      echo "Copying replay data to ${PIPELINE_DATA_CACHE_HAF_DIRECTORY:?}"
      sudo cp -a "${DATA_CACHE_HAF_PREFIX:?}_${HAF_COMMIT:?}" "${PIPELINE_DATA_CACHE_HAF_DIRECTORY:?}"
  tags:
    - data-cache-storage
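The ${VAR:?} expansions make the copy fail fast if any cache variable is unset, rather than copying into a malformed path. With illustrative values (the real definitions live elsewhere in this CI config; the _pipeline_* shape is implied by cleanup_pipeline_cache below), the step expands roughly to:

# All three values below are illustrative, not the pipeline's real ones.
DATA_CACHE_HAF_PREFIX=/cache/replay_data_haf
HAF_COMMIT=0123abcd
PIPELINE_DATA_CACHE_HAF_DIRECTORY=/cache/replay_data_haf_pipeline_12345
sudo cp -a "${DATA_CACHE_HAF_PREFIX}_${HAF_COMMIT}" "${PIPELINE_DATA_CACHE_HAF_DIRECTORY}"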
dead_app_auto_detach:
  extends: .job-defaults
  stage: build_and_test_phase_2
  image: "$CI_REGISTRY_IMAGE/ci-base-image$BUILDER_IMAGE_TAG"
  needs:
    - job: prepare_haf_data
      artifacts: true
    - job: replay_data_copy
  services:
    - name: $HAF_IMAGE_NAME
      alias: haf-instance
      variables:
        # Allow access from any network to eliminate CI IP addressing problems when hfm runs as a service
        PG_ACCESS: |
          "host all haf_admin 0.0.0.0/0 trust"
          "host all hived 0.0.0.0/0 trust"
          "host all test_app_owner 0.0.0.0/0 trust"
        DATA_SOURCE: "${PIPELINE_DATA_CACHE_HAF_DIRECTORY}"
        LOG_FILE: $CI_JOB_NAME.log
      command: ["--replay-blockchain", "--stop-at-block=1000000"]
  variables:
    HIVED_UID: $HIVED_UID
    HAF_COMMIT: $HAF_COMMIT
  script:
    - $CI_PROJECT_DIR/tests/integration/system/applications/auto_detaching/scenario1.sh "haf-instance"
  artifacts:
    paths:
      - scenario*.log
  tags:
    - data-cache-storage
start_haf_as_service:
  extends: .job-defaults
  stage: build_and_test_phase_2
  image: "$CI_REGISTRY_IMAGE/ci-base-image$BUILDER_IMAGE_TAG"
  needs:
    - job: prepare_haf_data
      artifacts: true
    - job: replay_data_copy
  services:
    - *haf-service
  variables:
    HAF_POSTGRES_URL: postgresql://haf_admin@haf-instance:5432/haf_block_log
    HIVED_UID: $HIVED_UID
    HAF_COMMIT: $HAF_COMMIT
  script:
    - curl -I haf-instance:8091 || (echo "error connecting to service haf-instance" && false)
    - |
      curl -XPOST -d '{
        "jsonrpc": "2.0",
        "method": "database_api.get_dynamic_global_properties",
        "params": {},
        "id": 2
      }' haf-instance:8091
  after_script:
    - rm -f docker_entrypoint.log
    - cp "${DATA_CACHE_HAF_PREFIX}_${HAF_COMMIT}/datadir/$CI_JOB_NAME.log" "$CI_PROJECT_DIR/docker_entrypoint.log"
  artifacts:
    paths:
      - docker_entrypoint.log
  tags:
    - data-cache-storage
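To make the smoke test assert on actual content rather than mere connectivity, the JSON-RPC response can be inspected; a sketch (the jq usage is illustrative; the method and endpoint are the job's own):

# Fail unless the node reports a head block number; jq -e sets the exit
# status based on whether the filter produced a non-null value.
curl -s -XPOST haf-instance:8091 \
  -d '{"jsonrpc":"2.0","method":"database_api.get_dynamic_global_properties","params":{},"id":2}' \
  | jq -e .result.head_block_number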
cleanup_haf_cache_manual:
  extends: .cleanup_cache_manual_template
  stage: cleanup
  variables:
    CLEANUP_PATH_PATTERN: "/cache/replay_data_haf_*"
  tags:
    - data-cache-storage

cleanup_haf_snapshot_from_cache:
  extends: .cleanup_cache_manual_template
  stage: cleanup
  variables:
    CLEANUP_PATH_PATTERN: "/cache/snapshots_pipeline_*"
  tags:
    - data-cache-storage

cleanup_old_haf_cache:
  extends: .cleanup_old_cache_template
  stage: cleanup
  variables:
    CLEANUP_PATH_PATTERN: "/cache/replay_data_haf_*"
  tags:
    - data-cache-storage
# Deletes the replay data used by the tests and created by replay_data_copy
cleanup_pipeline_cache:
  needs:
    - replay_data_copy
    - start_haf_as_service
    - dead_app_auto_detach
  extends:
    - .cleanup_cache_manual_template
  stage: cleanup
  variables:
    CLEANUP_PATH_PATTERN: "${DATA_CACHE_HAF_PREFIX}_pipeline_*"
  when: always
  tags:
    - data-cache-storage
build_and_publish_image:
  stage: publish
  extends: .publish_docker_image_template
  before_script:
    - !reference [.publish_docker_image_template, before_script]
  script:
    - scripts/ci-helpers/build_and_publish_instance.sh
  tags:
    - public-runner-docker
    - hived-for-tests
double_haf_replay_tests:
  extends: .docker_image_builder_job
  stage: build_and_test_phase_2
  variables:
    CONFIG_INI_SOURCE: "$CI_PROJECT_DIR/docker/config_5M.ini"
  needs:
    - job: haf_image_build
      artifacts: true
  script:
    - docker container ps -a
    - docker images
    - docker context ls
  tags:
    - public-runner-docker
    - hived-for-tests
op_body_filter_tests:
  extends: .job-defaults
  stage: build_and_test_phase_2
  needs:
    - job: haf_image_build_testnet
      artifacts: true
  image:
    name: $HAF_IMAGE_NAME
    entrypoint: [""]
  script:
    - /home/haf_admin/docker_entrypoint.sh --execute-maintenance-script=$CI_PROJECT_DIR/scripts/maintenance-scripts/run_op_body_filter_tests.sh
  artifacts:
    paths:
      - "op_body_filter_tests.log"
      - "**/generated_during_*"
      - "**/generated_by_package_fixtures"
    exclude:
      - "**/generated_during_*/**/block_log"
      - "**/generated_during_*/**/block_log.artifacts"
      - "**/generated_during_*/**/*.sst"
    reports:
      junit: tests/integration/tools/op_body_filter/report.xml
    when: always
    expire_in: 1 week
  interruptible: true
  tags:
    - public-runner-docker