Commit 6bb90d26 authored by Bartek Wrona

block_api_tests job refactored into 2 jobs: one performing a replay to produce data, the second performing a benchmark on the started HAF instance
parent de35afa6
1 merge request: !632 Update docker images to ubuntu 24.04
Pipeline #118958 passed
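For orientation, here is a simplified, hedged sketch of the two-job layout this commit ends up with; the authoritative definitions are in the diff hunks below, which also carry the config.ini filtering, the haf_image_build dependency, the database access rules and the benchmark script. The first job replays the chain once and caches the resulting HAF data directory; the second job starts a HAF service container from that cache and benchmarks it.

# Simplified sketch only; see the diff below for the full definitions.
replay_filtered_haf_data_accounts_body_operations:
  extends: .prepare_haf_data_5m           # phase 1: replay and cache the filtered data
  stage: build_and_test_phase_1
  variables:
    DATA_CACHE_DIR: "${PIPELINE_DATA_CACHE_HAF_DIRECTORY}_replay_accounts_body_operations_filtered"

block_api_tests:
  extends: .jmeter_benchmark_job          # phase 2: benchmark the started HAF instance
  stage: build_and_test_phase_2
  needs:
    - job: replay_filtered_haf_data_accounts_body_operations
      artifacts: true
  services:
    - name: ${HAF_IMAGE_NAME}
      alias: haf-instance
      variables:
        DATA_SOURCE: "${PIPELINE_DATA_CACHE_HAF_DIRECTORY}_replay_accounts_body_operations_filtered"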
@@ -9,11 +9,16 @@ stages:
variables:
PYTEST_NUMBER_OF_PROCESSES: 8
CTEST_NUMBER_OF_JOBS: 4
GIT_STRATEGY: clone
GIT_DEPTH: 1
GIT_SUBMODULE_DEPTH: 1
GIT_SUBMODULE_STRATEGY: recursive
GIT_SUBMODULE_UPDATE_FLAGS: --jobs 4
FF_ENABLE_JOB_CLEANUP: 1
GIT_STRATEGY: clone
FF_NETWORK_PER_BUILD: 1
# uses registry.gitlab.syncad.com/hive/haf/ci-base-image:ubuntu24.04-1
BUILDER_IMAGE_TAG: "@sha256:fc149082a4ee91ed622a14d283ae7fe44d13b123f2927d2e71a2167bbe63fab0"
CI_DEBUG_SERVICES: "true"
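The hunk above adds pipeline-wide clone settings: a shallow clone (GIT_DEPTH: 1) with shallow, recursive submodules fetched in parallel, plus the job-cleanup and per-build-network feature flags and service debugging. As a hedged illustration (not part of this commit), an individual job can still override these globals where it needs different behaviour, for example a hypothetical job that requires full git history:

deep_clone_job:                    # hypothetical job, for illustration only
  variables:
    GIT_DEPTH: 0                   # 0 turns the shallow clone back into a full clone
    GIT_SUBMODULE_STRATEGY: none   # skip submodule checkout entirely for this job
  script:
    - git rev-list --count HEAD    # would now see the whole history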
@@ -522,24 +527,53 @@ update_with_wrong_table_schema:
- public-runner-docker
- hived-for-tests
# job responsible for replaying data using the preconfigured filtering options specified in the given config.ini file
replay_filtered_haf_data_accounts_body_operations:
extends: .prepare_haf_data_5m
needs:
- job: haf_image_build
artifacts: true
stage: build_and_test_phase_1
variables:
HIVE_NETWORK_TYPE: mainnet
BLOCK_LOG_SOURCE_DIR: "$BLOCK_LOG_SOURCE_DIR_5M"
CONFIG_INI_SOURCE: "$CI_PROJECT_DIR/tests/integration/replay/patterns/accounts_body_operations_filtered/config.ini"
DATA_CACHE_DIR: "${PIPELINE_DATA_CACHE_HAF_DIRECTORY}_replay_accounts_body_operations_filtered"
tags:
- data-cache-storage
block_api_tests:
extends: .replay_step
image: $CI_REGISTRY_IMAGE/ci-base-image:ubuntu22.04-8-jmeter
extends: .jmeter_benchmark_job
stage: build_and_test_phase_2
needs:
- job: replay_filtered_haf_data_accounts_body_operations
artifacts: true
- job: haf_image_build
artifacts: true
variables:
FF_NETWORK_PER_BUILD: 1
PATTERNS_PATH: "$CI_PROJECT_DIR/tests/integration/replay/patterns/accounts_body_operations_filtered"
BENCHMARK_DIR: "$CI_PROJECT_DIR/hive/tests/python/hive-local-tools/tests_api/benchmarks"
script:
# setup
- |
echo -e "\e[0Ksection_start:$(date +%s):blocks_api_test_setup[collapsed=true]\r\e[0KSetting up blocks api tests..."
psql $DB_URL -c "CREATE ROLE bench LOGIN PASSWORD 'mark' INHERIT IN ROLE hived_group;"
export BENCHMARK_DB_URL="postgresql://bench:mark@hfm-only-instance:5432/$DB_NAME"
echo -e "\e[0Ksection_end:$(date +%s):blocks_api_test_setup\r\e[0K"
# Allow access from any network to eliminate CI IP addressing problems
HAF_DB_ACCESS: |
"host all haf_admin 0.0.0.0/0 trust"
"host all hived 0.0.0.0/0 trust"
"host all hafah_user 0.0.0.0/0 trust"
"host all all 0.0.0.0/0 scram-sha-256"
BENCHMARK_DB_URL: "postgresql://hived@haf-instance:5432/haf_block_log"
HIVED_UID: $HIVED_UID
services:
- name: ${HAF_IMAGE_NAME}
alias: haf-instance
variables:
PG_ACCESS: "${HAF_DB_ACCESS}"
DATA_SOURCE: "${PIPELINE_DATA_CACHE_HAF_DIRECTORY}_replay_accounts_body_operations_filtered"
LOG_FILE: $CI_JOB_NAME.log
command: ["--replay-blockchain", "--stop-at-block=5000000"]
script:
# run pattern tests
- |
echo -e "\e[0Ksection_start:$(date +%s):blocks_api_test[collapsed=true]\r\e[0KRunning blocks api tests..."
@@ -562,8 +596,7 @@ block_api_tests:
when: always
expire_in: 1 week
tags:
- public-runner-docker
- hived-for-tests
- data-cache-storage
prepare_haf_data:
extends: .prepare_haf_data_5m
......
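As a closing illustration of the second half of the refactoring: the benchmark job no longer replays anything itself; it receives a started HAF instance as a GitLab service container (alias haf-instance) fed from the cached data directory, with access controlled by the pg_hba-style rules in HAF_DB_ACCESS and the connection string in BENCHMARK_DB_URL. Below is a hedged sketch of how such a job could wait for the service to accept connections before running the JMeter pattern tests; the job name and the final placeholder step are hypothetical, and the real job's script is shown (partially) in the diff above.

block_api_benchmark_readiness_sketch:          # hypothetical job name
  extends: .jmeter_benchmark_job
  services:
    - name: ${HAF_IMAGE_NAME}
      alias: haf-instance
  variables:
    BENCHMARK_DB_URL: "postgresql://hived@haf-instance:5432/haf_block_log"
  script:
    - |
      # poll until the HAF service answers a trivial query
      until psql "$BENCHMARK_DB_URL" -c "SELECT 1;" >/dev/null 2>&1; do
        echo "Waiting for haf-instance to accept connections..."
        sleep 5
      done
    - echo "haf-instance is ready; the JMeter pattern tests would run here"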