From 6bb90d26e7185bac8b6f7cf6d2dcb525f557aaad Mon Sep 17 00:00:00 2001
From: Bartek Wrona <wrona@syncad.com>
Date: Wed, 26 Mar 2025 22:01:21 +0100
Subject: [PATCH] block_api_tests job refactored into 2 jobs: one performing a
 replay to produce data, second performing benchmark on started haf instance

---
 .gitlab-ci.yml | 61 ++++++++++++++++++++++++++++++++++++++------------
 1 file changed, 47 insertions(+), 14 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index a72823a1d..b6b140de7 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -9,11 +9,16 @@ stages:
 variables:
   PYTEST_NUMBER_OF_PROCESSES: 8
   CTEST_NUMBER_OF_JOBS: 4
+
+  GIT_STRATEGY: clone
   GIT_DEPTH: 1
   GIT_SUBMODULE_DEPTH: 1
   GIT_SUBMODULE_STRATEGY: recursive
+  GIT_SUBMODULE_UPDATE_FLAGS: --jobs 4
+
   FF_ENABLE_JOB_CLEANUP: 1
-  GIT_STRATEGY: clone
+  FF_NETWORK_PER_BUILD: 1
+
   # uses registry.gitlab.syncad.com/hive/haf/ci-base-image:ubuntu24.04-1
   BUILDER_IMAGE_TAG: "@sha256:fc149082a4ee91ed622a14d283ae7fe44d13b123f2927d2e71a2167bbe63fab0"
   CI_DEBUG_SERVICES: "true"
@@ -522,24 +527,53 @@ update_with_wrong_table_schema:
     - public-runner-docker
     - hived-for-tests
 
+# job responsible for replaying data using preconfigured filtering options specified in the given config.ini file
+replay_filtered_haf_data_accounts_body_operations:
+  extends: .prepare_haf_data_5m
+  needs:
+    - job: haf_image_build
+      artifacts: true
+  stage: build_and_test_phase_1
+  variables:
+    HIVE_NETWORK_TYPE: mainnet
+    BLOCK_LOG_SOURCE_DIR: "$BLOCK_LOG_SOURCE_DIR_5M"
+    CONFIG_INI_SOURCE: "$CI_PROJECT_DIR/tests/integration/replay/patterns/accounts_body_operations_filtered/config.ini"
+    DATA_CACHE_DIR: "${PIPELINE_DATA_CACHE_HAF_DIRECTORY}_replay_accounts_body_operations_filtered"
+  tags:
+    - data-cache-storage
+
 block_api_tests:
-  extends: .replay_step
-  image: $CI_REGISTRY_IMAGE/ci-base-image:ubuntu22.04-8-jmeter
+  extends: .jmeter_benchmark_job
+  stage: build_and_test_phase_2
   needs:
+    - job: replay_filtered_haf_data_accounts_body_operations
+      artifacts: true
     - job: haf_image_build
       artifacts: true
+
   variables:
-    FF_NETWORK_PER_BUILD: 1
-    PATTERNS_PATH: "$CI_PROJECT_DIR/tests/integration/replay/patterns/accounts_body_operations_filtered"
     BENCHMARK_DIR: "$CI_PROJECT_DIR/hive/tests/python/hive-local-tools/tests_api/benchmarks"
-  script:
-    # setup
-    - |
-      echo -e "\e[0Ksection_start:$(date +%s):blocks_api_test_setup[collapsed=true]\r\e[0KSetting up blocks api tests..."
-      psql $DB_URL -c "CREATE ROLE bench LOGIN PASSWORD 'mark' INHERIT IN ROLE hived_group;"
-      export BENCHMARK_DB_URL="postgresql://bench:mark@hfm-only-instance:5432/$DB_NAME"
-      echo -e "\e[0Ksection_end:$(date +%s):blocks_api_test_setup\r\e[0K"
+    # Allow access from any network to eliminate CI IP addressing problems
+    HAF_DB_ACCESS: |
+      "host    all              haf_admin        0.0.0.0/0    trust"
+      "host    all              hived            0.0.0.0/0    trust"
+      "host    all              hafah_user       0.0.0.0/0    trust"
+      "host    all              all              0.0.0.0/0    scram-sha-256"
+
+    BENCHMARK_DB_URL: "postgresql://hived@haf-instance:5432/haf_block_log"
+    HIVED_UID: $HIVED_UID
+
+  services:
+    - name: ${HAF_IMAGE_NAME}
+      alias: haf-instance
+      variables:
+        PG_ACCESS: "${HAF_DB_ACCESS}"
+        DATA_SOURCE: "${PIPELINE_DATA_CACHE_HAF_DIRECTORY}_replay_accounts_body_operations_filtered"
+        LOG_FILE: $CI_JOB_NAME.log
+      command: ["--replay-blockchain", "--stop-at-block=5000000"]
+
 
+  script:
     # run pattern tests
     - |
       echo -e "\e[0Ksection_start:$(date +%s):blocks_api_test[collapsed=true]\r\e[0KRunning blocks api tests..."
@@ -562,8 +596,7 @@ block_api_tests:
     when: always
     expire_in: 1 week
   tags:
-    - public-runner-docker
-    - hived-for-tests
+    - data-cache-storage
 
 prepare_haf_data:
   extends: .prepare_haf_data_5m
-- 
GitLab