diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 905887ad530629364a64d2de578e0aceb9374c52..5afc4f1ebced2c7d3351ac5a74aeb2229c1b10ee 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -9,19 +9,26 @@ variables:
   BUILDER_IMAGE_TAG: "@sha256:6b557af6a98188c118d442b68437bf1b20d67e3aae5765269c4fe88465c62d60"
   SETUP_SCRIPTS_PATH: "$CI_PROJECT_DIR/haf/scripts"
   REGISTRY: registry.gitlab.syncad.com/hive/hafah
-  HAFAH_PORT: 6543
+  APP_PORT: 6543
   HAF_POSTGRES_URL: postgresql://haf_app_admin@haf-instance:5432/haf_block_log
-  CACHED_BENCHMARK_DIR: "$CI_PROJECT_DIR/cached"
-  BENCHMARK_SOURCE_DIR: "$CI_PROJECT_DIR/tests/tests_api/benchmarks"
+  BENCHMARK_SOURCE_DIR: "$CI_PROJECT_DIR/haf/hive/tests/tests_api/benchmarks"
+
+  # Variables required by Common CI jobs
+  CI_COMMON_JOB_VERSION: "aef66465837540ec59f549f24760d6f70399d633"
+  IMAGE_REMOVER_TAG: "$CI_COMMON_JOB_VERSION"
+  TOX_IMAGE_TAG: "$CI_COMMON_JOB_VERSION"
 
 include:
+  - template: Workflows/Branch-Pipelines.gitlab-ci.yml
   - project: 'hive/haf'
-    ref: develop
+    ref: 9cb65db8cc3e36c8aa9b32b2b9978f7aa22f7de8
     file: '/scripts/ci-helpers/prepare_data_image_job.yml'
-
   - project: 'hive/hive'
-    ref: develop
+    ref: 6d3c2c0f988c87f9ad69f89e1d9710721f0dca19
     file: '/scripts/ci-helpers/prepare_data_image_job.yml'
+  - project: 'hive/common-ci-configuration'
+    ref: aef66465837540ec59f549f24760d6f70399d633 # It seems a variable cannot be used here
+    file: '/templates/test_jobs.gitlab-ci.yml'
 
 prepare_hived_image:
   extends: .prepare_hived_data_5m_image
@@ -55,7 +62,7 @@ prepare_haf_image:
     USE_POSTGREST: 0
 
   script:
-    - docker build --build-arg USE_POSTGREST=$USE_POSTGREST --build-arg HTTP_PORT=$HAFAH_PORT --build-arg POSTGRES_URL="$HAF_POSTGRES_URL" --target=instance -t $HAFAH_IMAGE_NAME -f Dockerfile .
+    - docker build --build-arg USE_POSTGREST=$USE_POSTGREST --build-arg HTTP_PORT=$APP_PORT --build-arg POSTGRES_URL="$HAF_POSTGRES_URL" --target=instance -t $HAFAH_IMAGE_NAME -f Dockerfile .
     - echo $HAFAH_CI_IMG_BUILDER_PASSWORD | docker login -u $HAFAH_CI_IMG_BUILDER_USER $REGISTRY --password-stdin
     - docker push $HAFAH_IMAGE_NAME
     - echo "HAFAH_IMAGE_NAME=$HAFAH_IMAGE_NAME" > docker_image_name.env
@@ -69,32 +76,6 @@ prepare_haf_image:
     - public-runner-docker
     - hived-for-tests
 
-prepare_benchmark_environ:
-  extends: .docker_image_builder_job
-  stage: build
-
-  before_script:
-    - apk update && apk add bash ca-certificates wget unzip openjdk11-jre openjdk11-jdk maven git python3 py3-pip
-
-  script:
-    - mkdir -p $CACHED_BENCHMARK_DIR && cd $CACHED_BENCHMARK_DIR
-    - chmod a+x $BENCHMARK_SOURCE_DIR/setup_jmeter.bash && $BENCHMARK_SOURCE_DIR/setup_jmeter.bash
-    - chmod a+x $BENCHMARK_SOURCE_DIR/setup_m2u.bash && $BENCHMARK_SOURCE_DIR/setup_m2u.bash
-
-  cache:
-    key: bench-tools
-    paths:
-      - "$CACHED_BENCHMARK_DIR/"
-
-  artifacts:
-    paths:
-      - "$CACHED_BENCHMARK_DIR/"
-    expire_in: 6 hours
-
-  tags:
-    - public-runner-docker
-    - hived-for-tests
-
 prepare_python_hafah_image:
   extends: .prepare_hafah_image
   variables:
@@ -107,14 +88,13 @@ prepare_postgrest_hafah_image:
     HAFAH_IMAGE_NAME: $REGISTRY/postgrest-instance:$CI_COMMIT_SHORT_SHA
     USE_POSTGREST: 1
 
-.patterns_tests:
-  extends: .docker_image_builder_job
+.pattern_tests:
+  extends: .pattern_tests_template
   stage: test
   variables:
-    AH_ENDPOINT: hafah_app:$HAFAH_PORT
     FF_NETWORK_PER_BUILD: 1
     PYTHONPATH: "$CI_PROJECT_DIR/tests/test-tools/package"
-    HAFAH_IMAGE: ""
+    APP_IMAGE: ""
     TEST_SUITE: "condenser_api_patterns/get_transaction and not get_transaction_hex or account_history_api or condenser_api_patterns/get_account_history or condenser_api_patterns/get_ops_in_block"
     DIRECT_HAFAH_CALLS: 0
 
@@ -122,102 +102,72 @@
     - job: prepare_haf_image
       artifacts: true
 
-  services:
-    - name: $HAF_IMAGE_NAME
-      alias: haf-instance
-      variables:
-        # Allow access from any network to eliminate CI IP addressing problems
-        PG_ACCESS: "host haf_block_log haf_app_admin 0.0.0.0/0 trust"
-
-    - name: $HAFAH_IMAGE
-      alias: hafah_app
-
   before_script:
-    - echo "HAfAH image name $HAFAH_IMAGE"
+    - echo "HAfAH image name $APP_IMAGE"
     - echo "HAF image name $HAF_IMAGE_NAME"
-    - apk update && apk add bash git ca-certificates curl build-base python3-dev
-    - python3 -m ensurepip
-    - pip3 install tox
 
   script:
     - pip3 install -r $CI_PROJECT_DIR/haf/hive/tests/api_tests/comparsion_tests/requirements.txt
     # run pattern tests
     - cd $CI_PROJECT_DIR/haf/hive/tests/api_tests/pattern_tests
-    - ./run_tests.sh $AH_ENDPOINT `git rev-parse --show-toplevel` "${TEST_SUITE}" ${DIRECT_HAFAH_CALLS}
+    - ./run_tests.sh $ENDPOINT `git rev-parse --show-toplevel` "${TEST_SUITE}" ${DIRECT_HAFAH_CALLS}
 
   artifacts:
-    when: always
-    reports:
-      junit: haf/hive/tests/api_tests/pattern_tests/results.xml
     paths:
       - "$CI_JOB_NAME"
       - "**/from_node.log"
       - "**/ah.log"
       - "**/*.out.json"
-      - "$CI_PROJECT_DIR/tests/tests_api/hived/workdir_*"
-    when: always
-    expire_in: 6 hours
+      - "tests/tests_api/hived/workdir_*"
+      - "tests/api_tests/pattern_tests/results.xml"
+    reports:
+      junit: haf/hive/tests/api_tests/pattern_tests/results.xml
 
   tags:
     - public-runner-docker
     - hived-for-tests
 
-python_patterns_tests:
-  extends: .patterns_tests
+python_pattern_tests:
+  extends: .pattern_tests
 
   needs:
-    - !reference [.patterns_tests, needs]
+    - !reference [.pattern_tests, needs]
     - job: prepare_python_hafah_image
       artifacts: true
 
   variables:
-    HAFAH_IMAGE: $HAFAH_IMAGE_NAME
-
-postgrest_patterns_tests:
-  extends: .patterns_tests
-
-  needs:
-    - !reference [.patterns_tests, needs]
-    - job: prepare_postgrest_hafah_image
-      artifacts: true
-
-  variables:
-    HAFAH_IMAGE: $HAFAH_IMAGE_NAME
+    APP_IMAGE: $HAFAH_IMAGE_NAME
 
-postgrest_patterns_tests:
-  extends: .patterns_tests
+postgrest_pattern_tests:
+  extends: .pattern_tests
 
   needs:
-    - !reference [.patterns_tests, needs]
+    - !reference [.pattern_tests, needs]
     - job: prepare_postgrest_hafah_image
       artifacts: true
 
   variables:
-    HAFAH_IMAGE: $HAFAH_IMAGE_NAME
-    TEST_SUITE: "block_api_patterns"
+    APP_IMAGE: $HAFAH_IMAGE_NAME
 
-new_style_postgrest_patterns_tests:
-  extends: .patterns_tests
+new_style_postgrest_pattern_tests:
+  extends: .pattern_tests
 
   needs:
-    - !reference [.patterns_tests, needs]
+    - !reference [.pattern_tests, needs]
    - job: prepare_postgrest_hafah_image
       artifacts: true
 
   variables:
-    HAFAH_IMAGE: $HAFAH_IMAGE_NAME
+    APP_IMAGE: $HAFAH_IMAGE_NAME
     # Direct call version does not support condenser_api
     TEST_SUITE: "account_history_api"
     DIRECT_HAFAH_CALLS: 1
 
 .comparison_tests:
-  extends: .docker_image_builder_job
+  extends: .comparison_tests_template
   stage: test
   variables:
-    AH_ENDPOINT: hafah_app:$HAFAH_PORT
-    HIVED_ENDPOINT: hived-instance:8090
     FF_NETWORK_PER_BUILD: 1
     PYTHONPATH: "$CI_PROJECT_DIR/tests/test-tools/package"
-    HAFAH_IMAGE: ""
 
   needs:
     - job: prepare_haf_image
@@ -225,44 +175,23 @@ new_style_postgrest_patterns_tests:
     - job: prepare_hived_image
       artifacts: true
 
-  services:
-    - name: $HAF_IMAGE_NAME
-      alias: haf-instance
-      variables:
-        # Allow access from any network to eliminate CI IP addressing problems
-        PG_ACCESS: "host haf_block_log haf_app_admin 0.0.0.0/0 trust"
-
-    - name: $HAFAH_IMAGE
-      alias: hafah_app
-
-    - name: $HIVED_IMAGE_NAME
-      alias: hived-instance
-
-  before_script:
-    - apk update && apk add bash git ca-certificates curl build-base python3-dev
-    - python3 -m ensurepip
-    - pip3 install tox
-
   script:
     - pip3 install -r $CI_PROJECT_DIR/haf/hive/tests/api_tests/comparsion_tests/requirements.txt "$CI_PROJECT_DIR/tests/test-tools"
     # run comparsion tests
     - cd $CI_PROJECT_DIR/haf/hive/tests/api_tests/comparsion_tests
-    - python3 -m pytest -n 8 --ref http://$HIVED_ENDPOINT --test http://$AH_ENDPOINT --start 4900000 --stop 4915000 --junitxml=$CI_PROJECT_DIR/comparsion_tests.xml
+    - python3 -m pytest -n 8 --ref http://$HIVED_ENDPOINT --test http://$ENDPOINT --start 4900000 --stop 4915000 --junitxml=$CI_PROJECT_DIR/comparsion_tests.xml
 
   artifacts:
-    when: always
-    reports:
-      junit:
-        - $CI_PROJECT_DIR/comparsion_tests.xml
     paths:
       - "$CI_JOB_NAME"
       - "**/from_node.log"
       - "**/ah.log"
       - "**/*.out.json"
-      - "$CI_PROJECT_DIR/tests/tests_api/hived/workdir_*"
-    when: always
-    expire_in: 6 hours
+      - "haf/hive/tests/tests_api/hived/workdir_*"
+      - "comparsion_tests.xml"
+    reports:
+      junit:
+        - comparsion_tests.xml
 
   tags:
     - public-runner-docker
     - hived-for-tests
@@ -276,7 +205,7 @@ python_comparison_tests:
       artifacts: true
 
   variables:
-    HAFAH_IMAGE: $HAFAH_IMAGE_NAME
+    APP_IMAGE: $HAFAH_IMAGE_NAME
 
 postgrest_comparison_tests:
   extends: .comparison_tests
@@ -287,53 +216,29 @@ postgrest_comparison_tests:
       artifacts: true
 
   variables:
-    HAFAH_IMAGE: $HAFAH_IMAGE_NAME
+    APP_IMAGE: $HAFAH_IMAGE_NAME
 
 .benchmark_tests:
-  extends: .docker_image_builder_job
+  extends: .jmeter_benchmark_with_haf_job
   stage: test
   variables:
     FF_NETWORK_PER_BUILD: 1
     API_FOR_TESTING: "account_history_api" # alternatively: blocks_api
     PYTHONPATH: "$CI_PROJECT_DIR/tests/test-tools/package"
-    HAFAH_IMAGE: ""
-    CACHE_DIR: "$CI_PROJECT_DIR/cached"
-
+    APP_IMAGE: ""
 
  needs:
     - job: prepare_haf_image
       artifacts: true
-    - job: prepare_benchmark_environ
-      artifacts: true
-
-  services:
-    - name: $HAF_IMAGE_NAME
-      alias: haf-instance
-      variables:
-        # Allow access from any network to eliminate CI IP addressing problems
-        PG_ACCESS: "host haf_block_log haf_app_admin 0.0.0.0/0 trust"
-
-    - name: $HAFAH_IMAGE
-      alias: hafah_app
-
-  before_script:
-    - apk update && apk add bash ca-certificates wget unzip openjdk11-jre openjdk11-jdk maven git python3 py3-pip
-    - pip3 install --user prettytable
 
   script:
-    - source $CACHED_BENCHMARK_DIR/jmeter/activate
-    - source $CACHED_BENCHMARK_DIR/m2u/activate
-    - $JMETER --version
-    - /usr/bin/python3 $CI_PROJECT_DIR/tests/tests_api/benchmarks/benchmark.py -a hafah_app -p $HAFAH_PORT -c perf_5M_heavy.csv -j $JMETER -d $CI_PROJECT_DIR/wdir -n $API_FOR_TESTING
-    - $M2U --input wdir/raw_jmeter_report.xml --output wdir/jmeter_junit_report.xml
-
+    - /usr/bin/python3 $CI_PROJECT_DIR/tests/tests_api/benchmarks/benchmark.py -a app -p $APP_PORT -c perf_5M_heavy.csv -d $CI_PROJECT_DIR/wdir -n $API_FOR_TESTING
+    - m2u --input wdir/raw_jmeter_report.xml --output wdir/jmeter_junit_report.xml
+    - jmeter -g wdir/jmeter_${APP_PORT}_output.csv -o wdir/dashboard/
 
   artifacts:
-    when: always
-    reports:
-      junit: $CI_PROJECT_DIR/wdir/jmeter_junit_report.xml
     paths:
-      - $CI_PROJECT_DIR/wdir/
-    when: always
-    expire_in: 6 hours
+      - wdir/
+    reports:
+      junit: wdir/jmeter_junit_report.xml
 
   tags:
     - public-runner-docker
     - hived-for-tests
@@ -347,8 +252,7 @@ python_account_history_benchmark_tests:
       artifacts: true
 
   variables:
-    HAFAH_IMAGE: $HAFAH_IMAGE_NAME
-
+    APP_IMAGE: $HAFAH_IMAGE_NAME
 
 postgrest_block_api_benchmark_tests:
   extends: .benchmark_tests
@@ -359,7 +263,7 @@ postgrest_block_api_benchmark_tests:
       artifacts: true
 
   variables:
-    HAFAH_IMAGE: $HAFAH_IMAGE_NAME
+    APP_IMAGE: $HAFAH_IMAGE_NAME
     API_FOR_TESTING: blocks_api
 
 postgrest_account_history_benchmark_tests:
@@ -371,4 +275,4 @@ postgrest_account_history_benchmark_tests:
       artifacts: true
 
   variables:
-    HAFAH_IMAGE: $HAFAH_IMAGE_NAME
+    APP_IMAGE: $HAFAH_IMAGE_NAME
diff --git a/scripts/setup_jmeter.bash b/scripts/setup_jmeter.bash
deleted file mode 100644
index aa6082fcf543e3e0eda3c33c160740699449bd5a..0000000000000000000000000000000000000000
--- a/scripts/setup_jmeter.bash
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/bin/bash
-
-set -euo pipefail
-
-WORKDIR=jmeter
-JMETER_DOWNLOAD_URL="https://dlcdn.apache.org//jmeter/binaries/apache-jmeter-5.4.3.zip"
-JMETER_POSTGRES_DOWNLOAD_URL="https://jdbc.postgresql.org/download/postgresql-42.3.1.jar"
-
-if [[ -f "$WORKDIR/activate" ]]; then
-  echo "using cached jmeter"
-  exit 0
-fi
-
-echo "creating work directory"
-mkdir -p "$WORKDIR"
-
-pushd "$WORKDIR"
-
-echo "downloading jmeter"
-wget --quiet "$JMETER_DOWNLOAD_URL" > /dev/null
-
-echo "unzipping jmeter"
-unzip -qq apache-*.zip > /dev/null
-
-echo "removing archive and renaming jmeter directory"
-rm apache-*.zip
-mv apache* apache
-
-  pushd apache
-    pushd lib
-      echo "downloading postgres driver for jmeter"
-      wget --quiet "$JMETER_POSTGRES_DOWNLOAD_URL" > /dev/null
-    popd
-
-    export JMETER="$PWD/bin/jmeter"
-  popd
-  echo "JMETER=$JMETER" > activate
-
-  echo "testing is jmeter properly configured"
-  $JMETER --version
-popd
-
-
diff --git a/scripts/setup_m2u.bash b/scripts/setup_m2u.bash
deleted file mode 100644
index 4daff8ae6fd6583e7ec7d0608d870518806d7e73..0000000000000000000000000000000000000000
--- a/scripts/setup_m2u.bash
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/bash
-
-set -euo pipefail
-
-WORKDIR=m2u
-M2U_URL="https://github.com/tguzik/m2u.git"
-
-if [[ -f "$WORKDIR/activate" ]]; then
-  echo "using cached jmeter"
-  exit 0
-fi
-
-echo "creating work directory"
-mkdir -p "$WORKDIR"
-
-pushd "$WORKDIR"
-
-  echo "downloading m2u"
-  git clone "$M2U_URL" --single-branch -b master .
-
-  echo "configuring m2u"
-  mvn 2>&1 >/dev/null
-
-  echo "M2U='java -jar $PWD/target/m2u.jar'" > activate
-
-popd