Commit 134807c9 authored by Dan Notestein

Update submodules:

 - haf: develop (e52e41222bf6586c39100ece8e1e64515df0302f)
parent 2ad6bf27
1 merge request: !173 Update submodules:
Pipeline #111382 failed
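
This commit bumps the haf submodule to the head of develop and updates the matching hard-coded ref in .gitlab-ci.yml. A minimal sketch of how such a bump is typically produced, assuming the submodule is registered at path haf (the exact commands used are not recorded in the commit):

    # Point the haf submodule at the new develop head, then record the new gitlink
    git -C haf fetch origin develop
    git -C haf checkout e52e41222bf6586c39100ece8e1e64515df0302f
    git add haf
    git commit -m 'Update submodules: haf: develop'
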
stages:
  - build
  - test
  - publish
  - cleanup

variables:
  GIT_STRATEGY: clone
@@ -26,10 +26,10 @@ variables:
  # Allow access from any network to eliminate CI IP addressing problems
  HAF_DB_ACCESS: |
    "host all haf_admin 0.0.0.0/0 trust"
    "host all hived 0.0.0.0/0 trust"
    "host all hafah_user 0.0.0.0/0 trust"
    "host all all 0.0.0.0/0 scram-sha-256"

  # Variables required by Common CI jobs
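
HAF_DB_ACCESS is a multi-line variable whose rows are injected into the HAF container's pg_hba.conf. Each row uses the standard PostgreSQL host-based-authentication layout (TYPE, DATABASE, USER, ADDRESS, METHOD): the three service accounts are trusted from any address to sidestep CI network addressing, while the catch-all row forces scram-sha-256 password authentication for everyone else. Annotated, the first and last rows read:

    # TYPE  DATABASE  USER       ADDRESS    METHOD
    host    all       haf_admin  0.0.0.0/0  trust           # CI account, no password required
    host    all       all        0.0.0.0/0  scram-sha-256   # everyone else must authenticate
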
@@ -42,18 +42,18 @@ variables:
  BLOCK_LOG_SOURCE_DIR_5M: /blockchain/block_log_5m

include:
  - template: Workflows/Branch-Pipelines.gitlab-ci.yml
  - project: 'hive/haf'
-    ref: 6fa48e73dcf06140424d8c70e610fa421313ac0d # develop
+    ref: e52e41222bf6586c39100ece8e1e64515df0302f # develop
    file: '/scripts/ci-helpers/prepare_data_image_job.yml'
  - project: 'hive/common-ci-configuration'
    ref: f197ce0134cb92761a0f0e6c315bc3d79747e0e1 # develop # It seems a variable cannot be used here
    file:
      - '/templates/test_jobs.gitlab-ci.yml'
      - '/templates/python_projects.gitlab-ci.yml'
      - '/templates/docker_image_jobs.gitlab-ci.yml'
      - '/templates/cache_cleanup.gitlab-ci.yml'
      - '/templates/npm_projects.gitlab-ci.yml'
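
Each external include is pinned to an exact commit rather than a branch name, so shared-template changes only reach this pipeline when a ref is deliberately bumped; that is precisely what this commit does for hive/haf. The general shape, with a hypothetical project for illustration:

    include:
      - project: 'group/ci-templates'                   # hypothetical template repository
        ref: 0123456789abcdef0123456789abcdef01234567   # pin to an exact commit
        file: '/templates/shared_jobs.gitlab-ci.yml'
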
verify_poetry_lock_sanity:
  extends: .verify_poetry_lock_sanity_template
@@ -61,7 +61,7 @@ verify_poetry_lock_sanity:
  variables:
    PYPROJECT_DIR: "$CI_PROJECT_DIR/tests/integration/hafah-local-tools"
  tags:
    - public-runner-docker

prepare_hived_image:
  extends: .prepare_hived_image
@@ -71,21 +71,21 @@ prepare_hived_image:
    REGISTRY_USER: "$HIVED_CI_IMGBUILDER_USER"
    REGISTRY_PASS: $HIVED_CI_IMGBUILDER_PASSWORD
  tags:
    - public-runner-docker
    - hived-for-tests

prepare_hived_data:
  extends: .prepare_hived_data_5m
  needs:
    - job: prepare_hived_image
      artifacts: true
  stage: build
  variables:
    SUBMODULE_DIR: "$CI_PROJECT_DIR/haf/hive"
    BLOCK_LOG_SOURCE_DIR: $BLOCK_LOG_SOURCE_DIR_5M
    CONFIG_INI_SOURCE: "$CI_PROJECT_DIR/haf/hive/docker/config_5M.ini"
  tags:
    - data-cache-storage

prepare_haf_image:
  extends: .prepare_haf_image
@@ -95,8 +95,8 @@ prepare_haf_image:
    REGISTRY_USER: "$CI_IMG_BUILDER_USER"
    REGISTRY_PASS: $CI_IMG_BUILDER_PASSWORD
  tags:
    - public-runner-docker
    - hived-for-tests

extract-swagger-json:
  extends: .filter_out_swagger_json
@@ -104,7 +104,7 @@ extract-swagger-json:
  variables:
    INPUT_SQL_SWAGGER_FILE: "${CI_PROJECT_DIR}/postgrest/hafah_REST/hafah_openapi.sql"
  tags:
    - public-runner-docker

generate-wax-spec:
  extends: .generate_swagger_package
@@ -115,23 +115,23 @@ generate-wax-spec:
    NPM_PACKAGE_SCOPE: "@hiveio"
    NPM_PACKAGE_NAME: "wax-api-hafah"
  needs:
    - job: extract-swagger-json
      artifacts: true
  tags:
    - public-runner-docker

prepare_haf_data:
  extends: .prepare_haf_data_5m
  needs:
    - job: prepare_haf_image
      artifacts: true
  stage: build
  variables:
    SUBMODULE_DIR: "$CI_PROJECT_DIR/haf"
    BLOCK_LOG_SOURCE_DIR: $BLOCK_LOG_SOURCE_DIR_5M
    CONFIG_INI_SOURCE: "$CI_PROJECT_DIR/haf/docker/config_5M.ini"
  tags:
    - data-cache-storage

.prepare_hafah_image:
  extends: .docker_image_builder_job
@@ -144,28 +144,28 @@ prepare_haf_data:
    GIT_STRATEGY: "clone"
  script:
    - |
      echo $HAFAH_IMAGE_NAME
      echo $SOURCE_DIR
      scripts/ci-helpers/build_instance.sh
      docker login -u "$HAFAH_CI_IMG_BUILDER_USER" -p "$HAFAH_CI_IMG_BUILDER_PASSWORD" "$REGISTRY"
      docker push "$HAFAH_IMAGE_NAME"
      docker push "$CI_REGISTRY_IMAGE/postgrest-rewriter:$HAFAH_IMAGE_TAG"
      echo "HAFAH_IMAGE_NAME=$HAFAH_IMAGE_NAME" > docker_image_name.env
      if [[ -n "$CI_COMMIT_TAG" ]]; then
        docker tag "$HAFAH_IMAGE_NAME" "$CI_REGISTRY_IMAGE/instance:$CI_COMMIT_TAG"
        docker push "$CI_REGISTRY_IMAGE/instance:$CI_COMMIT_TAG"
        docker tag "$CI_REGISTRY_IMAGE/postgrest-rewriter:$HAFAH_IMAGE_TAG" "$CI_REGISTRY_IMAGE/postgrest-rewriter:$CI_COMMIT_TAG"
        docker push "$CI_REGISTRY_IMAGE/postgrest-rewriter:$CI_COMMIT_TAG"
      fi
  artifacts:
    reports:
      dotenv: docker_image_name.env
  tags:
    - public-runner-docker
    - hived-for-tests
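
The docker_image_name.env file written at the end of the script is exported as a dotenv artifact, which is how jobs that declare needs: on this one (postgrest_pattern_tests and friends) receive HAFAH_IMAGE_NAME as an ordinary CI variable. A minimal sketch of the pattern, with hypothetical job names and image value:

    producer:
      script:
        - echo "HAFAH_IMAGE_NAME=registry.example.com/hafah:abc123" > docker_image_name.env
      artifacts:
        reports:
          dotenv: docker_image_name.env

    consumer:
      needs:
        - job: producer
          artifacts: true
      script:
        - echo "$HAFAH_IMAGE_NAME"   # populated from the producer's dotenv report
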
prepare_postgrest_hafah_image:
  extends: .prepare_hafah_image

@@ -177,12 +177,14 @@ build_setup_docker_image:
  extends: .docker_image_builder_job
  stage: build
  tags:
    - public-runner-docker
  script:
-    - "echo \"TRUNCATE TABLE hafah_python.version; INSERT INTO hafah_python.version(git_hash) VALUES ('$CI_COMMIT_SHA');\" > set_version_in_sql.pgsql"
-    - "docker build --tag=$CI_REGISTRY_IMAGE:$CI_COMMIT_SHORT_SHA -f Dockerfile.setup ."
+    - "echo \"TRUNCATE TABLE hafah_python.version; INSERT INTO hafah_python.version(git_hash)\
+      \ VALUES ('$CI_COMMIT_SHA');\" > set_version_in_sql.pgsql"
+    - "docker build --tag=$CI_REGISTRY_IMAGE:$CI_COMMIT_SHORT_SHA -f Dockerfile.setup\
+      \ ."
    - "docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY"
    - "docker push $CI_REGISTRY_IMAGE:$CI_COMMIT_SHORT_SHA"
.pattern_tests:
  extends: .haf_app_pattern_tests_template
@@ -193,22 +195,23 @@ build_setup_docker_image:
    HAF_APP_USER: "hafah_user"
    PYTEST_BASED_IMAGE_NAME: ${PYTEST_RUNTIME_IMAGE_NAME}
-    POETRY_INSTALL_ROOT_DIR : ${POETRY_INSTALL_DIR}
+    POETRY_INSTALL_ROOT_DIR: ${POETRY_INSTALL_DIR}
-    TEST_SUITE: "condenser_api_patterns/get_transaction and not get_transaction_hex or account_history_api or condenser_api_patterns/get_account_history or condenser_api_patterns/get_ops_in_block"
+    TEST_SUITE: "condenser_api_patterns/get_transaction and not get_transaction_hex\
+      \ or account_history_api or condenser_api_patterns/get_account_history or condenser_api_patterns/get_ops_in_block"
    PATTERN_TESTS_DIR: "${CI_PROJECT_DIR}/haf/hive/tests/python/api_tests/pattern_tests"
    JUNIT_REPORT: "haf/hive/tests/python/api_tests/pattern_tests/results.xml"
    DIRECT_CALLS: 0
    HIVED_UID: $HIVED_UID
  needs:
    - job: prepare_haf_data
      artifacts: true
  before_script:
    - !reference [.haf_app_pattern_tests_template, before_script]
    - echo "HAfAH image name $HAF_APP_IMAGE"
    - echo "HAF image name $HAF_IMAGE_NAME"
  artifacts:
    paths:
@@ -220,15 +223,15 @@ build_setup_docker_image:
      - "haf/hive/tests/python/api_tests/pattern_tests/results.xml"
  tags:
    - data-cache-storage
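
TEST_SUITE is a single test-selection expression; the backslash wrapping introduced here is again YAML double-quoted folding, not a content change. Assuming the template ultimately hands it to pytest's -k option (the template itself lives in hive/haf, outside this file), the equivalent direct call would be:

    pytest --junitxml results.xml \
        -k 'condenser_api_patterns/get_transaction and not get_transaction_hex or account_history_api or condenser_api_patterns/get_account_history or condenser_api_patterns/get_ops_in_block'
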
postgrest_pattern_tests:
  extends: .pattern_tests
  needs:
    - !reference [.pattern_tests, needs]
    - job: prepare_postgrest_hafah_image
      artifacts: true
  variables:
    HAF_APP_IMAGE: $HAFAH_IMAGE_NAME

@@ -237,9 +240,9 @@ new_style_postgrest_pattern_tests:
  extends: .pattern_tests
  needs:
    - !reference [.pattern_tests, needs]
    - job: prepare_postgrest_hafah_image
      artifacts: true
  variables:
    HAF_APP_IMAGE: $HAFAH_IMAGE_NAME

@@ -252,7 +255,7 @@ new_style_postgrest_pattern_tests:
  stage: test
  variables:
    PYTEST_BASED_IMAGE_NAME: ${PYTEST_RUNTIME_IMAGE_NAME}
-    POETRY_INSTALL_ROOT_DIR : ${POETRY_INSTALL_DIR}
+    POETRY_INSTALL_ROOT_DIR: ${POETRY_INSTALL_DIR}
    COMPARISON_TESTS_DIR: "$CI_PROJECT_DIR/haf/hive/tests/python/api_tests/comparsion_tests"
    HAF_APP_PORT: ${APP_PORT}
    HAF_APP_USER: "hafah_user"
@@ -260,31 +263,31 @@ new_style_postgrest_pattern_tests:
    HIVED_UID: $HIVED_UID
  needs:
    - job: prepare_haf_data
      artifacts: true
    - job: prepare_hived_data
      artifacts: true
  artifacts:
    paths:
      - "$CI_JOB_NAME"
      - "**/from_node.log"
      - "**/ah.log"
      - "**/*.out.json"
    reports:
      junit: "haf/hive/tests/python/api_tests/comparsion_tests/comparsion_tests.xml"
  tags:
    - data-cache-storage

postgrest_comparison_tests:
  extends: .comparison_tests
  needs:
    - !reference [.comparison_tests, needs]
    - job: prepare_postgrest_hafah_image
      artifacts: true
  variables:
    HAF_APP_IMAGE: $HAFAH_IMAGE_NAME

@@ -299,22 +302,23 @@ postgrest_comparison_tests:
    HAF_APP_IMAGE: $HAFAH_IMAGE_NAME
    HAF_APP_PORT: $APP_PORT
  needs:
    - job: prepare_haf_data
      artifacts: true
    - job: prepare_postgrest_hafah_image
      artifacts: true
  script:
-    - /usr/bin/python3 "${BENCHMARK_SOURCE_DIR}/benchmark.py" -a app -p $APP_PORT -c perf_5M_heavy.csv -d $CI_PROJECT_DIR/wdir -n $API_FOR_TESTING
+    - /usr/bin/python3 "${BENCHMARK_SOURCE_DIR}/benchmark.py" -a app -p $APP_PORT -c
+      perf_5M_heavy.csv -d $CI_PROJECT_DIR/wdir -n $API_FOR_TESTING
    - m2u --input wdir/raw_jmeter_report.xml --output wdir/jmeter_junit_report.xml
    - jmeter -g wdir/jmeter_${APP_PORT}_output.csv -o wdir/dashboard/
  artifacts:
    paths:
      - wdir/
    reports:
      junit: wdir/jmeter_junit_report.xml
  tags:
    - data-cache-storage
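
The benchmark flow has three steps: benchmark.py drives the load test against the app service, m2u converts JMeter's XML results into JUnit XML so GitLab can render them as a test report, and jmeter -g builds an HTML dashboard from the output CSV. The same steps in isolation, with hypothetical port and API values:

    python3 benchmark.py -a app -p 3000 -c perf_5M_heavy.csv -d ./wdir -n block_api
    m2u --input wdir/raw_jmeter_report.xml --output wdir/jmeter_junit_report.xml   # JMeter XML -> JUnit XML
    jmeter -g wdir/jmeter_3000_output.csv -o wdir/dashboard/                       # HTML dashboard
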
postgrest_block_api_benchmark_tests:
  extends: .benchmark_tests

@@ -334,25 +338,26 @@ postgrest_rest_benchmark_tests:
    HAF_APP_IMAGE: $HAFAH_IMAGE_NAME
    HAF_APP_PORT: $APP_PORT
  needs:
    - job: prepare_haf_data
      artifacts: true
    - job: prepare_postgrest_hafah_image
      artifacts: true
  script:
-    - timeout -k 1m 10m ./tests/performance/run_performance_tests.sh --backend-host=app --backend-port=$APP_PORT
+    - timeout -k 1m 10m ./tests/performance/run_performance_tests.sh --backend-host=app
+      --backend-port=$APP_PORT
    - tar -czvf tests/performance/results.tar.gz $(pwd)/tests/performance/*result.*
    - cat jmeter.log | python3 tests/performance/parse-jmeter-output.py
    - m2u --input $(pwd)/tests/performance/result.xml --output $(pwd)/tests/performance/junit-result.xml
  artifacts:
    paths:
      - docker/container-logs.tar.gz
      - tests/performance/result_report/
      - tests/performance/results.tar.gz
      - jmeter.log
    reports:
      junit: tests/performance/junit-result.xml
  tags:
    - data-cache-storage

cleanup_hive_cache_manual:
  extends: .cleanup_cache_manual_template
@@ -360,7 +365,7 @@ cleanup_hive_cache_manual:
  variables:
    CLEANUP_PATH_PATTERN: "/cache/replay_data_hive_*"
  tags:
    - data-cache-storage

cleanup_haf_cache_manual:
  extends: .cleanup_cache_manual_template
@@ -368,18 +373,18 @@ cleanup_haf_cache_manual:
  variables:
    CLEANUP_PATH_PATTERN: "/cache/replay_data_haf_*"
  tags:
    - data-cache-storage

build_and_publish_image:
  stage: publish
  extends: .publish_docker_image_template
  before_script:
    - !reference [.publish_docker_image_template, before_script]
  script:
    - scripts/ci-helpers/build_and_publish_instance.sh
  tags:
    - public-runner-docker
    - hived-for-tests

prepare_haf_image_testnet:
  extends: .prepare_haf_image
@@ -391,8 +396,8 @@ prepare_haf_image_testnet:
    HIVE_NETWORK_TYPE: testnet
    BINARY_CACHE_PATH: "$CI_PROJECT_DIR/haf-testnet-binaries"
  tags:
    - public-runner-docker
    - hived-for-tests

.hfm-only-service: &hfm-only-service
  name: $HAF_IMAGE_NAME
@@ -400,58 +405,58 @@ prepare_haf_image_testnet:
  variables:
    # Allow access from any network to eliminate CI IP addressing problems when hfm runs as service
    PG_ACCESS: |
      "host all haf_admin 0.0.0.0/0 trust"
      "host all hived 0.0.0.0/0 trust"
      "host all hafah_user 0.0.0.0/0 trust"
      "host all all 0.0.0.0/0 scram-sha-256"
  command: ["--execute-maintenance-script=${HAF_SOURCE_DIR}/scripts/maintenance-scripts/sleep_infinity.sh"]
.hafah_pytest_fuctional_tests_base:
  extends: .pytest_based_template
  stage: test
  needs:
    - job: prepare_haf_image_testnet
      artifacts: true
    - job: prepare_postgrest_hafah_image
      artifacts: true
  services:
    - *hfm-only-service
    - name: ${HAF_APP_IMAGE}
      alias: app-setup
      variables:
        # intentionally use the setup method chosen in the haf_api_node compose scripts
        POSTGRES_URL: "postgresql://haf_admin@hfm-only-instance/haf_block_log"
      command: ["install_app"]
      entrypoint:
        - '/bin/bash'
        - '-c'
        - |
          set -xeuo pipefail
          echo "Attempting to perform application setup..."
          # pass control to the default image entrypoint
          "./docker_entrypoint.sh" "$@"
          echo "Application setup completed, starting to listen on the app port to satisfy the GitLab health checker..."
          # Once setup is completed, just listen on the container/app port to satisfy the GitLab CI HealthChecker
          nc -v -l -p $(echo "${HAF_APP_PORT}")
        # arg $0 should be explicitly passed when using 'bash -c' entrypoints (see the note after this job)
        - '/bin/bash'
    - name: ${HAF_APP_IMAGE}
      alias: app
      command: ["--postgres-url=postgresql://hafah_user@hfm-only-instance/haf_block_log"]
      entrypoint:
        - '/bin/bash'
        - '-c'
        - |
          set -xeuo pipefail
          # since the GitLab services startup order is undefined, we need to wait for app setup completion
          "/home/hafah_user/app/scripts/wait_for_setup_completed.sh" "$@"
          echo "Application setup finished - continue app-service spawn..."
          # pass control to the default image entrypoint
          /home/hafah_user/docker_entrypoint.sh "$@"
        # arg $0 should be explicitly passed when using 'bash -c' entrypoints
        - '/bin/bash'
  variables:
    JUNIT_REPORT: tests/integration/functional/report.xml
    PYTEST_BASED_IMAGE_NAME: $BUILDER_IMAGE_PATH
@@ -469,13 +474,14 @@ prepare_haf_image_testnet:
    DIRECT_CALLS: 0
    POSTGRES_URL: $DB_URL
  script:
    - echo "HAfAH image name $HAF_APP_IMAGE"
    - echo "HAF image name $HAF_IMAGE_NAME"
    # run tests
    - cd $CI_PROJECT_DIR/tests/integration/functional
-    - pytest --junitxml report.xml --postgrest-hafah-adress=app:$APP_PORT --postgres-db-url=$DB_URL -m $PYTEST_MARK
+    - pytest --junitxml report.xml --postgrest-hafah-adress=app:$APP_PORT --postgres-db-url=$DB_URL
+      -m $PYTEST_MARK
  tags:
    - public-runner-docker
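
The trailing '/bin/bash' entry in each entrypoint above is not redundant: with bash -c, the first word after the script becomes $0 rather than $1, so a dummy $0 must be supplied to keep "$@" inside the script aligned with the arguments GitLab appends. The nc call exists only so the finished setup container keeps its port open for GitLab's service health checker. The $0 quirk in isolation:

    # the word after the script is consumed as $0; a and b arrive as "$@"
    bash -c 'echo "args: $@"' /bin/bash a b    # prints: args: a b
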
hafah_pytest_fuctional_tests_part1:
  extends: .hafah_pytest_fuctional_tests_base

@@ -495,10 +501,10 @@ deploy-wax-spec-dev-package:
    PACKAGE_TGZ_PATH: "${BUILT_PACKAGE_PATH}"
    NPM_PACKAGE_SCOPE: "@hiveio"
  needs:
    - job: generate-wax-spec
      artifacts: true
  tags:
    - public-runner-docker

deploy-wax-spec-production-public-npm:
  extends: .registry_npmjs_org_deploy_package_template
@@ -509,7 +515,7 @@ deploy-wax-spec-production-public-npm:
    SOURCE_DIR: "${PACKAGE_SOURCE_DIR}"
    PACKAGE_TGZ_PATH: "${BUILT_PACKAGE_PATH}"
  needs:
    - job: generate-wax-spec
      artifacts: true
  tags:
    - public-runner-docker

-Subproject commit b10412889a6fa1a1321e2ad74954ec27931c97af
+Subproject commit e52e41222bf6586c39100ece8e1e64515df0302f
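
This last hunk is the gitlink itself: a submodule is stored in the superproject as a single 'Subproject commit <sha>' pointer, so the whole haf update reduces to rewriting this one object reference. To inspect it in a local checkout (output format shown is illustrative):

    git submodule status haf
    #  e52e41222bf6586c39100ece8e1e64515df0302f haf (heads/develop)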