
Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Target project: hive/hivemind
Commits on Source (5)
Showing 391 additions and 1572 deletions
stages:
- build
- prepare
- sync
- benchmark
- publish
- collector
- cleanup
variables:
# HIVEMIND
@@ -31,237 +31,236 @@ variables:
CI_DEBUG_SERVICES: "false" #All the service logs should be saved as artifacts, so it's fine to turn this off.
include:
- template: Workflows/Branch-Pipelines.gitlab-ci.yml
- project: hive/haf
ref: 2bcd0f8938dc574fd63a482ac7242499adf985c1 # develop
file: /scripts/ci-helpers/prepare_data_image_job.yml # implicitly pulls base.gitlab-ci.yml from common-ci-configuration
- project: hive/common-ci-configuration
ref: e74d7109838ff05fdc239bced6a726aa7ad46a9b
file:
- /templates/cache_cleanup.gitlab-ci.yml
- '/templates/docker_image_jobs.gitlab-ci.yml'
#>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>| ANCHORS |>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
.shared_tags:
tags: &shared_tags
- public-runner-docker
- hived-for-tests
.start-timer:
- ./scripts/ci/timer.sh start
.check-timer:
- ./scripts/ci/timer.sh check
#<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<| ANCHORS |<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
#>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>| BASH SCRIPTS |>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
.base_image_build_script: &base-image-build-script
- |
echo -e "\e[0Ksection_start:$(date +%s):login[collapsed=true]\r\e[0KLogging to Docker registry..."
docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
echo -e "\e[0Ksection_end:$(date +%s):login\r\e[0K"
echo -e "\e[0Ksection_start:$(date +%s):build[collapsed=true]\r\e[0KBuilding base Docker images..."
./scripts/ci/build_ci_base_image.sh
echo -e "\e[0Ksection_end:$(date +%s):build\r\e[0K"
.instance-build-script: &instance-build-script
- |
echo -e "\e[0Ksection_start:$(date +%s):login[collapsed=true]\r\e[0KLogging to Docker registry..."
docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" "$CI_REGISTRY"
echo -e "\e[0Ksection_end:$(date +%s):login\r\e[0K"
echo -e "\e[0Ksection_start:$(date +%s):build[collapsed=true]\r\e[0KBuilding Hivemind Docker image..."
./scripts/ci-helpers/build_instance.sh \
"$CI_COMMIT_SHORT_SHA" \
"$CI_PROJECT_DIR" \
"$CI_REGISTRY_IMAGE" \
--dot-env-filename=hivemind_image.env \
--dot-env-var-name=HIVEMIND_IMAGE
docker push "$CI_REGISTRY_IMAGE:$CI_COMMIT_SHORT_SHA"
docker push "$CI_REGISTRY_IMAGE/postgrest-rewriter:$CI_COMMIT_SHORT_SHA"
if [[ -n "$CI_COMMIT_TAG" ]]; then
docker pull "$CI_REGISTRY_IMAGE/instance:$CI_COMMIT_SHORT_SHA"
docker tag "$CI_REGISTRY_IMAGE/instance:$CI_COMMIT_SHORT_SHA" "$CI_REGISTRY_IMAGE/instance:$CI_COMMIT_TAG"
docker push "$CI_REGISTRY_IMAGE/instance:$CI_COMMIT_TAG"
docker tag "$CI_REGISTRY_IMAGE/postgrest-rewriter:$CI_COMMIT_SHORT_SHA" "$CI_REGISTRY_IMAGE/postgrest-rewriter:$CI_COMMIT_TAG"
docker push "$CI_REGISTRY_IMAGE/postgrest-rewriter:$CI_COMMIT_TAG"
fi
cat hivemind_image.env
echo -e "\e[0Ksection_end:$(date +%s):build\r\e[0K"
.bridge_api_smoketest-script: &bridge_api_smoketest-script
- |
echo -e "\e[0Ksection_start:$(date +%s):bridge_api_smoketest[collapsed=true]\r\e[0KRunning bridge API smoketest..."
./scripts/ci/start-api-smoketest.sh \
$RUNNER_HIVEMIND_SMOKETEST_SERVER_HOSTNAME \
$RUNNER_HIVEMIND_SERVER_HTTP_PORT \
bridge_api_patterns/ \
api_smoketest_bridge.xml \
$RUNNER_PYTEST_WORKERS \
"auto"
echo -e "\e[0Ksection_end:$(date +%s):bridge_api_smoketest\r\e[0K"
.bridge_api_smoketest_negative-script: &bridge_api_smoketest_negative-script
- |
echo -e "\e[0Ksection_start:$(date +%s):bridge_api_smoketest_negative[collapsed=true]\r\e[0KRunning bridge API smoketest negative..."
./scripts/ci/start-api-smoketest.sh \
$RUNNER_HIVEMIND_SMOKETEST_SERVER_HOSTNAME \
$RUNNER_HIVEMIND_SERVER_HTTP_PORT \
bridge_api_negative/ \
api_smoketest_bridge_negative.xml \
$RUNNER_PYTEST_WORKERS \
"auto"
echo -e "\e[0Ksection_end:$(date +%s):bridge_api_smoketest_negative\r\e[0K"
.condenser_api_smoketest-script: &condenser_api_smoketest-script
- |
echo -e "\e[0Ksection_start:$(date +%s):condenser_api_smoketest[collapsed=true]\r\e[0KRunning condenser API smoketest..."
./scripts/ci/start-api-smoketest.sh \
$RUNNER_HIVEMIND_SMOKETEST_SERVER_HOSTNAME \
$RUNNER_HIVEMIND_SERVER_HTTP_PORT \
condenser_api_patterns/ \
api_smoketest_condenser_api.xml \
$RUNNER_PYTEST_WORKERS \
"auto"
echo -e "\e[0Ksection_end:$(date +%s):condenser_api_smoketest\r\e[0K"
.condenser_api_smoketest_negative-script: &condenser_api_smoketest_negative-script
- |
echo -e "\e[0Ksection_start:$(date +%s):condenser_api_smoketest_negative[collapsed=true]\r\e[0KRunning condenser API smoketest negative..."
./scripts/ci/start-api-smoketest.sh \
$RUNNER_HIVEMIND_SMOKETEST_SERVER_HOSTNAME \
$RUNNER_HIVEMIND_SERVER_HTTP_PORT \
condenser_api_negative/ \
api_smoketest_condenser_api_negative.xml \
$RUNNER_PYTEST_WORKERS \
"auto"
echo -e "\e[0Ksection_end:$(date +%s):condenser_api_smoketest_negative\r\e[0K"
.database_api_smoketest-script: &database_api_smoketest-script
- |
echo -e "\e[0Ksection_start:$(date +%s):database_api_smoketest[collapsed=true]\r\e[0KRunning database API smoketest..."
./scripts/ci/start-api-smoketest.sh \
$RUNNER_HIVEMIND_SMOKETEST_SERVER_HOSTNAME \
$RUNNER_HIVEMIND_SERVER_HTTP_PORT \
database_api_patterns/ \
api_smoketest_database_api.xml \
$RUNNER_PYTEST_WORKERS \
"auto"
echo -e "\e[0Ksection_end:$(date +%s):database_api_smoketest\r\e[0K"
.database_api_smoketest_negative-script: &database_api_smoketest_negative-script
- |
echo -e "\e[0Ksection_start:$(date +%s):database_api_smoketest_negative[collapsed=true]\r\e[0KRunning database API smoketest negative..."
./scripts/ci/start-api-smoketest.sh \
$RUNNER_HIVEMIND_SMOKETEST_SERVER_HOSTNAME \
$RUNNER_HIVEMIND_SERVER_HTTP_PORT \
database_api_negative/ \
api_smoketest_database_api_negative.xml \
$RUNNER_PYTEST_WORKERS \
"auto"
echo -e "\e[0Ksection_end:$(date +%s):database_api_smoketest_negative\r\e[0K"
.follow_api_smoketest-script: &follow_api_smoketest-script
- |
echo -e "\e[0Ksection_start:$(date +%s):follow_api_smoketest[collapsed=true]\r\e[0KRunning follow API smoketest..."
./scripts/ci/start-api-smoketest.sh \
$RUNNER_HIVEMIND_SMOKETEST_SERVER_HOSTNAME \
$RUNNER_HIVEMIND_SERVER_HTTP_PORT \
follow_api_patterns/ \
api_smoketest_follow_api.xml \
$RUNNER_PYTEST_WORKERS \
"auto"
echo -e "\e[0Ksection_end:$(date +%s):follow_api_smoketest\r\e[0K"
.follow_api_smoketest_negative-script: &follow_api_smoketest_negative-script
- |
echo -e "\e[0Ksection_start:$(date +%s):follow_api_smoketest_negative[collapsed=true]\r\e[0KRunning follow API smoketest negative..."
./scripts/ci/start-api-smoketest.sh \
$RUNNER_HIVEMIND_SMOKETEST_SERVER_HOSTNAME \
$RUNNER_HIVEMIND_SERVER_HTTP_PORT \
follow_api_negative/ \
api_smoketest_follow_api_negative.xml \
$RUNNER_PYTEST_WORKERS \
"auto"
echo -e "\e[0Ksection_end:$(date +%s):follow_api_smoketest_negative\r\e[0K"
.tags_api_smoketest-script: &tags_api_smoketest-script
- |
echo -e "\e[0Ksection_start:$(date +%s):tags_api_smoketest[collapsed=true]\r\e[0KRunning tags API smoketest..."
./scripts/ci/start-api-smoketest.sh \
$RUNNER_HIVEMIND_SMOKETEST_SERVER_HOSTNAME \
$RUNNER_HIVEMIND_SERVER_HTTP_PORT \
tags_api_patterns/ \
api_smoketest_tags_api.xml \
$RUNNER_PYTEST_WORKERS \
"auto"
echo -e "\e[0Ksection_end:$(date +%s):tags_api_smoketest\r\e[0K"
.tags_api_smoketest_negative-script: &tags_api_smoketest_negative-script
- |
echo -e "\e[0Ksection_start:$(date +%s):tags_api_smoketest_negative[collapsed=true]\r\e[0KRunning tags API smoketest negative..."
./scripts/ci/start-api-smoketest.sh \
$RUNNER_HIVEMIND_SMOKETEST_SERVER_HOSTNAME \
$RUNNER_HIVEMIND_SERVER_HTTP_PORT \
tags_api_negative/ \
api_smoketest_tags_api_negative.xml \
$RUNNER_PYTEST_WORKERS \
"auto"
echo -e "\e[0Ksection_end:$(date +%s):tags_api_smoketest_negative\r\e[0K"
.mock_tests-script: &mock_tests-script
- |
echo -e "\e[0Ksection_start:$(date +%s):mock_tests[collapsed=true]\r\e[0KRunning mock tests..."
./scripts/ci/start-api-smoketest.sh \
$RUNNER_HIVEMIND_SMOKETEST_SERVER_HOSTNAME \
$RUNNER_HIVEMIND_SERVER_HTTP_PORT \
mock_tests/ \
api_smoketest_mock_tests.xml \
$RUNNER_PYTEST_WORKERS
echo -e "\e[0Ksection_end:$(date +%s):mock_tests\r\e[0K"
.hive_api_smoketest-script: &hive_api_smoketest-script
- |
echo -e "\e[0Ksection_start:$(date +%s):hive_api_smoketest[collapsed=true]\r\e[0KRunning Hive API smoketests..."
./scripts/ci/start-api-smoketest.sh \
$RUNNER_HIVEMIND_SMOKETEST_SERVER_HOSTNAME \
$RUNNER_HIVEMIND_SERVER_HTTP_PORT \
hive_api_patterns/ \
api_smoketest_hive_api.xml \
$RUNNER_PYTEST_WORKERS \
"auto"
echo -e "\e[0Ksection_end:$(date +%s):hive_api_smoketest\r\e[0K"
.api-benchmark-script:
- |
echo -e "\e[0Ksection_start:$(date +%s):api-benchmark[collapsed=true]\r\e[0KRunning API benchmark..."
./scripts/ci/start-api-benchmarks.sh \
$RUNNER_HIVEMIND_BENCHMARK_SERVER_HOSTNAME \
$RUNNER_HIVEMIND_SERVER_HTTP_PORT \
$RUNNER_BENCHMARK_ITERATIONS \
$RUNNER_PYTEST_WORKERS
echo -e "\e[0Ksection_end:$(date +%s):api-benchmark\r\e[0K"
.postgrest_negative-script: &postgrest_negative-script
- |
echo -e "\e[0Ksection_start:$(date +%s):postgrest_negative[collapsed=true]\r\e[0KRunning Postgrest negative smoketests..."
./scripts/ci/start-api-smoketest.sh \
$RUNNER_HIVEMIND_SMOKETEST_SERVER_HOSTNAME \
$RUNNER_HIVEMIND_SERVER_HTTP_PORT \
postgrest_negative/ \
postgrest_negative.xml \
$RUNNER_PYTEST_WORKERS \
"auto"
echo -e "\e[0Ksection_end:$(date +%s):postgrest_negative\r\e[0K"
.hivemind-serve-script: &hivemind-serve-script |
${DATA_CACHE_HAF}/await -t 5m http://haf-instance:8091 -- echo "HAF ready" 2>&1 | tee -i "$AWAIT_LOG_PATH" && \
${DATA_CACHE_HAF}/await -t 5m postgres://haf_admin@haf-instance:5432/haf_block_log#schemas=hivemind_app -- echo "Hivemind database found" 2>&1 | tee -a -i "$AWAIT_LOG_PATH" && \
${WORKING_DIR}/docker_entrypoint.sh server \
@@ -272,8 +271,7 @@ include:
--database-url="${HAF_POSTGRES_URL}"
.hivemind-postgrest-serve-script: &hivemind-postgrest-serve-script |
${DATA_CACHE_HAF}/await -t 5m http://haf-instance:8091 -- echo "HAF ready" 2>&1 | tee -i "$AWAIT_LOG_PATH" && \
${DATA_CACHE_HAF}/await -t 5m postgres://haf_admin@haf-instance:5432/haf_block_log#schemas=hivemind_app -- echo "Hivemind database found" 2>&1 | tee -a -i "$AWAIT_LOG_PATH" && \
${WORKING_DIR}/docker_entrypoint.sh postgrest-server --webserver-port=${RUNNER_HIVEMIND_SERVER_HTTP_PORT}
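The await helper above blocks until the HAF HTTP endpoint and the hivemind_app schema are reachable before the server starts. A rough bash equivalent of the HTTP wait, assuming only curl is available (the 300-second deadline mirrors -t 5m):

# Poll the HAF endpoint until it answers or the deadline passes,
# roughly what `await -t 5m http://haf-instance:8091 -- ...` does above.
deadline=$(( $(date +%s) + 300 ))
until curl -sf http://haf-instance:8091 >/dev/null; do
  (( $(date +%s) >= deadline )) && { echo "HAF not ready in time" >&2; exit 1; }
  sleep 5
done
echo "HAF ready"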
@@ -287,9 +285,9 @@ prepare_base_images:
stage: build
extends: .docker_image_builder_job_template
before_script:
- git config --global --add safe.directory $CI_PROJECT_DIR
script:
- *base-image-build-script
tags: *shared_tags
prepare_haf_image:
@@ -300,38 +298,38 @@ prepare_haf_image:
REGISTRY_USER: "$HAF_IMG_BUILDER_USER"
REGISTRY_PASS: "$HAF_IMG_BUILDER_PASSWORD"
before_script:
- git config --global --add safe.directory $CI_PROJECT_DIR/haf
tags: *shared_tags
prepare_haf_data:
extends: .prepare_haf_data_5m
needs:
- job: prepare_haf_image
artifacts: true
stage: build
variables:
SUBMODULE_DIR: "$CI_PROJECT_DIR/haf"
BLOCK_LOG_SOURCE_DIR: $BLOCK_LOG_SOURCE_DIR_5M
CONFIG_INI_SOURCE: "$CI_PROJECT_DIR/haf/docker/config_5M.ini"
tags:
- data-cache-storage
prepare_hivemind_image:
stage: build
extends: .docker_image_builder_job_template
needs:
- prepare_base_images
before_script:
- git config --global --add safe.directory $CI_PROJECT_DIR
script:
- *instance-build-script
artifacts:
when: always
expire_in: 7 days
reports:
dotenv: hivemind_image.env
paths:
- hivemind_image.env
tags: *shared_tags
cleanup_hivemind_haf_cache_manual:
@@ -340,7 +338,7 @@ cleanup_hivemind_haf_cache_manual:
variables:
CLEANUP_PATH_PATTERN: "/cache/replay_data_hivemind_*"
tags:
- data-cache-storage
# This job cleans up both Hivemind-specific cache and cache shared between projects, so it should be used in emergencies only.
# For example, if prepare_haf_data job fails in the same way in Hivemind and HAfAH.
@@ -350,23 +348,23 @@ cleanup_haf_cache_manual:
variables:
CLEANUP_PATH_PATTERN: "/cache/replay_data_hivemind_* /cache/replay_data_haf_*"
tags:
- data-cache-storage
# Cleans up cache after tests. Must depend on all jobs that use this cache.
cleanup_pipeline_cache:
needs:
- sync
#- e2e_benchmark
- e2e_benchmark_on_postgrest
extends:
- .cleanup_cache_manual_template
stage: cleanup
variables:
CLEANUP_PATH_PATTERN: "${DATA_CACHE_HIVEMIND}"
when: always
tags:
- data-cache-storage
download_await:
extends: .job-defaults
@@ -377,50 +375,48 @@ download_await:
AWAIT_PACKAGE_URL: $CI_API_V4_URL/projects/440/packages/generic/await/v1.3.2.1/await
DATA_CACHE_HAF: "${DATA_CACHE_HAF_PREFIX}_${HAF_COMMIT}"
needs:
- job: prepare_haf_data
artifacts: true
script:
- |
if [[ -e "${DATA_CACHE_HAF}/await" ]]; then
echo "Await is already present in ${DATA_CACHE_HAF}"
ls -lAh "${DATA_CACHE_HAF}"
else
echo "Downloading await..."
wget --header "JOB-TOKEN: $CI_JOB_TOKEN" "$AWAIT_PACKAGE_URL" -O "${DATA_CACHE_HAF}/await"
chmod +x ${DATA_CACHE_HAF}/await
fi
tags:
- data-cache-storage
sync:
extends: .job-defaults
image:
name: $HIVEMIND_IMAGE
entrypoint: [""]
stage: sync
interruptible: true
needs:
- job: prepare_haf_data
artifacts: true
- job: prepare_hivemind_image
artifacts: true
- job: download_await
artifacts: false
when: on_success
services:
- name: $HAF_IMAGE_NAME
alias: haf-instance
variables:
PG_ACCESS: "
host haf_block_log hivemind all trust\n
host haf_block_log haf_admin all trust\n
"
DATA_SOURCE: $DATA_CACHE_HAF
DATADIR: $DATA_CACHE_HIVEMIND_DATADIR
SHM_DIR: $DATA_CACHE_HIVEMIND_SHM_DIR
LOG_FILE: $CI_JOB_NAME.log
PGCTLTIMEOUT: 600 # give PostgreSQL more time to start if GitLab shut it down improperly after the replay job
command: ["--replay-blockchain", "--stop-at-block=5000000"]
variables:
GIT_STRATEGY: none
RUNNER_HIVEMIND_SYNC_MAX_BLOCK: 5000024
@@ -434,45 +430,46 @@ sync:
DATA_CACHE_HIVEMIND_SHM_DIR: "${DATA_CACHE_HIVEMIND_DATADIR}/blockchain"
USE_POSTGREST: 1
script:
- |
sleep 20s
cat ${WORKING_DIR}/.hivemind-venv/lib/python3.8/site-packages/hive/_version.py > version.log
pushd ${WORKING_DIR}/app
${DATA_CACHE_HAF}/await -t 5m http://haf-instance:8091 -- echo "HAF ready"
ci/wait-for-postgres.sh ${HAF_ADMIN_POSTGRES_URL}
pushd ${WORKING_DIR}
${WORKING_DIR}/docker_entrypoint.sh setup \
--database-admin-url="${HAF_ADMIN_POSTGRES_URL}" \
--with-reptracker \
--add-mocks=${ADD_MOCKS}
${WORKING_DIR}/app/reputation_tracker/scripts/process_blocks.sh \
--stop-at-block="${RUNNER_HIVEMIND_SYNC_IRREVERSIBLE_MAX_BLOCK}" \
--postgres-url="${HAF_POSTGRES_URL}"
${WORKING_DIR}/docker_entrypoint.sh sync \
--log-mask-sensitive-data \
--pid-file hive_sync.pid \
--test-max-block="${RUNNER_HIVEMIND_SYNC_MAX_BLOCK}" \
--test-profile=False \
--prometheus-port 11011 \
--database-url="${HAF_POSTGRES_URL}" \
--community-start-block 4998000
pushd +2
${WORKING_DIR}/app/ci/collect-db-stats.sh
${WORKING_DIR}/app/reputation_tracker/scripts/process_blocks.sh \
--stop-at-block="${RUNNER_HIVEMIND_SYNC_MAX_BLOCK}" \
--postgres-url="${HAF_POSTGRES_URL}"
after_script:
- cp "$DATA_CACHE_HIVEMIND_DATADIR/$CI_JOB_NAME.log" "haf-$CI_JOB_NAME.log" || true # in after_script, so it's done even if the job fails
- cp "$DATA_CACHE_HIVEMIND_DATADIR/$CI_JOB_NAME.log" "haf-$CI_JOB_NAME.log" ||
true # in after_script, so it's done even if the job fails
artifacts:
when: always
expire_in: 7 days
paths:
- hivemind-sync.log
- pg-stats
- version.log
- haf-$CI_JOB_NAME.log
tags:
- data-cache-storage
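The PG_ACCESS variable in the services block above carries pg_hba.conf rules. Assuming the HAF entrypoint appends those lines verbatim (an assumption, not confirmed by this diff), the effect is equivalent to:

# Hypothetical sketch: append trust rules for the hivemind and haf_admin
# roles on the haf_block_log database to the cluster's pg_hba.conf.
cat >> "$PGDATA/pg_hba.conf" <<'EOF'
host haf_block_log hivemind  all trust
host haf_block_log haf_admin all trust
EOF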
# e2e_benchmark:
# image: $CI_REGISTRY_IMAGE/ci-base-image:python-3.8-slim-6
@@ -602,52 +599,50 @@ e2e_benchmark_on_postgrest:
stage: benchmark
interruptible: true
needs:
- job: prepare_haf_data
artifacts: true
- job: prepare_hivemind_image
artifacts: true
- job: download_await
artifacts: false
- job: sync
artifacts: true
# - job: e2e_benchmark
# artifacts: false
when: on_success
services:
- name: $HAF_IMAGE_NAME
alias: haf-instance
variables:
PG_ACCESS: "
host haf_block_log hivemind all trust\n
host haf_block_log haf_admin all trust\n
"
DATADIR: $DATA_CACHE_HIVEMIND_DATADIR
SHM_DIR: $DATA_CACHE_HIVEMIND_SHM_DIR
LOG_FILE: $CI_JOB_NAME.log
PGCTLTIMEOUT: 600 # give PostgreSQL more time to start if GitLab shut it down improperly after the sync job
command: ["--replay-blockchain", "--stop-at-block=5000000"]
- name: $HIVEMIND_IMAGE
alias: hivemind-smoketest # cannot be a variable
entrypoint:
- bash
- -c
- *hivemind-postgrest-serve-script
variables:
WORKING_DIR: /home/hivemind
LOG_PATH: $DATA_CACHE_HIVEMIND_DATADIR/hivemind-server.log
AWAIT_LOG_PATH: $DATA_CACHE_HIVEMIND_DATADIR/hivemind-server-await.log
REQUEST_PATH_LOG_PATH: $DATA_CACHE_HIVEMIND_DATADIR/request_process_times_smoketests.log
- name: $HIVEMIND_IMAGE
alias: hivemind-benchmark # cannot be a variable
entrypoint:
- bash
- -c
- *hivemind-postgrest-serve-script
variables:
WORKING_DIR: /home/hivemind
LOG_PATH: $DATA_CACHE_HIVEMIND_DATADIR/hivemind-benchmark-server.log
AWAIT_LOG_PATH: $DATA_CACHE_HIVEMIND_DATADIR/hivemind-benchmark-server-await.log
REQUEST_PATH_LOG_PATH: $DATA_CACHE_HIVEMIND_DATADIR/request_process_times.log
variables:
HIVED_UID: $HIVED_UID
JOB_TOKEN: $CI_JOB_TOKEN
@@ -660,52 +655,52 @@
# Missing variable enables the entire test group.
RUN_TESTS_WITH_MARKER: "not postgrest_ignore"
script:
- |
echo "HAF image name $HAF_IMAGE_NAME"
echo "Hivemind image name $HIVEMIND_IMAGE"
SMOKETEST_AWAIT_URL="tcp://${RUNNER_HIVEMIND_SMOKETEST_SERVER_HOSTNAME}:${RUNNER_HIVEMIND_SERVER_HTTP_PORT}"
echo "Waiting for Hivemind smoketest server to start running on ${SMOKETEST_AWAIT_URL}"
"${DATA_CACHE_HAF}/await" -t 10m "${SMOKETEST_AWAIT_URL}" -- echo "Hivemind smoketest instance is running"
- *condenser_api_smoketest-script
- *condenser_api_smoketest_negative-script
- *follow_api_smoketest-script
- *follow_api_smoketest_negative-script
- *bridge_api_smoketest-script
- *bridge_api_smoketest_negative-script
- *tags_api_smoketest-script
- *tags_api_smoketest_negative-script
- *database_api_smoketest-script
- *database_api_smoketest_negative-script
- *hive_api_smoketest-script
- *postgrest_negative-script
- *mock_tests-script
- |
BENCHMARK_AWAIT_URL="tcp://${RUNNER_HIVEMIND_BENCHMARK_SERVER_HOSTNAME}:${RUNNER_HIVEMIND_SERVER_HTTP_PORT}"
echo "Waiting for Hivemind benchmark server to start running on ${BENCHMARK_AWAIT_URL}"
"${DATA_CACHE_HAF}/await" -t 10m "${BENCHMARK_AWAIT_URL}" -- echo "Hivemind benchmark instance is running"
# TODO: Uncomment anchors to enable a test group. To test only selected methods, add their names to the environment variable
# - *api-benchmark-script
after_script:
- |
echo -e "\e[0Ksection_start:$(date +%s):logs[collapsed=true]\r\e[0KCollecting logs..."
ls -lah "${DATA_CACHE_HIVEMIND_DATADIR}"
cp "${DATA_CACHE_HIVEMIND_DATADIR}/${CI_JOB_NAME}.log" "haf-$CI_JOB_NAME.log" || true
cp "${DATA_CACHE_HIVEMIND_DATADIR}/request_process_times_smoketests.log" request_process_times_smoketests.log || true
cp "${DATA_CACHE_HIVEMIND_DATADIR}/hivemind-server.log" hivemind-server.log || true
cp "${DATA_CACHE_HIVEMIND_DATADIR}/hivemind-server-await.log" hivemind-server-await.log || true
cp "${DATA_CACHE_HIVEMIND_DATADIR}/request_process_times.log" request_process_times.log || true
cp "${DATA_CACHE_HIVEMIND_DATADIR}/hivemind-benchmark-server.log" hivemind-benchmark-server.log || true
cp "${DATA_CACHE_HIVEMIND_DATADIR}/hivemind-benchmark-server-await.log" hivemind-benchmark-server-await.log || true
echo -e "\e[0Ksection_end:$(date +%s):logs\r\e[0K"
- |
echo -e "\e[0Ksection_start:$(date +%s):dotenv[collapsed=true]\r\e[0KPreparing dotenv file..."
{
echo "ARTIFACTS_JOB_ID=$CI_JOB_ID"
echo "APP_VERSION=$(python -c "with open('version.log') as f: exec(f.read()); print(__version__)")"
echo "SERVER_NAME=$CI_RUNNER_DESCRIPTION"
} > variables.env
cat variables.env
echo -e "\e[0Ksection_end:$(date +%s):dotenv\r\e[0K"
artifacts:
when: always
expire_in: 7 days
@@ -713,44 +708,46 @@ e2e_benchmark_on_postgrest:
junit: "*.xml"
dotenv: variables.env
paths:
- "*.xml"
- haf-$CI_JOB_NAME.log
- hivemind-sync.log
- hivemind-server.log
- hivemind-benchmark-server.log
- pg-stats
- tests/api_tests/hivemind/tavern/**/*.out.json
- request_process_times.log
- request_process_times_smoketests.log
- version.log
- hivemind-server-await.log
- hivemind-benchmark-server-await.log
- "*.xml"
- haf-$CI_JOB_NAME.log
- hivemind-sync.log
- hivemind-server.log
- hivemind-benchmark-server.log
- pg-stats
- tests/api_tests/hivemind/tavern/**/*.out.json
- request_process_times.log
- request_process_times_smoketests.log
- version.log
- hivemind-server-await.log
- hivemind-benchmark-server-await.log
tags:
- data-cache-storage
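The dotenv step above recovers __version__ by exec-ing the copy of hive/_version.py that the sync job saved as version.log. A standalone sketch of the same trick:

# Exec the captured _version.py to define __version__ without
# installing the hive package; version.log comes from the sync job.
python3 - <<'EOF'
with open('version.log') as f:
    exec(f.read())
print(__version__)
EOF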
build_and_publish_image:
stage: publish
extends: .publish_docker_image_template
before_script:
- !reference [.publish_docker_image_template, before_script]
script:
- |
TAG=$(echo "$CI_COMMIT_TAG" | sed 's/[!+]/-/g')
scripts/ci-helpers/build_and_publish_instance.sh --image-tag=$TAG
docker tag "$CI_REGISTRY_IMAGE/postgrest-rewriter:$TAG" "registry-upload.hive.blog/hivemind/postgrest-rewriter:$TAG"
docker push "registry-upload.hive.blog/hivemind/postgrest-rewriter:$TAG"
tags: *shared_tags
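Docker tags may not contain "!" or "+", hence the sed 's/[!+]/-/g' rewrite above; for example (version string illustrative):

# A build-metadata tag becomes a valid Docker tag:
echo "1.27.5+hf1" | sed 's/[!+]/-/g'   # prints: 1.27.5-hf1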
Trigger benchmark-results-collector:
stage: collector
needs:
- job: e2e_benchmark_on_postgrest
artifacts: true # Even though variables.env is a dotenv report rather than a regular artifact, this still needs to be set to true
rules:
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
when: on_success
- if: '$CI_COMMIT_BRANCH == "develop"'
when: on_success
- when: manual
allow_failure: true
variables:
ARTIFACTS_URL: https://gitlab.syncad.com/api/v4/projects/$CI_PROJECT_ID/jobs/$ARTIFACTS_JOB_ID/artifacts
PRIVATE_TOKEN: $READ_ARTIFACT_ACCESS_TOKEN
......
Subproject commit 70155bed6cc86145bc6094a2a8b0e7cf0143b30b
Subproject commit 7f9f79c35c8bafe29aeaab134a0e5802857d9a7b
@@ -137,7 +137,6 @@ class DbState:
'hive_posts_category_id_payout_plus_pending_payout_depth_idx',
'hive_posts_author_id_created_at_id_idx',
'hive_posts_author_id_id_idx',
'hive_posts_api_helper_author_s_permlink_idx',
'hive_votes_voter_id_last_update_idx',
'hive_votes_block_num_idx',
'hive_subscriptions_block_num_idx',
@@ -408,14 +407,6 @@
cls._execute_query_with_modified_work_mem(db=db_mgr.db, sql=sql)
log.info("[MASSIVE] update_hive_posts_root_id executed in %.4fs", perf_counter() - time_start)
@classmethod
def _finish_hive_posts_api_helper(cls, db, last_imported_block, current_imported_block):
with AutoDbDisposer(db, "finish_hive_posts_api_helper") as db_mgr:
time_start = perf_counter()
sql = f"SELECT {SCHEMA_NAME}.update_hive_posts_api_helper({last_imported_block}, {current_imported_block});"
cls._execute_query_with_modified_work_mem(db=db_mgr.db, sql=sql)
log.info("[MASSIVE] update_hive_posts_api_helper executed in %.4fs", perf_counter() - time_start)
@classmethod
def _finish_hive_feed_cache(cls, db, last_imported_block, current_imported_block):
with AutoDbDisposer(db, "finish_hive_feed_cache") as db_mgr:
@@ -526,14 +517,8 @@
methods = [
('notification_cache', cls._finish_notification_cache, [cls.db()]),
('follow_count', cls._finish_follow_count, [cls.db(), last_imported_block, current_imported_block]),
(
'hive_posts_api_helper',
cls._finish_hive_posts_api_helper,
[cls.db(), last_imported_block, current_imported_block],
),
]
# Notifications are dependent on many tables, therefore it's necessary to calculate them at the end
# hive_posts_api_helper is dependent on `hive_posts/root_id` filling
cls.process_tasks_in_threads("[MASSIVE] %i threads finished filling tables. Part nr 1", methods)
real_time = FOSM.stop(start_time)
......
@@ -349,18 +349,6 @@ def build_metadata():
sa.Column('hivemind_git_rev', sa.Text, nullable=False, server_default=''),
)
# hive_posts_api_helper is only used by list_comments, so this should be removed with that api call
sa.Table(
'hive_posts_api_helper',
metadata,
sa.Column('hive_rowid', sa.BigInteger, server_default=hive_rowid_seq.next_value(), nullable=False),
sa.Column('id', sa.Integer, primary_key=True, autoincrement=False),
sa.Column(
'author_s_permlink', VARCHAR(275, collation='C'), nullable=False
), # concatenation of author '/' permlink
sa.Index('hive_posts_api_helper_author_s_permlink_idx', 'author_s_permlink'),
)
sa.Table(
'hive_mentions',
metadata,
@@ -672,8 +660,6 @@ def setup_runtime_code(db):
"condenser_follows.sql",
"hot_and_trends.sql",
"update_hive_posts_children_count.sql",
"update_hive_posts_api_helper.sql",
"database_api_list_comments.sql",
"database_api_list_votes.sql",
"update_posts_rshares.sql",
"update_hive_post_root_id.sql",
@@ -761,8 +747,6 @@ def setup_runtime_code(db):
"postgrest/utilities/create_database_post_object.sql",
"postgrest/database_api/database_api_find_comments.sql",
"postgrest/utilities/valid_date.sql",
"postgrest/utilities/list_comments.sql",
"postgrest/database_api/database_api_list_comments.sql",
"postgrest/bridge_api/bridge_api_list_subscribers.sql",
"postgrest/bridge_api/bridge_api_get_trending_topics.sql",
"postgrest/bridge_api/bridge_api_list_communities.sql",
......
DROP TYPE IF EXISTS hivemind_app.database_api_post CASCADE;
CREATE TYPE hivemind_app.database_api_post AS (
id INT,
community_id INT,
author VARCHAR(16),
permlink VARCHAR(255),
title VARCHAR(512),
body TEXT,
category VARCHAR(255),
depth SMALLINT,
promoted DECIMAL(10,3),
payout DECIMAL(10,3),
last_payout_at TIMESTAMP,
cashout_time TIMESTAMP,
is_paidout BOOLEAN,
children INT,
votes INT,
created_at TIMESTAMP,
updated_at TIMESTAMP,
rshares NUMERIC,
json TEXT,
is_hidden BOOLEAN,
is_grayed BOOLEAN,
total_votes BIGINT,
net_votes BIGINT,
total_vote_weight NUMERIC,
parent_author VARCHAR(16),
parent_permlink_or_category VARCHAR(255),
curator_payout_value VARCHAR(30),
root_author VARCHAR(16),
root_permlink VARCHAR(255),
max_accepted_payout VARCHAR(30),
percent_hbd INT,
allow_replies BOOLEAN,
allow_votes BOOLEAN,
allow_curation_rewards BOOLEAN,
beneficiaries JSON,
url TEXT,
root_title VARCHAR(512),
abs_rshares NUMERIC,
active TIMESTAMP,
author_rewards BIGINT,
muted_reasons INTEGER
);
DROP FUNCTION IF EXISTS hivemind_app.list_comments_by_permlink(character varying, character varying, int);
CREATE OR REPLACE FUNCTION hivemind_app.list_comments_by_permlink(
in _author hivemind_app.hive_accounts.name%TYPE,
in _permlink hivemind_app.hive_permlink_data.permlink%TYPE,
in _limit INT)
RETURNS SETOF hivemind_app.database_api_post
AS
$function$
BEGIN
RETURN QUERY
WITH comments AS MATERIALIZED -- list_comments_by_permlink
(
SELECT
hph.id,
hph.author_s_permlink
FROM hivemind_app.hive_posts_api_helper hph
JOIN hivemind_app.live_posts_comments_view hp ON hp.id = hph.id
WHERE hph.author_s_permlink >= _author || '/' || _permlink
AND NOT hp.is_muted -- all the mute checks in this file look insufficient, but maybe no one uses these API calls?
AND hph.id != 0 -- what does this do?
ORDER BY hph.author_s_permlink
LIMIT _limit
)
SELECT
hp.id, hp.community_id, hp.author, hp.permlink, hp.title, hp.body,
hp.category, hp.depth, hp.promoted, hp.payout, hp.last_payout_at, hp.cashout_time, hp.is_paidout,
hp.children, hp.votes, hp.created_at, hp.updated_at, hp.rshares, hp.json,
hp.is_hidden, hp.is_grayed, hp.total_votes, hp.net_votes, hp.total_vote_weight,
hp.parent_author, hp.parent_permlink_or_category, hp.curator_payout_value, hp.root_author, hp.root_permlink,
hp.max_accepted_payout, hp.percent_hbd, hp.allow_replies, hp.allow_votes,
hp.allow_curation_rewards, hp.beneficiaries, hp.url, hp.root_title, hp.abs_rshares,
hp.active, hp.author_rewards, hp.muted_reasons
FROM comments,
LATERAL hivemind_app.get_post_view_by_id(comments.id) hp
ORDER BY hp.author, hp.permlink
LIMIT _limit;
END;
$function$
LANGUAGE plpgsql STABLE;
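The author_s_permlink >= _author || '/' || _permlink predicate combined with ORDER BY ... LIMIT is keyset pagination, and the same shape recurs in the variants below. A hedged usage sketch via psql (connection URL, account, and permlink are illustrative):

# First page: an empty start key returns the lexicographically first comments.
psql "$HAF_POSTGRES_URL" -c \
  "SELECT id, author, permlink
     FROM hivemind_app.list_comments_by_permlink('', '', 10);"
# Next page: feed the last row's author/permlink back in; the >= predicate
# re-returns that row, so callers typically skip the first result.
psql "$HAF_POSTGRES_URL" -c \
  "SELECT id, author, permlink
     FROM hivemind_app.list_comments_by_permlink('alice', 'my-post', 10);"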
DROP FUNCTION IF EXISTS hivemind_app.list_comments_by_cashout_time(timestamp, character varying, character varying, int);
CREATE OR REPLACE FUNCTION hivemind_app.list_comments_by_cashout_time(
in _cashout_time timestamp,
in _author hivemind_app.hive_accounts.name%TYPE,
in _permlink hivemind_app.hive_permlink_data.permlink%TYPE,
in _limit INT)
RETURNS SETOF hivemind_app.database_api_post
AS
$function$
DECLARE
__post_id INT;
BEGIN
__post_id = hivemind_app.find_comment_id(_author,_permlink, True);
RETURN QUERY
WITH comments AS MATERIALIZED -- list_comments_by_cashout_time
(
SELECT
hp1.id,
hp1.cashout_time
FROM hivemind_app.live_posts_comments_view hp1
WHERE NOT hp1.is_muted
AND hp1.cashout_time > _cashout_time
OR hp1.cashout_time = _cashout_time
AND hp1.id >= __post_id AND hp1.id != 0
ORDER BY
hp1.cashout_time ASC,
hp1.id ASC
LIMIT _limit
)
SELECT
hp.id, hp.community_id, hp.author, hp.permlink, hp.title, hp.body,
hp.category, hp.depth, hp.promoted, hp.payout, hp.last_payout_at, hp.cashout_time, hp.is_paidout,
hp.children, hp.votes, hp.created_at, hp.updated_at, hp.rshares, hp.json,
hp.is_hidden, hp.is_grayed, hp.total_votes, hp.net_votes, hp.total_vote_weight,
hp.parent_author, hp.parent_permlink_or_category, hp.curator_payout_value, hp.root_author, hp.root_permlink,
hp.max_accepted_payout, hp.percent_hbd, hp.allow_replies, hp.allow_votes,
hp.allow_curation_rewards, hp.beneficiaries, hp.url, hp.root_title, hp.abs_rshares,
hp.active, hp.author_rewards, hp.muted_reasons
FROM comments,
LATERAL hivemind_app.get_post_view_by_id(comments.id) hp
ORDER BY comments.cashout_time ASC, comments.id ASC
LIMIT _limit
;
END
$function$
LANGUAGE plpgsql STABLE;
DROP FUNCTION IF EXISTS hivemind_app.list_comments_by_root(character varying, character varying, character varying, character varying, int);
CREATE OR REPLACE FUNCTION hivemind_app.list_comments_by_root(
in _root_author hivemind_app.hive_accounts.name%TYPE,
in _root_permlink hivemind_app.hive_permlink_data.permlink%TYPE,
in _start_post_author hivemind_app.hive_accounts.name%TYPE,
in _start_post_permlink hivemind_app.hive_permlink_data.permlink%TYPE,
in _limit INT)
RETURNS SETOF hivemind_app.database_api_post
AS
$function$
DECLARE
__root_id INT;
__post_id INT;
BEGIN
__root_id = hivemind_app.find_comment_id(_root_author, _root_permlink, True);
__post_id = hivemind_app.find_comment_id(_start_post_author, _start_post_permlink, True);
RETURN QUERY
WITH comments AS MATERIALIZED -- list_comments_by_root
(
SELECT hp.id
FROM hivemind_app.live_posts_comments_view hp
WHERE hp.root_id = __root_id
AND NOT hp.is_muted
AND (__post_id = 0 OR hp.id >= __post_id)
ORDER BY hp.id ASC
LIMIT _limit
)
SELECT
hp.id, hp.community_id, hp.author, hp.permlink, hp.title, hp.body,
hp.category, hp.depth, hp.promoted, hp.payout, hp.last_payout_at, hp.cashout_time, hp.is_paidout,
hp.children, hp.votes, hp.created_at, hp.updated_at, hp.rshares, hp.json,
hp.is_hidden, hp.is_grayed, hp.total_votes, hp.net_votes, hp.total_vote_weight,
hp.parent_author, hp.parent_permlink_or_category, hp.curator_payout_value, hp.root_author, hp.root_permlink,
hp.max_accepted_payout, hp.percent_hbd, hp.allow_replies, hp.allow_votes,
hp.allow_curation_rewards, hp.beneficiaries, hp.url, hp.root_title, hp.abs_rshares,
hp.active, hp.author_rewards, hp.muted_reasons
FROM comments,
LATERAL hivemind_app.get_post_view_by_id(comments.id) hp
ORDER BY comments.id
LIMIT _limit;
END
$function$
LANGUAGE plpgsql STABLE;
DROP FUNCTION IF EXISTS hivemind_app.list_comments_by_parent(character varying, character varying, character varying, character varying, int)
;
CREATE OR REPLACE FUNCTION hivemind_app.list_comments_by_parent(
in _parent_author hivemind_app.hive_accounts.name%TYPE,
in _parent_permlink hivemind_app.hive_permlink_data.permlink%TYPE,
in _start_post_author hivemind_app.hive_accounts.name%TYPE,
in _start_post_permlink hivemind_app.hive_permlink_data.permlink%TYPE,
in _limit INT)
RETURNS SETOF hivemind_app.database_api_post
AS $function$
DECLARE
__post_id INT;
__parent_id INT;
BEGIN
__parent_id = hivemind_app.find_comment_id(_parent_author, _parent_permlink, True);
__post_id = hivemind_app.find_comment_id(_start_post_author, _start_post_permlink, True);
RETURN QUERY
WITH comments AS MATERIALIZED -- list_comments_by_parent
(
SELECT hp.id
FROM hivemind_app.live_posts_comments_view hp
WHERE hp.parent_id = __parent_id
AND NOT hp.is_muted
-- AND (__post_id = 0 OR hp.id > __post_id) --DLN I think correct version should look like this to avoid dups in paging, but we should get rid of all list_comments instead probably, so not going to fix it now in all the places
AND (__post_id = 0 OR hp.id >= __post_id)
ORDER BY hp.id ASC
LIMIT _limit
)
SELECT
hp.id, hp.community_id, hp.author, hp.permlink, hp.title, hp.body,
hp.category, hp.depth, hp.promoted, hp.payout, hp.last_payout_at, hp.cashout_time, hp.is_paidout,
hp.children, hp.votes, hp.created_at, hp.updated_at, hp.rshares, hp.json,
hp.is_hidden, hp.is_grayed, hp.total_votes, hp.net_votes, hp.total_vote_weight,
hp.parent_author, hp.parent_permlink_or_category, hp.curator_payout_value, hp.root_author, hp.root_permlink,
hp.max_accepted_payout, hp.percent_hbd, hp.allow_replies, hp.allow_votes,
hp.allow_curation_rewards, hp.beneficiaries, hp.url, hp.root_title, hp.abs_rshares,
hp.active, hp.author_rewards, hp.muted_reasons
FROM comments,
LATERAL hivemind_app.get_post_view_by_id(comments.id) hp
ORDER BY comments.id
LIMIT _limit;
END
$function$
LANGUAGE plpgsql STABLE;
DROP FUNCTION IF EXISTS hivemind_app.list_comments_by_last_update(character varying, timestamp, character varying, character varying, int)
;
CREATE OR REPLACE FUNCTION hivemind_app.list_comments_by_last_update(
in _parent_author hivemind_app.hive_accounts.name%TYPE,
in _updated_at hivemind_app.hive_posts.updated_at%TYPE,
in _start_post_author hivemind_app.hive_accounts.name%TYPE,
in _start_post_permlink hivemind_app.hive_permlink_data.permlink%TYPE,
in _limit INT)
RETURNS SETOF hivemind_app.database_api_post
AS
$function$
DECLARE
__post_id INT;
__parent_author_id INT;
BEGIN
__parent_author_id = hivemind_app.find_account_id(_parent_author, True);
__post_id = hivemind_app.find_comment_id(_start_post_author, _start_post_permlink, True);
RETURN QUERY
WITH comments AS MATERIALIZED -- list_comments_by_last_update
(
SELECT
hp1.id,
hp1.updated_at
FROM hivemind_app.live_posts_comments_view hp1
JOIN hivemind_app.hive_posts hp2 ON hp1.parent_id = hp2.id
WHERE hp2.author_id = __parent_author_id
AND NOT hp1.is_muted
AND (
hp1.updated_at < _updated_at
OR hp1.updated_at = _updated_at AND hp1.id >= __post_id AND hp1.id != 0
)
ORDER BY hp1.updated_at DESC, hp1.id ASC
LIMIT _limit
)
SELECT
hp.id, hp.community_id, hp.author, hp.permlink, hp.title, hp.body,
hp.category, hp.depth, hp.promoted, hp.payout, hp.last_payout_at, hp.cashout_time, hp.is_paidout,
hp.children, hp.votes, hp.created_at, hp.updated_at, hp.rshares, hp.json,
hp.is_hidden, hp.is_grayed, hp.total_votes, hp.net_votes, hp.total_vote_weight,
hp.parent_author, hp.parent_permlink_or_category, hp.curator_payout_value, hp.root_author, hp.root_permlink,
hp.max_accepted_payout, hp.percent_hbd, hp.allow_replies, hp.allow_votes,
hp.allow_curation_rewards, hp.beneficiaries, hp.url, hp.root_title, hp.abs_rshares,
hp.active, hp.author_rewards, hp.muted_reasons
FROM comments,
LATERAL hivemind_app.get_post_view_by_id(comments.id) hp
ORDER BY comments.updated_at DESC, comments.id ASC
LIMIT _limit;
END
$function$
LANGUAGE plpgsql STABLE;
DROP FUNCTION IF EXISTS hivemind_app.list_comments_by_author_last_update(character varying, timestamp, character varying, character varying, int)
;
CREATE OR REPLACE FUNCTION hivemind_app.list_comments_by_author_last_update(
in _author hivemind_app.hive_accounts.name%TYPE,
in _updated_at hivemind_app.hive_posts.updated_at%TYPE,
in _start_post_author hivemind_app.hive_accounts.name%TYPE,
in _start_post_permlink hivemind_app.hive_permlink_data.permlink%TYPE,
in _limit INT)
RETURNS SETOF hivemind_app.database_api_post
AS
$function$
DECLARE
__author_id INT;
__post_id INT;
BEGIN
__author_id = hivemind_app.find_account_id(_author, True);
__post_id = hivemind_app.find_comment_id(_start_post_author, _start_post_permlink, True);
RETURN QUERY
WITH comments AS MATERIALIZED -- list_comments_by_author_last_update
(
SELECT
hp1.id,
hp1.updated_at
FROM hivemind_app.live_posts_comments_view hp1
WHERE hp1.author_id = __author_id
AND NOT hp1.is_muted
AND (
hp1.updated_at < _updated_at
OR hp1.updated_at = _updated_at
AND hp1.id >= __post_id AND hp1.id != 0
)
ORDER BY hp1.updated_at DESC, hp1.id ASC
LIMIT _limit
)
SELECT
hp.id, hp.community_id, hp.author, hp.permlink, hp.title, hp.body,
hp.category, hp.depth, hp.promoted, hp.payout, hp.last_payout_at, hp.cashout_time, hp.is_paidout,
hp.children, hp.votes, hp.created_at, hp.updated_at, hp.rshares, hp.json,
hp.is_hidden, hp.is_grayed, hp.total_votes, hp.net_votes, hp.total_vote_weight,
hp.parent_author, hp.parent_permlink_or_category, hp.curator_payout_value, hp.root_author, hp.root_permlink,
hp.max_accepted_payout, hp.percent_hbd, hp.allow_replies, hp.allow_votes,
hp.allow_curation_rewards, hp.beneficiaries, hp.url, hp.root_title, hp.abs_rshares,
hp.active, hp.author_rewards, hp.muted_reasons
FROM comments,
LATERAL hivemind_app.get_post_view_by_id(comments.id) hp
ORDER BY comments.updated_at DESC, comments.id ASC
LIMIT _limit;
END
$function$
LANGUAGE plpgsql STABLE;
DROP FUNCTION IF EXISTS hivemind_endpoints.database_api_list_comments;
CREATE FUNCTION hivemind_endpoints.database_api_list_comments(IN _params JSONB)
RETURNS JSONB
LANGUAGE 'plpgsql'
STABLE
AS
$$
DECLARE
_start JSONB;
_limit INT;
BEGIN
_params = hivemind_postgrest_utilities.validate_json_arguments(_params, '{"start": "array", "limit": "number", "order": "string"}', 3, NULL);
_start = hivemind_postgrest_utilities.parse_argument_from_json(_params, 'start', True);
_limit = hivemind_postgrest_utilities.parse_integer_argument_from_json(_params, 'limit', False);
_limit = hivemind_postgrest_utilities.valid_number(_limit, 1000, 1, 1000, 'limit');
CASE hivemind_postgrest_utilities.parse_argument_from_json(_params, 'order', True)
WHEN 'by_cashout_time' THEN RETURN hivemind_postgrest_utilities.list_comments_by_cashout_time(_start, _limit);
WHEN 'by_root' THEN RETURN hivemind_postgrest_utilities.list_comments_by_root_or_parent(_start, _limit, True);
WHEN 'by_parent' THEN RETURN hivemind_postgrest_utilities.list_comments_by_root_or_parent(_start, _limit, False);
WHEN 'by_last_update' THEN RETURN hivemind_postgrest_utilities.list_comments_by_last_update(_start, _limit);
WHEN 'by_author_last_update' THEN RETURN hivemind_postgrest_utilities.list_comments_by_author_last_update(_start, _limit);
WHEN 'by_permlink' THEN RETURN hivemind_postgrest_utilities.list_comments_by_permlink(_start, _limit);
ELSE RAISE EXCEPTION '%', hivemind_postgrest_utilities.raise_parameter_validation_exception('Unsupported order, valid orders: by_cashout_time, by_permlink, by_root, by_parent, by_last_update, by_author_last_update');
END CASE;
END;
$$
;
\ No newline at end of file
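For reference, the dispatcher above accepted JSON-RPC parameters of the shape it validates (a "start" array, "limit", and "order"). An illustrative call to the removed method, with hostname and port as placeholders:

curl -s http://hivemind-server:8080/ \
  -H 'Content-Type: application/json' \
  -d '{"jsonrpc": "2.0", "id": 1, "method": "database_api.list_comments",
       "params": {"start": ["alice", "my-post"], "limit": 10, "order": "by_permlink"}}'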
@@ -224,8 +224,6 @@ BEGIN
result := hivemind_endpoints.database_api_list_votes(__params);
WHEN __method_type = 'find_comments' THEN
result := hivemind_endpoints.database_api_find_comments(__params);
WHEN __method_type = 'list_comments' THEN
result := hivemind_endpoints.database_api_list_comments(__params);
ELSE
RAISE EXCEPTION '%', hivemind_postgrest_utilities.raise_method_not_found_exception('database_api' || __method_type);
END CASE;
......
DROP FUNCTION IF EXISTS hivemind_app.update_hive_posts_api_helper(INTEGER, INTEGER);
CREATE OR REPLACE FUNCTION hivemind_app.update_hive_posts_api_helper(in _first_block_num INTEGER, _last_block_num INTEGER)
RETURNS void
LANGUAGE 'plpgsql'
VOLATILE
AS $BODY$
BEGIN
IF _first_block_num IS NULL OR _last_block_num IS NULL THEN
-- initial creation of table.
INSERT INTO hivemind_app.hive_posts_api_helper
(id, author_s_permlink)
SELECT hp.id, hp.author || '/' || hp.permlink
FROM hivemind_app.live_posts_comments_view hp
JOIN hivemind_app.hive_accounts ha ON (ha.id = hp.author_id)
JOIN hivemind_app.hive_permlink_data hpd_p ON (hpd_p.id = hp.permlink_id)
;
ELSE
-- Regular incremental update.
INSERT INTO hivemind_app.hive_posts_api_helper (id, author_s_permlink)
SELECT hp.id, ha.name || '/' || hpd_p.permlink
FROM hivemind_app.live_posts_comments_view hp
JOIN hivemind_app.hive_accounts ha ON (ha.id = hp.author_id)
JOIN hivemind_app.hive_permlink_data hpd_p ON (hpd_p.id = hp.permlink_id)
WHERE hp.block_num BETWEEN _first_block_num AND _last_block_num
ON CONFLICT (id) DO NOTHING
;
END IF;
END
$BODY$
;
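The function above (shown here as it is being removed) has two modes selected by NULL block bounds: a full initial fill and an incremental update over a block range. Illustrative invocations via psql (connection URL and block numbers are placeholders):

# Initial full fill of the helper table (both bounds NULL):
psql "$HAF_POSTGRES_URL" -c \
  "SELECT hivemind_app.update_hive_posts_api_helper(NULL, NULL);"
# Incremental update for a just-processed block range:
psql "$HAF_POSTGRES_URL" -c \
  "SELECT hivemind_app.update_hive_posts_api_helper(4999000, 5000000);"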
@@ -8,6 +8,7 @@ END$$;
-- In case such tables have been created directly by admin, drop them first to allow correct creation and access during upgrade process.
DROP TABLE IF EXISTS hivemind_app.hive_db_vacuum_needed;
DROP TABLE IF EXISTS hivemind_app.hive_db_data_migration;
DROP TABLE IF EXISTS hivemind_app.hive_posts_api_helper;
SET ROLE hivemind;
......
@@ -436,7 +436,6 @@ class Blocks:
f"SELECT {SCHEMA_NAME}.update_posts_rshares({block_number}, {block_number})",
f"SELECT {SCHEMA_NAME}.update_hive_posts_children_count({block_number}, {block_number})",
f"SELECT {SCHEMA_NAME}.update_hive_posts_root_id({block_number},{block_number})",
f"SELECT {SCHEMA_NAME}.update_hive_posts_api_helper({block_number},{block_number})",
f"SELECT {SCHEMA_NAME}.update_feed_cache({block_number}, {block_number})",
f"SELECT {SCHEMA_NAME}.update_hive_posts_mentions({block_number}, {block_number})",
f"SELECT {SCHEMA_NAME}.update_notification_cache({block_number}, {block_number}, {is_hour_action})",
......
@@ -92,7 +92,7 @@ VALUES
"""
OperationBase.pos_in_block += 1
OperationBase.operation_id = Db.instance().query_one(sql='SELECT operation_id FROM hafd.operation_id(:block_num, :op_type_id, :pos_in_block);',
block_num=self.block_number,
op_type_id=self.type.value,
pos_in_block=OperationBase.pos_in_block,
......
@@ -8,134 +8,6 @@ from hive.server.database_api.objects import database_post_object
from hive.utils.normalize import escape_characters
@return_error_info
async def list_comments(context, start: list, limit: int = 1000, order: str = None):
"""Returns all comments, starting with the specified options."""
supported_order_list = [
'by_cashout_time',
'by_permlink',
'by_root',
'by_parent',
'by_last_update',
'by_author_last_update',
]
assert order is not None, "missing a required argument: 'order'"
assert order in supported_order_list, f"Unsupported order, valid orders: {', '.join(supported_order_list)}"
limit = valid_limit(limit, 1000, 1000)
db = context['db']
result = []
if order == 'by_cashout_time':
assert (
len(start) == 3
), "Expecting three arguments in 'start' array: cashout time, optional page start author and permlink"
cashout_time = start[0]
valid_date(cashout_time)
if cashout_time[0:4] == '1969':
cashout_time = "infinity"
author = start[1]
valid_account(author, allow_empty=True)
permlink = start[2]
valid_permlink(permlink, allow_empty=True)
sql = f"SELECT * FROM {SCHEMA_NAME}.list_comments_by_cashout_time(:cashout_time, :author, :permlink, :limit)"
result = await db.query_all(sql, cashout_time=cashout_time, author=author, permlink=permlink, limit=limit)
elif order == 'by_permlink':
assert len(start) == 2, "Expecting two arguments in 'start' array: author and permlink"
author = start[0]
assert isinstance(author, str), "invalid account name type"
permlink = start[1]
assert isinstance(permlink, str), "permlink must be string"
sql = f"SELECT * FROM {SCHEMA_NAME}.list_comments_by_permlink(:author, :permlink, :limit)"
result = await db.query_all(sql, author=author, permlink=permlink, limit=limit)
elif order == 'by_root':
assert (
len(start) == 4
), "Expecting 4 arguments in 'start' array: discussion root author and permlink, optional page start author and permlink"
root_author = start[0]
valid_account(root_author)
root_permlink = start[1]
valid_permlink(root_permlink)
start_post_author = start[2]
valid_account(start_post_author, allow_empty=True)
start_post_permlink = start[3]
valid_permlink(start_post_permlink, allow_empty=True)
sql = f"SELECT * FROM {SCHEMA_NAME}.list_comments_by_root(:root_author, :root_permlink, :start_post_author, :start_post_permlink, :limit)"
result = await db.query_all(
sql,
root_author=root_author,
root_permlink=root_permlink,
start_post_author=start_post_author,
start_post_permlink=start_post_permlink,
limit=limit,
)
elif order == 'by_parent':
assert (
len(start) == 4
), "Expecting 4 arguments in 'start' array: parent post author and permlink, optional page start author and permlink"
parent_author = start[0]
valid_account(parent_author)
parent_permlink = start[1]
valid_permlink(parent_permlink)
start_post_author = start[2]
valid_account(start_post_author, allow_empty=True)
start_post_permlink = start[3]
valid_permlink(start_post_permlink, allow_empty=True)
sql = f"SELECT * FROM {SCHEMA_NAME}.list_comments_by_parent(:parent_author, :parent_permlink, :start_post_author, :start_post_permlink, :limit)"
result = await db.query_all(
sql,
parent_author=parent_author,
parent_permlink=parent_permlink,
start_post_author=start_post_author,
start_post_permlink=start_post_permlink,
limit=limit,
)
elif order == 'by_last_update':
assert (
len(start) == 4
), "Expecting 4 arguments in 'start' array: parent author, update time, optional page start author and permlink"
parent_author = start[0]
valid_account(parent_author)
updated_at = start[1]
valid_date(updated_at)
start_post_author = start[2]
valid_account(start_post_author, allow_empty=True)
start_post_permlink = start[3]
valid_permlink(start_post_permlink, allow_empty=True)
sql = f"SELECT * FROM {SCHEMA_NAME}.list_comments_by_last_update(:parent_author, :updated_at, :start_post_author, :start_post_permlink, :limit)"
result = await db.query_all(
sql,
parent_author=parent_author,
updated_at=updated_at,
start_post_author=start_post_author,
start_post_permlink=start_post_permlink,
limit=limit,
)
elif order == 'by_author_last_update':
assert (
len(start) == 4
), "Expecting 4 arguments in 'start' array: author, update time, optional page start author and permlink"
author = start[0]
valid_account(author)
updated_at = start[1]
valid_date(updated_at)
start_post_author = start[2]
valid_account(start_post_author, allow_empty=True)
start_post_permlink = start[3]
valid_permlink(start_post_permlink, allow_empty=True)
sql = f"SELECT * FROM {SCHEMA_NAME}.list_comments_by_author_last_update(:author, :updated_at, :start_post_author, :start_post_permlink, :limit)"
result = await db.query_all(
sql,
author=author,
updated_at=updated_at,
start_post_author=start_post_author,
start_post_permlink=start_post_permlink,
limit=limit,
)
return {"comments": [database_post_object(dict(row)) for row in result]}
@return_error_info
async def find_comments(context, comments: list):
"""Search for comments: limit and order is ignored in hive code"""
......
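With list_comments removed, find_comments remains the database_api comment lookup; it takes explicit [author, permlink] pairs rather than a paged range. An example request, assuming a hivemind server listening locally (URL and pair are placeholders):

import json
import urllib.request

payload = {
    'jsonrpc': '2.0',
    'id': 1,
    'method': 'database_api.find_comments',
    'params': {'comments': [['steemit', 'firstpost']]},  # [author, permlink] pairs
}
request = urllib.request.Request(
    'http://localhost:8080/',  # assumed local endpoint
    data=json.dumps(payload).encode(),
    headers={'content-type': 'application/json'},
)
with urllib.request.urlopen(request) as response:
    print(json.load(response))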
......@@ -168,7 +168,6 @@ def build_methods():
# database_api methods
methods.add(
**{
'database_api.list_comments': database_api.list_comments,
'database_api.find_comments': database_api.find_comments,
'database_api.list_votes': database_api.list_votes,
'database_api.find_votes': database_api.find_votes,
......
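Deleting the registry entry is the whole server-side removal: once the name is missing from methods, the JSON-RPC layer answers database_api.list_comments with a method-not-found error. A plain-dict sketch of that behavior (the real registry is the server's methods object, not a dict):

methods = {
    'database_api.find_comments': lambda **kwargs: ...,  # stub handlers
    'database_api.list_votes': lambda **kwargs: ...,
}

def dispatch(method_name, **params):
    handler = methods.get(method_name)
    if handler is None:
        raise LookupError(f'Method not found: {method_name}')
    return handler(**params)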
Subproject commit 0ae830ca07114d0b076e11e054e9221e476a3728
Subproject commit 2b7b90a2a6a486bd31690c1718c7596e38a7fd80
......@@ -42,8 +42,6 @@ ADD_API_TEST(${CMAKE_BINARY_DIR}/tests/tests_api ${CMAKE_CURRENT_SOURCE_DIR}/tes
ADD_API_TEST(${CMAKE_BINARY_DIR}/tests/tests_api ${CMAKE_CURRENT_SOURCE_DIR}/tests_api hivemind condenser_api get_tags_used_by_author steemit)
ADD_API_TEST(${CMAKE_BINARY_DIR}/tests/tests_api ${CMAKE_CURRENT_SOURCE_DIR}/tests_api hivemind condenser_api get_trending_tags blocktrades 1)
ADD_API_TEST(${CMAKE_BINARY_DIR}/tests/tests_api ${CMAKE_CURRENT_SOURCE_DIR}/tests_api hivemind database_api list_comments ["steemit","firstpost","",""] 1 by_root)
ADD_API_TEST(${CMAKE_BINARY_DIR}/tests/tests_api ${CMAKE_CURRENT_SOURCE_DIR}/tests_api hivemind follow_api get_account_reputations 1 blocktrades 1)
ADD_API_TEST(${CMAKE_BINARY_DIR}/tests/tests_api ${CMAKE_CURRENT_SOURCE_DIR}/tests_api hivemind follow_api get_blog 1 blocktrades 0 1)
ADD_API_TEST(${CMAKE_BINARY_DIR}/tests/tests_api ${CMAKE_CURRENT_SOURCE_DIR}/tests_api hivemind follow_api get_blog_entries 1 blocktrades 0 1)
......
{
"code": -32000,
"data": {
"code": 13,
"message": "Day of month value is out of range 1..31",
"name": "N5boost16exception_detail10clone_implINS0_19error_info_injectorINS_9gregorian16bad_day_of_monthEEEEE",
"stack": [
{
"context": {
"file": "time.cpp",
"hostname": "",
"level": "warn",
"line": 48,
"method": "from_iso_string",
"timestamp": "2020-09-29T12:44:27"
},
"data": {
"what": "Day of month value is out of range 1..31"
},
"format": "${what}: unable to convert ISO-formatted string to fc::time_point_sec"
}
]
},
"message": "Day of month value is out of range 1..31:Day of month value is out of range 1..31: unable to convert ISO-formatted string to fc::time_point_sec"
}
{
"code": -32602,
"data": "Date is blank",
"message": "Invalid parameters"
}
---
test_name: Hivemind
marks:
- patterntest
- negative
includes:
- !include ../../../common.yaml
stages:
- name: test
request:
url: "{service.proto:s}://{service.server:s}:{service.port}/"
method: POST
headers:
content-type: application/json
json:
jsonrpc: "2.0"
id: 1
method: "database_api.list_comments"
params:
{
"start": ["gtg", "", "", ""],
"limit": 10,
"order": "by_author_last_update",
}
response:
status_code: 200
verify_response_with:
function: validate_response:compare_response_with_pattern
extra_kwargs:
error_response: true
\ No newline at end of file
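Pattern files like the ones above and below are what compare_response_with_pattern checks the live response against; with error_response: true the validator inspects the error member instead of result. A rough sketch of such a check, assuming the pattern sits in a JSON file next to the test (the real helper lives in the shared test tooling and also masks volatile fields such as timestamps):

import json

def compare_response_with_pattern(response, pattern_path, error_response=False):
    # Load the stored pattern and compare it against the relevant JSON-RPC member.
    with open(pattern_path) as f:
        expected = json.load(f)
    actual = response.json()
    key = 'error' if error_response else 'result'
    assert key in actual, f"expected an '{key}' member in the response"
    assert actual[key] == expected, f'{actual[key]!r} != {expected!r}'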
{
"code": -32000,
"data": {
"code": 13,
"message": "Day of month value is out of range 1..31",
"name": "N5boost16exception_detail10clone_implINS0_19error_info_injectorINS_9gregorian16bad_day_of_monthEEEEE",
"stack": [
{
"context": {
"file": "time.cpp",
"hostname": "",
"level": "warn",
"line": 48,
"method": "from_iso_string",
"timestamp": "2020-10-12T14:19:52"
},
"data": {
"what": "Day of month value is out of range 1..31"
},
"format": "${what}: unable to convert ISO-formatted string to fc::time_point_sec"
}
]
},
"message": "Day of month value is out of range 1..31:Day of month value is out of range 1..31: unable to convert ISO-formatted string to fc::time_point_sec"
}