Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Target project: hive/hivemind
Commits on Source (18)
Showing with 1981 additions and 385 deletions
@@ -144,3 +144,5 @@ Pipfile.lock
 pghero.yml
 *~
 .tmp
+.private
stages:
  - build
  - test
  - data-supply
  - deploy
  - e2e-test
  - benchmark-tests
  - post-deploy

variables:
  GIT_DEPTH: 1
  LC_ALL: "C"
  GIT_STRATEGY: clone
  GIT_SUBMODULE_STRATEGY: recursive
  GIT_CLONE_PATH: $CI_BUILDS_DIR/$CI_COMMIT_REF_SLUG/$CI_CONCURRENT_ID/project-name
  HIVEMIND_SOURCE_HIVED_URL: $HIVEMIND_SOURCE_HIVED_URL
  HIVEMIND_DB_NAME: "hive_$CI_COMMIT_REF_SLUG"
  HIVEMIND_HTTP_PORT: $((HIVEMIND_HTTP_PORT + CI_CONCURRENT_ID))
  # Configured at gitlab repository settings side
  POSTGRES_USER: $HIVEMIND_POSTGRES_USER
  POSTGRES_PASSWORD: $HIVEMIND_POSTGRES_PASSWORD
  POSTGRES_HOST_AUTH_METHOD: trust
  # official way to provide password to psql: http://www.postgresql.org/docs/9.3/static/libpq-envars.html
  PGPASSWORD: $HIVEMIND_POSTGRES_PASSWORD

default:
  before_script:
    - pwd
    - echo "CI_NODE_TOTAL is $CI_NODE_TOTAL"
    - echo "CI_NODE_INDEX is $CI_NODE_INDEX"
    - echo "CI_CONCURRENT_ID is $CI_CONCURRENT_ID"
    - echo "CI_COMMIT_REF_SLUG is $CI_COMMIT_REF_SLUG"

hivemind_build:
  stage: build
  script:
    - pip3 install --user --upgrade pip setuptools
    - git fetch --tags
    - git tag -f ci_implicit_tag
    - echo $PYTHONUSERBASE
    - "python3 setup.py bdist_egg"
    - ls -l dist/*
  artifacts:
    paths:
      - dist/
    expire_in: 1 week
  tags:
    - hivemind
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
      when: always
    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
      when: always
    - if: '$CI_COMMIT_BRANCH == "develop"'
      when: always

hivemind_sync:
  stage: data-supply
  environment:
    name: "hive sync built from branch $CI_COMMIT_REF_NAME targeting database $HIVEMIND_DB_NAME"
  needs:
    - job: hivemind_build
      artifacts: true
  variables:
    GIT_STRATEGY: none
    PYTHONUSERBASE: ./local-site
  script:
    - pip3 install --user --upgrade pip setuptools
    # WARNING!!! temporarily hardcoded 5000017 instead $HIVEMIND_MAX_BLOCK
    # revert that change when $HIVEMIND_MAX_BLOCK will be set to 5000017
    - scripts/ci_sync.sh "$HIVEMIND_DB_NAME" "$HIVEMIND_POSTGRESQL_CONNECTION_STRING" "$HIVEMIND_SOURCE_HIVED_URL" 5000017 $HIVEMIND_HTTP_PORT
  artifacts:
    paths:
      - hivemind-sync.log
    expire_in: 1 week
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
      when: always
    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
      when: always
    - if: '$CI_COMMIT_BRANCH == "develop"'
      when: always
    - if: '$CI_PIPELINE_SOURCE == "push"'
      when: manual
    - when: on_success
  tags:
    - hivemind

hivemind_start_server:
  stage: deploy
  environment:
    name: "hive serve built from branch $CI_COMMIT_REF_NAME exposed on port $HIVEMIND_HTTP_PORT"
    url: "http://hive-4.pl.syncad.com:$HIVEMIND_HTTP_PORT"
    on_stop: hivemind_stop_server
  needs:
    - job: hivemind_build
      artifacts: true
    # - job: hivemind_sync
    #   artifacts: true
  variables:
    GIT_STRATEGY: none
    PYTHONUSERBASE: ./local-site
  script:
    - scripts/ci_start_server.sh "$HIVEMIND_DB_NAME" "$HIVEMIND_POSTGRESQL_CONNECTION_STRING" "$HIVEMIND_SOURCE_HIVED_URL" $HIVEMIND_HTTP_PORT
  artifacts:
    paths:
      - hive_server.pid
    expire_in: 1 week
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
      when: always
    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
      when: always
    - if: '$CI_COMMIT_BRANCH == "develop"'
      when: always
    - if: '$CI_PIPELINE_SOURCE == "push"'
      when: manual
    - when: on_success
  tags:
    - hivemind

hivemind_stop_server:
  stage: post-deploy
  environment:
    name: "hive serve built from branch $CI_COMMIT_REF_NAME exposed on port $HIVEMIND_HTTP_PORT"
    action: stop
  variables:
    GIT_STRATEGY: none
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
      when: always
  script:
    - scripts/ci_stop_server.sh hive_server.pid
  needs:
    - job: hivemind_start_server
      artifacts: true
  tags:
    - hivemind
  artifacts:
    paths:
      - hive_server.log

.hivemind_start_api_smoketest: &common_api_smoketest_job
  stage: e2e-test
  environment: hive-4.pl.syncad.com
  needs:
    - job: hivemind_start_server
      artifacts: true
  variables:
    GIT_STRATEGY: none
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
      when: always
    - if: '$CI_PIPELINE_SOURCE == "push"'
      when: manual
    - when: on_success
  tags:
    - hivemind

bridge_api_smoketest:
  <<: *common_api_smoketest_job
  script:
    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" bridge_api_patterns/ api_smoketest_bridge.xml
  artifacts:
    reports:
      junit: api_smoketest_bridge.xml

bridge_api_smoketest_negative:
  <<: *common_api_smoketest_job
  script:
    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" bridge_api_negative/ api_smoketest_bridge_negative.xml
  artifacts:
    reports:
      junit: api_smoketest_bridge_negative.xml

condenser_api_smoketest:
  <<: *common_api_smoketest_job
  script:
    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" condenser_api_patterns/ api_smoketest_condenser_api.xml
  artifacts:
    reports:
      junit: api_smoketest_condenser_api.xml

condenser_api_smoketest_negative:
  <<: *common_api_smoketest_job
  script:
    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" condenser_api_negative/ api_smoketest_condenser_api_negative.xml
  artifacts:
    reports:
      junit: api_smoketest_condenser_api_negative.xml

database_api_smoketest:
  <<: *common_api_smoketest_job
  script:
    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" database_api_patterns/ api_smoketest_database_api.xml
  artifacts:
    reports:
      junit: api_smoketest_database_api.xml

database_api_smoketest_negative:
  <<: *common_api_smoketest_job
  script:
    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" database_api_negative/ api_smoketest_database_api_negative.xml
  artifacts:
    reports:
      junit: api_smoketest_database_api_negative.xml

follow_api_smoketest:
  <<: *common_api_smoketest_job
  script:
    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" follow_api_patterns/ api_smoketest_follow_api.xml
  artifacts:
    reports:
      junit: api_smoketest.xml

follow_api_smoketest_negative:
  <<: *common_api_smoketest_job
  script:
    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" follow_api_negative/ api_smoketest_follow_api_negative.xml
  artifacts:
    reports:
      junit: api_smoketest_follow_api_negative.xml

tags_api_smoketest:
  <<: *common_api_smoketest_job
  script:
    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" tags_api_patterns/ api_smoketest_tags_api.xml
  artifacts:
    reports:
      junit: api_smoketest_tags_api.xml

tags_api_smoketest_negative:
  <<: *common_api_smoketest_job
  script:
    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" tags_api_negative/ api_smoketest_tags_api_negative.xml

mock_tests:
  <<: *common_api_smoketest_job
  script:
    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" mock_tests/ api_smoketest_mock_tests.xml

api_smoketest_benchmark:
  stage: benchmark-tests
  environment: hive-4.pl.syncad.com
  needs:
    - job: hivemind_start_server
      artifacts: true
  allow_failure: true
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
      when: always
    - if: '$CI_PIPELINE_SOURCE == "push"'
      when: manual
    - when: on_success
  tags:
    - hivemind
  script:
    - ./scripts/ci_start_api_benchmarks.sh localhost $HIVEMIND_HTTP_PORT 5
  artifacts:
    when: always
    paths:
      - tavern_benchmarks_report.html
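One detail worth noting in the configuration above: HIVEMIND_HTTP_PORT is defined with shell arithmetic, the intent being one HTTP port per concurrent runner slot. In plain shell the expression behaves like this (a minimal sketch; the base port and slot id are illustrative values, not from the source):

    # Each concurrent runner slot derives its own port from a project-level base.
    HIVEMIND_HTTP_PORT=18080   # hypothetical base port set in project settings
    CI_CONCURRENT_ID=2         # supplied by the runner per concurrent slot
    echo $((HIVEMIND_HTTP_PORT + CI_CONCURRENT_ID))   # prints 18082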
-# https://hub.docker.com/r/library/python/tags/
-image: "python:3.7"
 stages:
   - build
   - test
   - data-supply
   - deploy
   - e2e-test
   - benchmark-tests
   - post-deploy

-variables:
-  GIT_DEPTH: 1
-  LC_ALL: "C"
-  GIT_STRATEGY: clone
-  GIT_SUBMODULE_STRATEGY: recursive
-  GIT_CLONE_PATH: $CI_BUILDS_DIR/$CI_COMMIT_REF_SLUG/$CI_CONCURRENT_ID/project-name
-  HIVEMIND_SOURCE_HIVED_URL: $HIVEMIND_SOURCE_HIVED_URL
-  HIVEMIND_DB_NAME: "hive_$CI_COMMIT_REF_SLUG"
-  HIVEMIND_HTTP_PORT: $((HIVEMIND_HTTP_PORT + CI_CONCURRENT_ID))
-  # Configured at gitlab repository settings side
-  POSTGRES_USER: $HIVEMIND_POSTGRES_USER
-  POSTGRES_PASSWORD: $HIVEMIND_POSTGRES_PASSWORD
-  POSTGRES_HOST_AUTH_METHOD: trust
-  # official way to provide password to psql: http://www.postgresql.org/docs/9.3/static/libpq-envars.html
-  PGPASSWORD: $HIVEMIND_POSTGRES_PASSWORD

-default:
-  before_script:
-    - pwd
-    - echo "CI_NODE_TOTAL is $CI_NODE_TOTAL"
-    - echo "CI_NODE_INDEX is $CI_NODE_INDEX"
-    - echo "CI_CONCURRENT_ID is $CI_CONCURRENT_ID"
-    - echo "CI_COMMIT_REF_SLUG is $CI_COMMIT_REF_SLUG"

-hivemind_build:
-  stage: build
-  script:
-    - pip3 install --user --upgrade pip setuptools
-    - git fetch --tags
-    - git tag -f ci_implicit_tag
-    - echo $PYTHONUSERBASE
-    - "python3 setup.py bdist_egg"
-    - ls -l dist/*
-  artifacts:
-    paths:
-      - dist/
-    expire_in: 1 week
-  rules:
-    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
-      when: always
-    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "develop"'
-      when: always
-    - when: always
-  tags:
-    - hivemind

-hivemind_sync:
-  stage: data-supply
-  environment:
-    name: "hive sync built from branch $CI_COMMIT_REF_NAME targeting database $HIVEMIND_DB_NAME"
-  needs:
-    - job: hivemind_build
-      artifacts: true
-  variables:
-    GIT_STRATEGY: none
-    PYTHONUSERBASE: ./local-site
-  script:
-    - pip3 install --user --upgrade pip setuptools
-    # WARNING: hardcoded 5000017 for max block
-    - scripts/ci_sync.sh "$HIVEMIND_DB_NAME" "$HIVEMIND_POSTGRESQL_CONNECTION_STRING" "$HIVEMIND_SOURCE_HIVED_URL" 5000017 $HIVEMIND_HTTP_PORT
-  artifacts:
-    paths:
-      - hivemind-sync.log
-    expire_in: 1 week
-  rules:
-    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
-      when: always
-    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "develop"'
-      when: always
-    - if: '$CI_PIPELINE_SOURCE == "push"'
-      when: manual
-    - when: on_success
-  tags:
-    - hivemind

-hivemind_start_server:
-  stage: deploy
-  environment:
-    name: "hive serve built from branch $CI_COMMIT_REF_NAME exposed on port $HIVEMIND_HTTP_PORT"
-    url: "http://hive-4.pl.syncad.com:$HIVEMIND_HTTP_PORT"
-    on_stop: hivemind_stop_server
-  needs:
-    - job: hivemind_build
-      artifacts: true
-    # - job: hivemind_sync
-    #   artifacts: true
-  variables:
-    GIT_STRATEGY: none
-    PYTHONUSERBASE: ./local-site
-  script:
-    - scripts/ci_start_server.sh "$HIVEMIND_DB_NAME" "$HIVEMIND_POSTGRESQL_CONNECTION_STRING" "$HIVEMIND_SOURCE_HIVED_URL" $HIVEMIND_HTTP_PORT
-  artifacts:
-    paths:
-      - hive_server.pid
-    expire_in: 1 week
-  rules:
-    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
-      when: always
-    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "develop"'
-      when: always
-    - if: '$CI_PIPELINE_SOURCE == "push"'
-      when: manual
-    - when: on_success
-  tags:
-    - hivemind

-hivemind_stop_server:
-  stage: post-deploy
-  environment:
-    name: "hive serve built from branch $CI_COMMIT_REF_NAME exposed on port $HIVEMIND_HTTP_PORT"
-    action: stop
-  variables:
-    GIT_STRATEGY: none
-  rules:
-    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
-      when: always
-    - when: manual
-  script:
-    - scripts/ci_stop_server.sh hive_server.pid
-  needs:
-    - job: hivemind_start_server
-      artifacts: true
-  tags:
-    - hivemind
-  artifacts:
-    paths:
-      - hive_server.log

-.hivemind_start_api_smoketest: &common_api_smoketest_job
-  stage: e2e-test
-  environment: hive-4.pl.syncad.com
-  needs:
-    - job: hivemind_start_server
-      artifacts: true
-  variables:
-    GIT_STRATEGY: none
-  rules:
-    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
-      when: always
-    - if: '$CI_PIPELINE_SOURCE == "push"'
-      when: manual
-    - when: on_success
-  tags:
-    - hivemind

-bridge_api_smoketest:
-  <<: *common_api_smoketest_job
-  script:
-    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" bridge_api_patterns/ api_smoketest_bridge.xml
-  artifacts:
-    reports:
-      junit: api_smoketest_bridge.xml

-bridge_api_smoketest_negative:
-  <<: *common_api_smoketest_job
-  script:
-    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" bridge_api_negative/ api_smoketest_bridge_negative.xml
-  artifacts:
-    reports:
-      junit: api_smoketest_bridge_negative.xml

-condenser_api_smoketest:
-  <<: *common_api_smoketest_job
-  script:
-    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" condenser_api_patterns/ api_smoketest_condenser_api.xml
-  artifacts:
-    reports:
-      junit: api_smoketest_condenser_api.xml

-condenser_api_smoketest_negative:
-  <<: *common_api_smoketest_job
-  script:
-    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" condenser_api_negative/ api_smoketest_condenser_api_negative.xml
-  artifacts:
-    reports:
-      junit: api_smoketest_condenser_api_negative.xml

-database_api_smoketest:
-  <<: *common_api_smoketest_job
-  script:
-    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" database_api_patterns/ api_smoketest_database_api.xml
-  artifacts:
-    reports:
-      junit: api_smoketest_database_api.xml

-database_api_smoketest_negative:
-  <<: *common_api_smoketest_job
-  script:
-    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" database_api_negative/ api_smoketest_database_api_negative.xml
-  artifacts:
-    reports:
-      junit: api_smoketest_database_api_negative.xml

-follow_api_smoketest:
-  <<: *common_api_smoketest_job
-  script:
-    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" follow_api_patterns/ api_smoketest_follow_api.xml
-  artifacts:
-    reports:
-      junit: api_smoketest.xml

-follow_api_smoketest_negative:
-  <<: *common_api_smoketest_job
-  script:
-    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" follow_api_negative/ api_smoketest_follow_api_negative.xml
-  artifacts:
-    reports:
-      junit: api_smoketest_follow_api_negative.xml

-tags_api_smoketest:
-  <<: *common_api_smoketest_job
-  script:
-    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" tags_api_patterns/ api_smoketest_tags_api.xml
-  artifacts:
-    reports:
-      junit: api_smoketest_tags_api.xml

-tags_api_smoketest_negative:
-  <<: *common_api_smoketest_job
-  script:
-    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" tags_api_negative/ api_smoketest_tags_api_negative.xml

-mock_tests:
-  <<: *common_api_smoketest_job
-  script:
-    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" mock_tests/ api_smoketest_mock_tests.xml

-api_smoketest_benchmark:
-  stage: benchmark-tests
-  environment: hive-4.pl.syncad.com
-  needs:
-    - job: hivemind_start_server
-      artifacts: true
-  allow_failure: true
-  rules:
-    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
-      when: always
-    - if: '$CI_PIPELINE_SOURCE == "push"'
-      when: manual
-    - when: on_success
-  tags:
-    - hivemind
-  script:
-    - ./scripts/ci_start_api_benchmarks.sh localhost $HIVEMIND_HTTP_PORT 5
-  artifacts:
-    when: always
-    paths:
+.dk-setup-pip: &dk-setup-pip
+  - python -m venv .venv
+  - source .venv/bin/activate
+  - time pip install --upgrade pip setuptools wheel
+  - pip --version
+  - easy_install --version
+  - wheel version
+  - pipenv --version
+  - poetry --version
+  - time pip install --editable .[dev]

+.dk-setup-runner-env: &dk-setup-runner-env
+  # Setup runner environment (to connect to correct postgres server, mainly).
+  - TMP_VAR=$(cat hive-sync-runner-id.txt 2>/dev/null || true); export HIVE_SYNC_RUNNER_ID=${TMP_VAR:-0}
+  - eval $(cat "$RUNNER_CONF" | ./scripts/ci/setup_env.py --current-runner-id=${CI_RUNNER_ID} --hive-sync-runner-id=${HIVE_SYNC_RUNNER_ID})

+.dk-set-variables: &dk-set-variables
+  # - export # List all variables and its values set by Gitlab CI.
+  - whoami
+  - echo "CI_RUNNER_ID is $CI_RUNNER_ID"
+  - echo "CI_PIPELINE_URL is $CI_PIPELINE_URL"
+  - echo "CI_PIPELINE_ID is $CI_PIPELINE_ID"
+  - echo "CI_COMMIT_SHORT_SHA is $CI_COMMIT_SHORT_SHA"
+  - echo "CI_COMMIT_REF_SLUG is $CI_COMMIT_REF_SLUG"
+  - export HIVEMIND_DB_NAME=${HIVEMIND_DB_NAME//[^a-zA-Z0-9_]/_}
+  - echo "HIVEMIND_DB_NAME is $HIVEMIND_DB_NAME"

+.dk-fetch-git-tags: &dk-fetch-git-tags
+  # - git fetch --tags # Looks to be unnecessary.
+  - git tag -f ci_implicit_tag # Needed to build python package

+.dk-start-timer: &dk-start-timer
+  - ./scripts/ci/timer.sh start

+.dk-stop-timer: &dk-stop-timer
+  - ./scripts/ci/timer.sh check

+.dk-hive-sync-script-common: &dk-hive-sync-script-common
+  - echo "${CI_RUNNER_ID}" > hive-sync-runner-id.txt
+  - ./scripts/ci/wait-for-postgres.sh "$RUNNER_POSTGRES_HOST" "$RUNNER_POSTGRES_PORT"
+  - export POSTGRES_MAJOR_VERSION=$(./scripts/ci/get-postgres-version.sh)
+  - ./scripts/ci/create-db.sh
+  - ./scripts/ci/hive-sync.sh
+  - ./scripts/ci/collect-db-stats.sh

+.dk-rules-for-sync: &dk-rules-for-sync
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: always
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: always
+    - if: '$CI_COMMIT_BRANCH == "develop"'
+      when: always
+    - if: '$CI_PIPELINE_SOURCE == "push"'
+      when: manual
+    - when: manual

+.dk-rules-for-test: &dk-rules-for-test
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: on_success
+    - if: '$CI_PIPELINE_SOURCE == "push"'
+      when: on_success
+    - when: on_success

+.dk-default:
+  image: hivemind/python:3.6
+  interruptible: true
+  inherit:
+    default: false
+    variables: false
+  variables:
+    GIT_DEPTH: 10
+    GIT_STRATEGY: fetch
+    GIT_SUBMODULE_STRATEGY: recursive
+    PIPENV_VENV_IN_PROJECT: 1
+    PIPENV_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pipenv"
+    PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
+    POSTGRES_CLIENT_TOOLS_PATH: /usr/lib/postgresql
+    HIVEMIND_DB_NAME: "hive_${CI_COMMIT_REF_SLUG}"
+  cache: &dk-global-cache
+    # Per-branch caching. CI_COMMIT_REF_SLUG is the same thing.
+    # key: "$CI_COMMIT_REF_NAME"
+    # Per project caching – use any key.
+    # Change this key, if you need to clear cache.
+    key: common-1
+    paths:
+      - .cache/
+      - .venv/
+      - .tox/
+  before_script:
+    - *dk-start-timer
+    - *dk-fetch-git-tags
+    - *dk-set-variables
+    - *dk-setup-pip
+    - *dk-setup-runner-env
+  after_script:
+    - *dk-stop-timer

+##### Jobs #####

+dk-hivemind-sync:
+  # Postgres shared on host.
+  extends: .dk-default
+  <<: *dk-rules-for-sync
+  stage: data-supply
+  needs: []
+  script:
+    - *dk-hive-sync-script-common
+  artifacts:
+    paths:
+      - hivemind-sync.log
+      - pg-stats
+      - hive-sync-runner-id.txt
+    expire_in: 7 days
+  tags:
+    - hivemind-heavy-job

+.dk-test-common:
+  extends: .dk-default
+  <<: *dk-rules-for-test
+  needs:
+    - job: dk-hivemind-sync
+      artifacts: true
+  allow_failure: false
+  before_script:
+    - *dk-start-timer
+    - *dk-fetch-git-tags
+    - *dk-set-variables
+    - *dk-setup-pip
+    - *dk-setup-runner-env
+    - ./scripts/ci/wait-for-postgres.sh "$RUNNER_POSTGRES_HOST" "$RUNNER_POSTGRES_PORT"
+    - ./scripts/ci/hive-server.sh start
+  after_script:
+    - *dk-stop-timer
+  tags:
+    - hivemind-light-job

+dk-bridge_api_smoketest:
+  stage: e2e-test
+  extends: .dk-test-common
+  script:
+    - |
+      ./scripts/ci/start-api-smoketest.sh \
+        localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
+        bridge_api_patterns/ api_smoketest_bridge.xml \
+        $RUNNER_TEST_JOBS
+  artifacts:
+    when: always
+    reports:
+      junit: api_smoketest_bridge.xml

+dk-bridge_api_smoketest_negative:
+  stage: e2e-test
+  extends: .dk-test-common
+  script:
+    - |
+      ./scripts/ci/start-api-smoketest.sh \
+        localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
+        bridge_api_negative/ api_smoketest_bridge_negative.xml \
+        $RUNNER_TEST_JOBS
+  artifacts:
+    when: always
+    reports:
+      junit: api_smoketest_bridge_negative.xml

+dk-condenser_api_smoketest:
+  stage: e2e-test
+  extends: .dk-test-common
+  script:
+    - |
+      ./scripts/ci/start-api-smoketest.sh \
+        localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
+        condenser_api_patterns/ api_smoketest_condenser_api.xml \
+        $RUNNER_TEST_JOBS
+  artifacts:
+    when: always
+    reports:
+      junit: api_smoketest_condenser_api.xml

+dk-condenser_api_smoketest_negative:
+  stage: e2e-test
+  extends: .dk-test-common
+  script:
+    - |
+      ./scripts/ci/start-api-smoketest.sh \
+        localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
+        condenser_api_negative/ api_smoketest_condenser_api_negative.xml \
+        $RUNNER_TEST_JOBS
+  artifacts:
+    when: always
+    reports:
+      junit: api_smoketest_condenser_api_negative.xml

+dk-database_api_smoketest:
+  stage: e2e-test
+  extends: .dk-test-common
+  script:
+    - |
+      ./scripts/ci/start-api-smoketest.sh \
+        localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
+        database_api_patterns/ api_smoketest_database_api.xml \
+        $RUNNER_TEST_JOBS
+  artifacts:
+    when: always
+    reports:
+      junit: api_smoketest_database_api.xml

+dk-database_api_smoketest_negative:
+  stage: e2e-test
+  extends: .dk-test-common
+  script:
+    - |
+      ./scripts/ci/start-api-smoketest.sh \
+        localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
+        database_api_negative/ api_smoketest_database_api_negative.xml \
+        $RUNNER_TEST_JOBS
+  artifacts:
+    when: always
+    reports:
+      junit: api_smoketest_database_api_negative.xml

+dk-follow_api_smoketest:
+  stage: e2e-test
+  extends: .dk-test-common
+  script:
+    - |
+      ./scripts/ci/start-api-smoketest.sh \
+        localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
+        follow_api_patterns/ api_smoketest_follow_api.xml \
+        $RUNNER_TEST_JOBS
+  artifacts:
+    when: always
+    reports:
+      junit: api_smoketest.xml

+dk-follow_api_smoketest_negative:
+  stage: e2e-test
+  extends: .dk-test-common
+  script:
+    - |
+      ./scripts/ci/start-api-smoketest.sh \
+        localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
+        follow_api_negative/ api_smoketest_follow_api_negative.xml \
+        $RUNNER_TEST_JOBS
+  artifacts:
+    when: always
+    reports:
+      junit: api_smoketest_follow_api_negative.xml

+dk-tags_api_smoketest:
+  stage: e2e-test
+  extends: .dk-test-common
+  script:
+    - |
+      ./scripts/ci/start-api-smoketest.sh \
+        localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
+        tags_api_negative/ api_smoketest_tags_api_negative.xml \
+        $RUNNER_TEST_JOBS
+  artifacts:
+    when: always
+    reports:
+      junit: api_smoketest_tags_api_negative.xml

+dk-tags_api_smoketest_negative:
+  stage: e2e-test
+  extends: .dk-test-common
+  script:
+    - |
+      ./scripts/ci/start-api-smoketest.sh \
+        localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
+        tags_api_patterns/ api_smoketest_tags_api.xml \
+        $RUNNER_TEST_JOBS
+  artifacts:
+    when: always
+    reports:
+      junit: api_smoketest_tags_api.xml

+dk-mock_tests:
+  stage: e2e-test
+  extends: .dk-test-common
+  script:
+    - |
+      scripts/ci/start-api-smoketest.sh \
+        localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
+        mock_tests/ api_smoketest_mock_tests.xml \
+        $RUNNER_TEST_JOBS

+dk-api-smoketest-benchmark:
+  stage: benchmark-tests
+  extends: .dk-test-common
+  # Temporary failure (when any call is longer than 1s is allowed)
+  allow_failure: true
+  script:
+    - |
+      ./scripts/ci/start-api-benchmarks.sh \
+        localhost $RUNNER_HIVEMIND_SERVER_HTTP_PORT 5 \
+        $RUNNER_TEST_JOBS
+    - ./scripts/xml_report_parser.py . ./tests/tests_api/hivemind/tavern
+  artifacts:
+    when: always
+    paths:
...
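A side note on the .dk-setup-runner-env anchor above: it relies on eval of generated export lines. A minimal sketch of that pattern, with a stand-in function for scripts/ci/setup_env.py (the emitted variable names follow the RUNNER_* convention used above; the values are invented for illustration):

    # The helper prints shell export statements; eval applies them to the job shell.
    emit_env() {
      echo 'export RUNNER_POSTGRES_HOST=localhost'
      echo 'export RUNNER_POSTGRES_PORT=25432'
    }
    eval "$(emit_env)"
    echo "$RUNNER_POSTGRES_HOST:$RUNNER_POSTGRES_PORT"   # -> localhost:25432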
-version: "3"
+version: "3.2"
 services:

-  python-3.6:
-    image: hivemind/python:3.6
+  python-3.6-dev:
+    image: hivemind/python:3.6-dev
     build:
       context: .
-      dockerfile: ./scripts/ci/python/3.6/Dockerfile
+      dockerfile: ./scripts/ci/python/3.6/dev.dockerfile
       args:
         - user=${USER}
-        - workdir=/home/${USER}
+        - workdir=/home/${USER}/hivemind
     user: ${USER}
-    shm_size: 0
-    # Below command makes your container running forever.
+    # security_opt:
+    #   # Significant performance boost (about 5%), but very insecure.
+    #   # See https://medium.com/better-programming/faster-python-in-docker-d1a71a9b9917
+    #   # See https://docs.docker.com/engine/security/seccomp/
+    #   - seccomp:unconfined
+    shm_size: 2g
     # command: ["tail", "-f", "/dev/null"]
+    volumes:
+      # Sockets of postgres servers on dockers.
+      - "postgres-10-run:/var/run/postgres-10"
+      - "postgres-12-run:/var/run/postgres-12"
+      # Sockets of postgres servers on host.
+      - "/var/run/postgresql:/var/run/postgresql"
+      # For keeping python dependencies created in docker.
+      - "python-3.6-dev:/home/${USER}"
+      # Application stuff from host.
+      - "$PWD/hive:$PWD/hive"
+      - "$PWD/tests:$PWD/tests"
+      - "$PWD/hive.conf:$PWD/hive.conf"
+      - "$PWD/pyproject.toml:$PWD/pyproject.toml"
+      - "$PWD/README.md:$PWD/README.md"
+      - "$PWD/setup.cfg:$PWD/setup.cfg"
+      - "$PWD/setup.py:$PWD/setup.py"
+      - "$PWD/tox.ini:$PWD/tox.ini"

+  python-3.6:
+    image: hivemind/python:3.6
+    build:
+      context: .
+      dockerfile: ./scripts/ci/python/3.6/Dockerfile
+      args:
+        - user=worker
+    user: worker
+    shm_size: 2g
+    volumes:
+      # Sockets of postgres servers on host.
+      - "/var/run/postgresql:/var/run/postgresql"

   python-3.8:
     image: hivemind/python:3.8
-    shm_size: 0
     build:
       context: .
       dockerfile: ./scripts/ci/python/3.8/Dockerfile
       args:
-        - user=${USER}
-        - workdir=/home/${USER}
-    user: ${USER}
-    # Below command makes your container running forever.
-    # command: ["tail", "-f", "/dev/null"]
+        - user=worker
+    user: worker
+    shm_size: 2g
+    volumes:
+      # Sockets of postgres servers on host.
+      - "/var/run/postgresql:/var/run/postgresql"

   postgres-10:
     image: hivemind/postgres:10
@@ -37,35 +75,17 @@ services:
     environment:
       - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
     volumes:
-      - postgres-10-pgdata:/var/lib/postgresql/data
+      - $PWD/$POSTGRES_10_CONF_FILE:/etc/postgresql/postgresql.conf:ro
+      - postgres-10-run:/var/run/postgresql
     ports:
       - "${POSTGRES_10_PUBLISHED_PORT}:5432"
-    shm_size: 0
+    shm_size: 12g
     command: [
       "postgres",
-      "-c", "shared_preload_libraries=pg_stat_statements,pg_qualstats",
-      "-c", "track_functions=pl",
-      "-c", "track_io_timing=on",
-      "-c", "track_activity_query_size=2048",
-      "-c", "pg_stat_statements.max=10000",
-      "-c", "pg_stat_statements.track=all",
-      "-c", "max_connections=100",
-      "-c", "shared_buffers=12GB",
-      "-c", "effective_cache_size=36GB",
-      "-c", "maintenance_work_mem=2GB",
-      "-c", "checkpoint_completion_target=0.9",
-      "-c", "wal_buffers=16MB",
-      "-c", "default_statistics_target=100",
-      "-c", "random_page_cost=1.1",
-      "-c", "effective_io_concurrency=200",
-      "-c", "work_mem=31457kB",
-      "-c", "min_wal_size=2GB",
-      "-c", "max_wal_size=8GB",
-      "-c", "max_worker_processes=12",
-      "-c", "max_parallel_workers_per_gather=4",
-      "-c", "max_parallel_workers=12",
+      "-c", "config_file=/etc/postgresql/postgresql.conf"
     ]

   postgres-12:
     image: hivemind/postgres:12
     restart: unless-stopped
@@ -75,56 +95,38 @@ services:
     environment:
       - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
     volumes:
-      - postgres-12-pgdata:/var/lib/postgresql/data
+      - $PWD/$POSTGRES_12_CONF_FILE:/etc/postgresql/postgresql.conf:ro
+      - postgres-12-run:/var/run/postgresql
     ports:
       - "${POSTGRES_12_PUBLISHED_PORT}:5432"
-    shm_size: 0
-    # https://pgtune.leopard.in.ua/#/ oltp 48G ram, 12 cpus, ssd
+    shm_size: 12g
     command: [
       "postgres",
-      "-c", "shared_preload_libraries=pg_stat_statements,pg_qualstats",
-      "-c", "track_functions=pl",
-      "-c", "track_io_timing=on",
-      "-c", "track_activity_query_size=2048",
-      "-c", "pg_stat_statements.max=10000",
-      "-c", "pg_stat_statements.track=all",
-      "-c", "max_connections=100",
-      "-c", "shared_buffers=12GB",
-      "-c", "effective_cache_size=36GB",
-      "-c", "maintenance_work_mem=2GB",
-      "-c", "checkpoint_completion_target=0.9",
-      "-c", "wal_buffers=16MB",
-      "-c", "default_statistics_target=100",
-      "-c", "random_page_cost=1.1",
-      "-c", "effective_io_concurrency=200",
-      "-c", "work_mem=31457kB",
-      "-c", "min_wal_size=2GB",
-      "-c", "max_wal_size=8GB",
-      "-c", "max_worker_processes=12",
-      "-c", "max_parallel_workers_per_gather=4",
-      "-c", "max_parallel_workers=12",
-      "-c", "max_parallel_maintenance_workers=4",
+      "-c", "config_file=/etc/postgresql/postgresql.conf"
     ]

   hived-node:
-    image: registry.gitlab.syncad.com/hive/hive/consensus_node:00b5ff55
+    image: $HIVED_IMAGE
     restart: unless-stopped
-    # ports:
-    #   - "2001:2001"
-    #   - "8090:8090"
-    #   - "8091:8091"
-    shm_size: 0
+    ports:
+      - "$HIVED_PUBLISHED_WS_PORT:8090" # websocket
+      - "$HIVED_PUBLISHED_HTTP_PORT:8091"
+    shm_size: 12g
     entrypoint: /usr/local/hive/consensus/entrypoint.sh
-    command: >-
-      --replay-blockchain
-      --stop-replay-at-block 5000000
+    command: [
+      "--replay-blockchain",
+      "--stop-replay-at-block 5000000"
+    ]
     volumes:
       - $PWD/scripts/ci/hived-node/entrypoint.sh:/usr/local/hive/consensus/entrypoint.sh
       - $PWD/scripts/ci/hived-node/config.ini:/usr/local/hive/consensus/datadir/config.ini
       - ${HIVED_BLOCK_LOG_FILE}:/usr/local/hive/consensus/datadir/blockchain/block_log
       - hived-node-datadir:/usr/local/hive/consensus/datadir

 volumes:
-  postgres-10-pgdata:
-  postgres-12-pgdata:
+  postgres-10-run:
+  postgres-12-run:
   hived-node-datadir:
+  python-3.6-dev:
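The postgres-10-run / postgres-12-run volumes and the /var/run/postgresql bind mount above share postgres unix-socket directories between containers and the host, so a client can connect without TCP. A quick connectivity check from inside a container might look like this (a sketch; the superuser and database names are illustrative):

    # psql treats a --host that is a directory path as a unix-socket directory.
    psql --host /var/run/postgresql --username postgres --dbname postgres -c 'select version();'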
@@ -21,13 +21,13 @@ def setup_logging(conf):
         fmt = '%(asctime)s.%(msecs)03d{} %(created).6f ' \
             '%(levelname)s - %(name)s - %(message)s'.format(timezone)
         logging.basicConfig(format=fmt, datefmt=datefmt)
-    if timestamp:
+    elif timestamp:
         datefmt='%Y-%m-%d %H:%M:%S'
         timezone = time.strftime('%z')
         fmt = '%(asctime)s.%(msecs)03d{} ' \
             '%(levelname)s - %(name)s - %(message)s'.format(timezone)
         logging.basicConfig(format=fmt, datefmt=datefmt)
-    if epoch:
+    elif epoch:
         fmt = '%(created).6f %(levelname)s - %(name)s - %(message)s'
         logging.basicConfig(format=fmt)
     else:
...
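The change above replaces two independent if tests with an elif chain, so setup_logging configures the logger from exactly one of the mutually exclusive branches (dual-timestamp, timestamp, epoch, or the default) instead of letting a later if override an earlier one.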
@@ -840,7 +840,70 @@
       "transaction_merkle_root": "0000000000000000000000000000000000000000",
       "extensions": [],
       "witness_signature": "",
-      "transactions": [],
+      "transactions": [
+        {
+          "ref_block_num": 100001,
+          "ref_block_prefix": 1,
+          "expiration": "2020-03-23T12:17:00",
+          "operations": [
+            {
+              "type": "custom_json_operation",
+              "value": {
+                "required_auths": [],
+                "required_posting_auths": [
+                  "tester1"
+                ],
+                "id": "follow",
+                "json": "[\"follow\",{\"follower\":\"tester1\",\"following\":\"tester2\",\"what\":[\"blog\"]}]"
+              }
+            },
+            {
+              "type": "custom_json_operation",
+              "value": {
+                "required_auths": [],
+                "required_posting_auths": [
+                  "tester2"
+                ],
+                "id": "follow",
+                "json": "[\"follow\",{\"follower\":\"tester2\",\"following\":[\"tester3\", \"tester4\"],\"what\":[\"blog\"]}]"
+              }
+            },
+            {
+              "type": "custom_json_operation",
+              "value": {
+                "required_auths": [],
+                "required_posting_auths": [
+                  "tester3"
+                ],
+                "id": "follow",
+                "json": "[\"follow\",{\"follower\":\"tester3\",\"following\":[\"tester4\"],\"what\":[\"blog\"]}]"
+              }
+            },
+            {
+              "type": "custom_json_operation",
+              "value": {
+                "required_auths": [],
+                "required_posting_auths": [
+                  "tester4"
+                ],
+                "id": "follow",
+                "json": "[\"follow\",{\"follower\":\"tester4\",\"following\":[\"tester5\", \"tester1\"],\"what\":[\"blog\"]}]"
+              }
+            },
+            {
+              "type": "custom_json_operation",
+              "value": {
+                "required_auths": [],
+                "required_posting_auths": [
+                  "tester5"
+                ],
+                "id": "follow",
+                "json": "[\"follow\",{\"follower\":\"tester5\",\"following\":[\"tester1\", \"tester2\"],\"what\":[\"blog\"]}]"
+              }
+            }
+          ]
+        }
+      ],
       "block_id": "004c4b4e00000000000000000000000000000000",
       "signing_key": "",
       "transaction_ids": []
...
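The added transactions embed the follow relations as stringified JSON inside each operation. A quick way to eyeball them, assuming the block above is saved as mock_block.json (filename illustrative):

    # Parse the nested "json" string and print follower -> following pairs.
    jq -r '.transactions[].operations[].value.json | fromjson | .[1] | "\(.follower) follows \(.following)"' mock_block.json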
stages:
  - run

variables:
  GIT_DEPTH: 10
  GIT_STRATEGY: fetch # It's quick, but noticed errors with that, sometimes.
  # GIT_STRATEGY: clone
  # GIT_STRATEGY: none
  GIT_SUBMODULE_STRATEGY: recursive
  MY_VARIABLE: "bamboo"

default:
  image: hivemind/python:3.6
  interruptible: false
  cache: &global-cache
    # Per-branch caching. CI_COMMIT_REF_SLUG is the same thing.
    # key: "$CI_COMMIT_REF_NAME"
    # Per project caching – use any key.
    # Change this key, if you need to clear cache.
    key: common-1
    paths:
      - .cache/
      - .venv/
      - .tox/
  before_script:
    - echo "I am before_script in child-1. MY_VARIABLE is $MY_VARIABLE"
  after_script:
    - echo "I am after_script in child-1. MY_VARIABLE is $MY_VARIABLE"

child-1-job:
  stage: run
  rules:
    - when: manual
  script:
    - echo "I am script in child-1-job. MY_VARIABLE is $MY_VARIABLE"
    - sleep 30
    - exit 1
  tags:
    - hivemind-light-job
stages:
  - run

variables:
  GIT_DEPTH: 10
  GIT_STRATEGY: fetch # It's quick, but noticed errors with that, sometimes.
  # GIT_STRATEGY: clone
  # GIT_STRATEGY: none
  GIT_SUBMODULE_STRATEGY: recursive
  MY_VARIABLE: "bamboo"

default:
  image: hivemind/python:3.6
  interruptible: false
  cache: &global-cache
    # Per-branch caching. CI_COMMIT_REF_SLUG is the same thing.
    # key: "$CI_COMMIT_REF_NAME"
    # Per project caching – use any key.
    # Change this key, if you need to clear cache.
    key: common-1
    paths:
      - .cache/
      - .venv/
      - .tox/
  before_script:
    - echo "I am before_script in child-2. MY_VARIABLE is $MY_VARIABLE"
  after_script:
    - echo "I am after_script in child-2. MY_VARIABLE is $MY_VARIABLE"

child-2-job:
  stage: run
  script:
    - echo "I am script in child-2-job. MY_VARIABLE is $MY_VARIABLE"
  tags:
    - hivemind-light-job
# See https://gitlab.com/fgrimshaw/dynamic-ci
# See https://gitlab.com/gitlab-org/gitlab/-/issues/212373
# I tested this feature, but our current version of Gitlab 13.2.2
# doesn't support it well. Child pipelines run with no problem,
# but the UI displays wrong badges; for instance, a job was marked as
# still running though it had finished. Also, jobs with the rule
# "when: manual" were started without the user's permission.
# We need to wait for better support in the Gitlab UI.

stages:
  - run

variables:
  GIT_STRATEGY: none

trigger-child-1:
  stage: run
  rules:
    - if: '$CI_COMMIT_MESSAGE =~ /child-1/'
      when: always
  trigger:
    include: .gitlab-ci-child-pipeline-1.yaml
    strategy: depend

trigger-child-2:
  stage: run
  rules:
    - if: '$CI_COMMIT_MESSAGE =~ /child-2/'
      when: always
  trigger:
    include: .gitlab-ci-child-pipeline-2.yaml
    strategy: depend
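With the rules above, a child pipeline is selected purely by the commit message matching /child-1/ or /child-2/; for example (hypothetical commit):

    # An empty commit whose message matches /child-1/ starts child pipeline 1.
    git commit --allow-empty -m "Test child-1 pipeline"
    git push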
@@ -2,25 +2,27 @@
 set -euo pipefail

-collect_stats() {
+collect_db_stats() {
     echo "Collecting statistics from database ${HIVEMIND_DB_NAME}"
     mkdir -p pg-stats
     DIR=$PWD/pg-stats

-    PGPASSWORD=${POSTGRES_PASSWORD} psql \
-        --username "${POSTGRES_USER}" \
-        --host ${POSTGRES_HOST} \
-        --port ${POSTGRES_PORT} \
+    PGPASSWORD=${RUNNER_POSTGRES_APP_USER_PASSWORD} psql \
+        --username "${RUNNER_POSTGRES_APP_USER=}" \
+        --host ${RUNNER_POSTGRES_HOST} \
+        --port ${RUNNER_POSTGRES_PORT} \
         --dbname ${HIVEMIND_DB_NAME} << EOF
 \timing
 \copy (select * from pg_settings) to '$DIR/pg_settings.csv' WITH CSV HEADER
 \copy (select * from pg_stat_user_tables) to '$DIR/pg_stat_user_tables.csv' WITH CSV HEADER
 -- Disabled, because this table is too big.
---\copy (select * from pg_stat_statements) to '$DIR/pg_stat_statements.csv' WITH CSV HEADER
+-- \copy (select * from pg_stat_statements) to '$DIR/pg_stat_statements.csv' WITH CSV HEADER
+/*
+-- Looks to be unuseful.
 -- See https://github.com/powa-team/pg_qualstats
 \echo pg_qualstats index advisor
 SELECT v
@@ -33,8 +35,9 @@ SELECT v
   FROM json_array_elements(
     pg_qualstats_index_advisor(min_filter => 50)->'unoptimised') v
 ORDER BY v::text COLLATE "C";
+*/
 EOF
 }

-collect_stats
+collect_db_stats
@@ -2,46 +2,48 @@
 set -euo pipefail

+# TODO We have troubles with user, when postgresql is run from docker.
+# We need user name `postgres`, not other, I'm afraid.
+# ADMIN_POSTGRES_USER=postgres
+# ADMIN_POSTGRES_USER_PASSWORD=postgres
+
 create_db() {
-    echo "Creating user ${HIVEMIND_POSTGRES_USER} and database ${HIVEMIND_DB_NAME}, owned by this user"
+    echo "Creating user ${RUNNER_POSTGRES_APP_USER} and database ${HIVEMIND_DB_NAME}, owned by this user"
+    TEMPLATE="template_monitoring"

-    PGPASSWORD=${ADMIN_POSTGRES_USER_PASSWORD} psql \
-        --username "${ADMIN_POSTGRES_USER}" \
-        --host ${POSTGRES_HOST} \
-        --port ${POSTGRES_PORT} \
+    PGPASSWORD=${RUNNER_POSTGRES_ADMIN_USER_PASSWORD} psql \
+        --username "${RUNNER_POSTGRES_ADMIN_USER}" \
+        --host ${RUNNER_POSTGRES_HOST} \
+        --port ${RUNNER_POSTGRES_PORT} \
         --dbname postgres << EOF

-\echo Creating role ${HIVEMIND_POSTGRES_USER}
+\echo Creating role ${RUNNER_POSTGRES_APP_USER}

 DO \$$
 BEGIN
     IF EXISTS (SELECT * FROM pg_user
-            WHERE pg_user.usename = '${HIVEMIND_POSTGRES_USER}') THEN
-        raise warning 'Role % already exists', '${HIVEMIND_POSTGRES_USER}';
+            WHERE pg_user.usename = '${RUNNER_POSTGRES_APP_USER}') THEN
+        raise warning 'Role % already exists', '${RUNNER_POSTGRES_APP_USER}';
     ELSE
-        CREATE ROLE ${HIVEMIND_POSTGRES_USER}
-            WITH LOGIN PASSWORD '${HIVEMIND_POSTGRES_USER_PASSWORD}';
+        CREATE ROLE ${RUNNER_POSTGRES_APP_USER}
+            WITH LOGIN PASSWORD '${RUNNER_POSTGRES_APP_USER_PASSWORD}';
     END IF;
 END
 \$$;

-\echo Creating database ${HIVEMIND_DB_NAME}
+-- We drop database to enable retry of CI job.
+\echo Dropping database ${HIVEMIND_DB_NAME}
+DROP DATABASE IF EXISTS ${HIVEMIND_DB_NAME};

-CREATE DATABASE ${HIVEMIND_DB_NAME} TEMPLATE template_monitoring
-    OWNER ${HIVEMIND_POSTGRES_USER};
+\echo Creating database ${HIVEMIND_DB_NAME}
+CREATE DATABASE ${HIVEMIND_DB_NAME} TEMPLATE ${TEMPLATE}
+    OWNER ${RUNNER_POSTGRES_APP_USER};

 COMMENT ON DATABASE ${HIVEMIND_DB_NAME} IS
     'Database for Gitlab CI pipeline ${CI_PIPELINE_URL}, commit ${CI_COMMIT_SHORT_SHA}';

 \c ${HIVEMIND_DB_NAME}

+drop schema if exists hivemind_admin cascade;
+
 create schema hivemind_admin
-    authorization ${HIVEMIND_POSTGRES_USER};
+    authorization ${RUNNER_POSTGRES_APP_USER};

 CREATE SEQUENCE hivemind_admin.database_metadata_id_seq
     INCREMENT 1
@@ -63,10 +65,10 @@ CREATE TABLE hivemind_admin.database_metadata
     );

 alter sequence hivemind_admin.database_metadata_id_seq
-    OWNER TO ${HIVEMIND_POSTGRES_USER};
+    OWNER TO ${RUNNER_POSTGRES_APP_USER};

 alter table hivemind_admin.database_metadata
-    OWNER TO ${HIVEMIND_POSTGRES_USER};
+    OWNER TO ${RUNNER_POSTGRES_APP_USER};

 insert into hivemind_admin.database_metadata
     (database_name, ci_pipeline_url, ci_pipeline_id, commit_sha)
@@ -75,6 +77,8 @@ values (
     ${CI_PIPELINE_ID}, '${CI_COMMIT_SHORT_SHA}'
     );

+-- VACUUM VERBOSE ANALYZE;
+
 \q
 EOF
...
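A sketch of running the script locally, with the RUNNER_* variables it expects supplied by hand (all values are invented for illustration, and the template_monitoring template is assumed to exist on the server):

    export RUNNER_POSTGRES_ADMIN_USER=postgres
    export RUNNER_POSTGRES_ADMIN_USER_PASSWORD=postgres
    export RUNNER_POSTGRES_HOST=localhost
    export RUNNER_POSTGRES_PORT=5432
    export RUNNER_POSTGRES_APP_USER=hivemind_ci
    export RUNNER_POSTGRES_APP_USER_PASSWORD=secret
    export HIVEMIND_DB_NAME=hive_local
    export CI_PIPELINE_URL=local CI_PIPELINE_ID=0 CI_COMMIT_SHORT_SHA=0000000
    ./scripts/ci/create-db.sh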
@@ -2,26 +2,30 @@
 set -euo pipefail

-echo "Dumping database ${HIVEMIND_DB_NAME}"
-export PGPASSWORD=${POSTGRES_PASSWORD}
+dump_db() {
+    echo "Dumping database ${HIVEMIND_DB_NAME}"
+    export PGPASSWORD=${RUNNER_POSTGRES_APP_USER_PASSWORD}

     exec_path=$POSTGRES_CLIENT_TOOLS_PATH/$POSTGRES_MAJOR_VERSION/bin
     echo "Using pg_dump version $($exec_path/pg_dump --version)"

     time $exec_path/pg_dump \
-        --username="${POSTGRES_USER}" \
-        --host="${POSTGRES_HOST}" \
-        --port="${POSTGRES_PORT}" \
+        --username="${RUNNER_POSTGRES_APP_USER}" \
+        --host="${RUNNER_POSTGRES_HOST}" \
+        --port="${RUNNER_POSTGRES_PORT}" \
         --dbname="${HIVEMIND_DB_NAME}" \
         --schema=public \
         --format=directory \
         --jobs=4 \
         --compress=6 \
         --quote-all-identifiers \
         --lock-wait-timeout=30000 \
         --no-privileges --no-acl \
         --verbose \
         --file="pg-dump-${HIVEMIND_DB_NAME}"

     unset PGPASSWORD
+}
+
+dump_db
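The dump is written in directory format, so the matching restore path is pg_restore with parallel jobs; a hedged counterpart sketch (host/credential flags omitted for brevity):

    # Restore the directory-format dump produced above into an existing database.
    pg_restore --jobs=4 --dbname="${HIVEMIND_DB_NAME}" "pg-dump-${HIVEMIND_DB_NAME}"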
@@ -5,17 +5,16 @@
 set -euo pipefail

 get_postgres_version() {
-    # Get major version of postgres server.
     version=$(
-        PGPASSWORD=$POSTGRES_PASSWORD psql -X -A -t \
-            --username $POSTGRES_USER \
-            --host $POSTGRES_HOST \
-            --port ${POSTGRES_PORT} \
+        PGPASSWORD=$RUNNER_POSTGRES_APP_USER_PASSWORD psql -X -A -t \
+            --username $RUNNER_POSTGRES_APP_USER \
+            --host $RUNNER_POSTGRES_HOST \
+            --port ${RUNNER_POSTGRES_PORT} \
             --dbname postgres \
             -c "show server_version_num;"
        )
    echo $(echo $version | cut -c1-2)
 }

 get_postgres_version
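show server_version_num returns a six-digit integer for postgres 10 and later, so cut -c1-2 extracts the major version that the other scripts use to pick matching client tools. For example:

    echo 120005 | cut -c1-2   # postgres 12.5  -> 12
    echo 100015 | cut -c1-2   # postgres 10.15 -> 10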
-stages:
-  - build
-  - data-supply
-  - e2e-test
-
-variables:
-  PGPASSWORD: $HIVEMIND_POSTGRES_PASSWORD
-  # GIT_DEPTH: 10
-  GIT_DEPTH: 1
-  # GIT_STRATEGY: fetch # Noticed errors with that.
-  GIT_STRATEGY: clone
-  # GIT_STRATEGY: none
-  GIT_SUBMODULE_STRATEGY: recursive
-  PIPENV_VENV_IN_PROJECT: 1
-  PIPENV_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pipenv"
-  PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
-  POSTGRES_CLIENT_TOOLS_PATH: /usr/lib/postgresql
-  # POSTGRES_HOST: 172.17.0.1 # Host
-  # POSTGRES_HOST: postgres-10 # Docker service
-  POSTGRES_PORT: 5432
-  # Set on project level in Gitlab CI.
-  # We need create role and create db privileges.
-  # ADMIN_POSTGRES_USER: postgres
-  # ADMIN_POSTGRES_USER_PASSWORD: postgres
-  # Needed by old runner ssh-executor, probably.
-  POSTGRES_USER: $HIVEMIND_POSTGRES_USER
-  POSTGRES_PASSWORD: $HIVEMIND_POSTGRES_PASSWORD
-  POSTGRES_HOST_AUTH_METHOD: trust
-  HIVEMIND_DB_NAME: "hive_${CI_COMMIT_REF_SLUG}_pipeline_id_${CI_PIPELINE_ID}"
-  HIVEMIND_EXEC_NAME: $DB_NAME
-  # Set on project level in Gitlab CI.
-  # HIVEMIND_POSTGRES_USER: hivemind_ci
-  # Set on project level in Gitlab CI.
-  HIVEMIND_POSTGRES_USER_PASSWORD: $HIVEMIND_POSTGRES_PASSWORD
-  # Set on project level in Gitlab CI.
-  # HIVEMIND_HTTP_PORT: 18080
-  # Set on project level in Gitlab CI.
-  # HIVEMIND_MAX_BLOCK: 10001
-  # HIVEMIND_MAX_BLOCK: 5000001
-  # Set on project level in Gitlab CI.
-  # HIVEMIND_SOURCE_HIVED_URL: {"default":"http://hive-4.pl.syncad.com:8091"}
-  # HIVEMIND_SOURCE_HIVED_URL: {"default":"192.168.6.136:8091"}
-  # HIVEMIND_SOURCE_HIVED_URL: {"default":"http://172.17.0.1:8091"}
+# Useful snippets for Gitlab CI, but not used currently.
.postgres-10: &postgres-10
  name: hivemind/postgres:10
@@ -114,17 +56,6 @@ variables:
    "-c", "max_parallel_workers=4",
  ]

-.setup-pip: &setup-pip
-  - python -m venv .venv
-  - source .venv/bin/activate
-  - time pip install --upgrade pip setuptools wheel
-  - pip --version
-  - easy_install --version
-  - wheel version
-  - pipenv --version
-  - poetry --version
-  - time pip install --editable .

.setup-setuptools: &setup-setuptools
  - python -m venv .venv
  - source .venv/bin/activate
@@ -136,9 +67,9 @@ variables:
  - poetry --version
  - time python setup.py develop

-# no virtual environment
-.setuptools: &setup-setuptools-no-venv
-  # setuptools will install all dependencies to this directory.
+.setup-setuptools-no-venv: &setup-setuptools-no-venv
+  # No virtual environment here.
+  # Setuptools will install all dependencies to PYTHONUSERBASE directory.
  - export PYTHONUSERBASE=./local-site
  - time pip install --upgrade pip setuptools wheel
  - pip --version
@@ -148,7 +79,6 @@ variables:
  - poetry --version
  - mkdir -p `python -m site --user-site`
  - python setup.py install --user --force
-  # we can probably also run via: ./hive/cli.py
  - ln -sf ./local-site/bin/hive "$HIVEMIND_EXEC_NAME"

.setup-pipenv: &setup-pipenv
@@ -167,67 +97,6 @@ variables:
  - pipenv --version
  - poetry --version
-.set-variables: &set-variables
-  - whoami
-  # list all variables predefined by Gitlab CI
-  # - export
-  - echo "CI_PIPELINE_URL is $CI_PIPELINE_URL"
-  - echo "CI_PIPELINE_ID is $CI_PIPELINE_ID"
-  - echo "CI_COMMIT_SHORT_SHA is $CI_COMMIT_SHORT_SHA"
-  - echo "CI_COMMIT_REF_SLUG is $CI_COMMIT_REF_SLUG"
-  - export HIVEMIND_DB_NAME=${HIVEMIND_DB_NAME//[^a-zA-Z0-9_]/_}
-  - echo "HIVEMIND_DB_NAME is $HIVEMIND_DB_NAME"
-  - export HIVEMIND_POSTGRESQL_CONNECTION_STRING=postgresql://${HIVEMIND_POSTGRES_USER}:${HIVEMIND_POSTGRES_USER_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${HIVEMIND_DB_NAME}
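[Editor's note: the parameter expansion in the removed .set-variables anchor replaces every character outside [a-zA-Z0-9_] with an underscore, so branch slugs become valid database names. A worked example in plain bash, with a made-up branch name:]

    HIVEMIND_DB_NAME="hive_feature/ci-fix_pipeline_id_123"
    echo "${HIVEMIND_DB_NAME//[^a-zA-Z0-9_]/_}"
    # prints: hive_feature_ci_fix_pipeline_id_123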
-.fetch-git-tags: &fetch-git-tags
-  # - git fetch --tags
-  - git tag -f ci_implicit_tag # Needed to build python package
-
-.start_timer: &start-timer
-  - ./scripts/ci/timer.sh start
-
-.stop-timer: &stop-timer
-  - ./scripts/ci/timer.sh check
-
-.hive-sync-script-common: &hive-sync-script-common
-  - ./scripts/ci/wait-for-postgres.sh ${POSTGRES_HOST} ${POSTGRES_PORT}
-  - export POSTGRES_MAJOR_VERSION=$(./scripts/ci/get-postgres-version.sh)
-  - ./scripts/ci/create-db.sh
-  - ./scripts/ci/hive-sync.sh
-  - ./scripts/ci/collect-db-stats.sh
-
-.default-rules: &default-rules
-  rules:
-    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
-      when: always
-    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "develop"'
-      when: always
-    - if: '$CI_PIPELINE_SOURCE == "push"'
-      when: manual
-    - when: on_success
-
-default:
-  image: hivemind/python:3.6
-  # image: hivemind/python:3.8
-  interruptible: false
-  timeout: 2h
-  cache: &global-cache
-    # Per-branch caching. CI_COMMIT_REF_SLUG is the same thing.
-    # key: "$CI_COMMIT_REF_NAME"
-    # Per project caching – use any key. Change this key, if you need
-    # to clear cache
-    key: common-1
-    paths:
-      - .cache/
-      - .venv/
-      - .tox/
-  before_script:
-    - *start-timer
-    - *fetch-git-tags
-    - *set-variables
-    - *setup-pip
-  after_script:
-    - *stop-timer
##### Jobs #####
@@ -257,31 +126,16 @@ default:
  tags:
    - hivemind-light-job

-# Postgres shared
-hivemind-sync:
-  <<: *default-rules
-  stage: data-supply
-  needs: []
-  script:
-    - *hive-sync-script-common
-  artifacts:
-    paths:
-      - hivemind-sync.log
-      - pg-stats
-    expire_in: 7 days
-  tags:
-    - hivemind-heavy-job
-
-# Postgres as service
-.hivemind-sync:
-  <<: *default-rules
+# Postgres as docker service
+.hivemind-sync-postgres-as-service:
+  # <<: *default-rules
  stage: data-supply
  services:
    - *postgres-10
    # - *postgres-12
  needs: []
  script:
-    - *hive-sync-script-common
+    # - *hive-sync-script-common
    # - ./scripts/ci/dump-db.sh
  artifacts:
    paths:
@@ -292,131 +146,9 @@ hivemind-sync:
  tags:
    - hivemind-heavy-job
-.e2e-test-common:
-  rules:
-    - when: on_success
-  needs:
-    - job: hivemind-sync
-      artifacts: false
-  before_script:
-    - *start-timer
-    - *fetch-git-tags
-    - *set-variables
-    - *setup-pip
-    - ./scripts/ci/wait-for-postgres.sh ${POSTGRES_HOST} ${POSTGRES_PORT}
-    - ./scripts/ci/hive-server.sh start
-  after_script:
-    - ./scripts/ci/hive-server.sh stop
-    - *stop-timer
-  tags:
-    - hivemind-light-job
-
-bridge_api_smoketest:
+# Test job doing nothing (for debugging CI)
+.just-a-test:
  stage: e2e-test
  extends: .e2e-test-common
  script:
-    - >
-      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
-      bridge_api_patterns/ api_smoketest_bridge.xml
-  artifacts:
-    reports:
-      junit: api_smoketest_bridge.xml
+    - echo "Run some tests"

-bridge_api_smoketest_negative:
-  stage: e2e-test
-  extends: .e2e-test-common
-  script:
-    - >
-      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
-      bridge_api_negative/ api_smoketest_bridge_negative.xml
-  artifacts:
-    reports:
-      junit: api_smoketest_bridge_negative.xml
-
-condenser_api_smoketest:
-  stage: e2e-test
-  extends: .e2e-test-common
-  script:
-    - >
-      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
-      condenser_api_patterns/ api_smoketest_condenser_api.xml
-  artifacts:
-    reports:
-      junit: api_smoketest_condenser_api.xml
-
-condenser_api_smoketest_negative:
-  stage: e2e-test
-  extends: .e2e-test-common
-  script:
-    - >
-      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
-      condenser_api_negative/ api_smoketest_condenser_api_negative.xml
-  artifacts:
-    reports:
-      junit: api_smoketest_condenser_api_negative.xml
-
-database_api_smoketest:
-  stage: e2e-test
-  extends: .e2e-test-common
-  script:
-    - >
-      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
-      database_api_patterns/ api_smoketest_database_api.xml
-  artifacts:
-    reports:
-      junit: api_smoketest_database_api.xml
-
-database_api_smoketest_negative:
-  stage: e2e-test
-  extends: .e2e-test-common
-  script:
-    - >
-      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
-      database_api_negative/ api_smoketest_database_api_negative.xml
-  artifacts:
-    reports:
-      junit: api_smoketest_database_api_negative.xml
-
-follow_api_smoketest:
-  stage: e2e-test
-  extends: .e2e-test-common
-  script:
-    - >
-      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
-      follow_api_patterns/ api_smoketest_follow_api.xml
-  artifacts:
-    reports:
-      junit: api_smoketest_follow_api.xml
-
-follow_api_smoketest_negative:
-  stage: e2e-test
-  extends: .e2e-test-common
-  script:
-    - >
-      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
-      follow_api_negative/ api_smoketest_follow_api_negative.xml
-  artifacts:
-    reports:
-      junit: api_smoketest_follow_api_negative.xml
-
-tags_api_smoketest:
-  stage: e2e-test
-  extends: .e2e-test-common
-  script:
-    - >
-      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
-      tags_api_patterns/ api_smoketest_tags_api.xml
-  artifacts:
-    reports:
-      junit: api_smoketest_tags_api.xml
-
-tags_api_smoketest_negative:
-  stage: e2e-test
-  extends: .e2e-test-common
-  script:
-    - >
-      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
-      tags_api_negative/ api_smoketest_tags_api_negative.xml
-  artifacts:
-    reports:
-      junit: api_smoketest_tags_api_negative.xml
@@ -4,17 +4,11 @@
set -euo pipefail

+JOB=$1
+
HIVEMIND_PID=0
MERCY_KILL_TIMEOUT=5
START_DELAY=5

-# For debug only!
-# HIVED_URL='{"default":"http://hived-node:8091"}'
-# HIVED_URL='{"default":"http://172.17.0.1:8091"}'
-# HIVED_URL='{"default":"http://127.0.0.1:8091"}'
-# HIVEMIND_HTTP_PORT="8080"
-# HIVEMIND_POSTGRESQL_CONNECTION_STRING="postgresql://syncad:devdev@localhost:5432/hive_test"

check_pid() {
  if [ -f hive_server.pid ]; then
    HIVEMIND_PID=`cat hive_server.pid`
@@ -24,6 +18,7 @@ check_pid() {
    echo "Process pid $HIVEMIND_PID is running"
  else
    # Process is not running
+    echo "Process pid $HIVEMIND_PID is not running"
    rm hive_server.pid
    HIVEMIND_PID=0
  fi
@@ -33,7 +28,7 @@ check_pid() {
}

stop() {
-  if [ "$HIVEMIND_PID" -gt "0" ]; then
+  if [ "$HIVEMIND_PID" -gt 0 ]; then
    HIVEMIND_PID=`cat hive_server.pid`
    # Send INT signal and give it some time to stop.
@@ -52,22 +47,25 @@ stop() {
  fi
}

start() {
-  if [ "$HIVEMIND_PID" -gt "0" ]; then
+  if [ "$HIVEMIND_PID" -gt 0 ]; then
    echo "Hive server is already running (pid $HIVEMIND_PID)"
    exit 0
  fi

-  echo "Starting hive server on port ${HIVEMIND_HTTP_PORT}"
+  echo "Starting hive server on port ${RUNNER_HIVEMIND_SERVER_HTTP_PORT}"
+
+  USER=${RUNNER_POSTGRES_APP_USER}:${RUNNER_POSTGRES_APP_USER_PASSWORD}
+  OPTIONS="host=${RUNNER_POSTGRES_HOST}&port=${RUNNER_POSTGRES_PORT}"
+  DATABASE_URL="postgresql://${USER}@/${HIVEMIND_DB_NAME}?${OPTIONS}"

  hive server \
    --log-mask-sensitive-data \
    --pid-file hive_server.pid \
-    --http-server-port $HIVEMIND_HTTP_PORT \
-    --steemd-url "$HIVED_URL" \
-    --database-url "$HIVEMIND_POSTGRESQL_CONNECTION_STRING" 2>&1 \
+    --http-server-port ${RUNNER_HIVEMIND_SERVER_HTTP_PORT} \
+    --steemd-url "${RUNNER_HIVED_URL}" \
+    --database-url "${DATABASE_URL}" 2>&1 \
    | tee -ia hivemind-server.log &

  HIVEMIND_PID=$!
@@ -81,11 +79,14 @@ start() {
  if ps -p $HIVEMIND_PID > /dev/null
  then
    echo "Hive server is running (pid $HIVEMIND_PID)"
+    # Write the pid to file; sometimes a stale pid ends up there.
+    echo $HIVEMIND_PID > hive_server.pid
    exit 0
  else
    # Check if the process executed successfully or not.
    if wait $HIVEMIND_PID; then
      echo "Hive server has been started (pid $HIVEMIND_PID)"
+      echo $HIVEMIND_PID > hive_server.pid
      exit 0
    else
      RESULT=$?
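[Editor's aside: the success check above works because wait on a finished background pid returns that child's exit status. A standalone illustration:]

    # wait returns the child's exit status, usable directly in a conditional.
    sleep 1 & pid=$!
    if wait "$pid"; then
        echo "child exited 0"
    else
        echo "child exited with status $?"
    fi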
@@ -107,5 +108,16 @@ start() {
}

-check_pid
-"$1"
+main() {
+  check_pid
+  if [ "$JOB" = "start" ]; then
+    start
+  elif [ "$JOB" = "stop" ]; then
+    stop
+  else
+    echo "Invalid argument"
+    exit 1
+  fi
+}
+
+main
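[For reference, the new main() dispatcher keeps the CI call sites unchanged; the script is still invoked as:]

    ./scripts/ci/hive-server.sh start    # check_pid, then start
    ./scripts/ci/hive-server.sh stop     # check_pid, then stop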
@@ -2,25 +2,35 @@
set -euo pipefail

-cat << EOF
-Starting hive sync using hived url: ${HIVED_URL}.
-Max sync block is: ${HIVEMIND_MAX_BLOCK}.
+# For debug only!
+# RUNNER_HIVEMIND_SYNC_MAX_BLOCK=10000
+# RUNNER_HIVED_URL='{"default":"http://hived-node:8091"}'
+# RUNNER_HIVED_URL='{"default":"http://172.17.0.1:8091"}'
+
+hive_sync() {
+    # Start hive sync process
+    cat << EOF
+Starting hive sync using hived url: ${RUNNER_HIVED_URL}.
+Max sync block is: ${RUNNER_HIVEMIND_SYNC_MAX_BLOCK}.
EOF

-# For debug only!
-# HIVEMIND_MAX_BLOCK=10001
-# HIVED_URL='{"default":"http://hived-node:8091"}'
-# HIVED_URL='{"default":"http://172.17.0.1:8091"}'
-
-DATABASE_URL="postgresql://${HIVEMIND_POSTGRES_USER}:${HIVEMIND_POSTGRES_USER_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${HIVEMIND_DB_NAME}"
-
-hive sync \
-    --log-mask-sensitive-data \
-    --pid-file hive_sync.pid \
-    --test-max-block=${HIVEMIND_MAX_BLOCK} \
-    --exit-after-sync \
-    --test-profile=False \
-    --steemd-url "$HIVED_URL" \
-    --prometheus-port 11011 \
-    --database-url "$DATABASE_URL" \
-    2>&1 | tee -i hivemind-sync.log
+    USER=${RUNNER_POSTGRES_APP_USER}:${RUNNER_POSTGRES_APP_USER_PASSWORD}
+    OPTIONS="host=${RUNNER_POSTGRES_HOST}&port=${RUNNER_POSTGRES_PORT}"
+    DATABASE_URL="postgresql://${USER}@/${HIVEMIND_DB_NAME}?${OPTIONS}"
+
+    hive sync \
+        --log-mask-sensitive-data \
+        --pid-file hive_sync.pid \
+        --test-max-block=${RUNNER_HIVEMIND_SYNC_MAX_BLOCK} \
+        --exit-after-sync \
+        --test-profile=False \
+        --steemd-url "${RUNNER_HIVED_URL}" \
+        --prometheus-port 11011 \
+        --database-url "${DATABASE_URL}" \
+        --mock-block-data-path mock_data/block_data/follow_op/mock_block_data_follow.json \
+        2>&1 | tee -i hivemind-sync.log
+}
+
+hive_sync
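[Editor's note: the DATABASE_URL built here, and in hive-server.sh above, is a libpq-style URI with an empty host in the authority part and host/port moved into query parameters; libpq treats both spellings the same. A quick check with placeholder values:]

    # Verify the query-parameter URI form; all values below are placeholders.
    psql "postgresql://hivemind_ci:secret@/hive_test?host=localhost&port=5432" -c "SELECT 1;"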
@@ -8,7 +8,7 @@ plugin = webserver p2p json_rpc
plugin = database_api
# condenser_api enabled per abw request
plugin = condenser_api
plugin = block_api
# gandalf enabled witness + rc
plugin = witness
plugin = rc
@@ -34,7 +34,7 @@ plugin = block_api network_broadcast_api rc_api
history-disable-pruning = 1
account-history-rocksdb-path = "blockchain/account-history-rocksdb-storage"
-#shared-file-dir = "/run/hive"
+# shared-file-dir = "/run/hive"
shared-file-size = 20G
shared-file-full-threshold = 9500
shared-file-scale-rate = 1000
@@ -45,8 +45,8 @@ market-history-bucket-size = [15,60,300,3600,86400]
market-history-buckets-per-size = 5760
p2p-endpoint = 0.0.0.0:2001
p2p-seed-node =
-#gtg.openhive.network:2001
+# gtg.openhive.network:2001
transaction-status-block-depth = 64000
transaction-status-track-after-block = 42000000
@@ -4,6 +4,7 @@
MYDIR="$PWD"
WORKDIR="/usr/local/hive/consensus"
+IMAGE="registry.gitlab.syncad.com/hive/hive/consensus_node:00b5ff55"

docker run -d \
  --name hived-replay-5000000 \
@@ -14,5 +15,5 @@ docker run -d \
  -v $MYDIR/blockchain/block_log:$WORKDIR/datadir/blockchain/block_log \
  -v $MYDIR/entrypoint.sh:$WORKDIR/entrypoint.sh \
  --entrypoint $WORKDIR/entrypoint.sh \
-  registry.gitlab.syncad.com/hive/hive/consensus_node:00b5ff55 \
+  $IMAGE \
  --replay-blockchain --stop-replay-at-block 5000000
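[Editor's sketch for watching the detached replay container started above; standard docker CLI, with the container name taken from the script:]

    docker logs -f hived-replay-5000000    # follow replay output
    docker wait hived-replay-5000000       # block until the replay container exits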