Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (14)
Showing 1253 additions and 626 deletions
stages:
- build
- data-supply
- e2e-test
variables:
PGPASSWORD: $HIVEMIND_POSTGRES_PASSWORD
# GIT_DEPTH: 10
GIT_DEPTH: 1
# GIT_STRATEGY: fetch # Noticed errors with that.
GIT_STRATEGY: clone
# GIT_STRATEGY: none
GIT_SUBMODULE_STRATEGY: recursive
PIPENV_VENV_IN_PROJECT: 1
PIPENV_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pipenv"
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
POSTGRES_CLIENT_TOOLS_PATH: /usr/lib/postgresql
# POSTGRES_HOST: 172.17.0.1 # Host
# POSTGRES_HOST: postgres-10 # Docker service
POSTGRES_PORT: 5432
# Set on project level in Gitlab CI.
# We need CREATEROLE and CREATEDB privileges.
# ADMIN_POSTGRES_USER: postgres
# ADMIN_POSTGRES_USER_PASSWORD: postgres
# Needed by old runner ssh-executor, probably.
POSTGRES_USER: $HIVEMIND_POSTGRES_USER
POSTGRES_PASSWORD: $HIVEMIND_POSTGRES_PASSWORD
POSTGRES_HOST_AUTH_METHOD: trust
HIVEMIND_DB_NAME: "hive_${CI_COMMIT_REF_SLUG}_pipeline_id_${CI_PIPELINE_ID}"
HIVEMIND_EXEC_NAME: $DB_NAME
# Set on project level in Gitlab CI.
# HIVEMIND_POSTGRES_USER: hivemind_ci
# Set on project level in Gitlab CI.
HIVEMIND_POSTGRES_USER_PASSWORD: $HIVEMIND_POSTGRES_PASSWORD
# Set on project level in Gitlab CI.
# HIVEMIND_HTTP_PORT: 18080
# Set on project level in Gitlab CI.
# HIVEMIND_MAX_BLOCK: 10001
# HIVEMIND_MAX_BLOCK: 5000001
# Set on project level in Gitlab CI.
# HIVEMIND_SOURCE_HIVED_URL: {"default":"http://hive-4.pl.syncad.com:8091"}
# HIVEMIND_SOURCE_HIVED_URL: {"default":"192.168.6.136:8091"}
# HIVEMIND_SOURCE_HIVED_URL: {"default":"http://172.17.0.1:8091"}
.postgres-10: &postgres-10
name: hivemind/postgres:10
alias: db
command: [
"postgres",
"-c", "shared_preload_libraries=pg_stat_statements,pg_qualstats",
"-c", "track_functions=pl",
"-c", "track_io_timing=on",
"-c", "track_activity_query_size=2048",
"-c", "pg_stat_statements.max=10000",
"-c", "pg_stat_statements.track=all",
"-c", "max_connections=100",
"-c", "shared_buffers=2GB",
"-c", "effective_cache_size=6GB",
"-c", "maintenance_work_mem=512MB",
"-c", "checkpoint_completion_target=0.9",
"-c", "wal_buffers=16MB",
"-c", "default_statistics_target=100",
"-c", "random_page_cost=1.1",
"-c", "effective_io_concurrency=200",
"-c", "work_mem=5242kB",
"-c", "min_wal_size=2GB",
"-c", "max_wal_size=8GB",
"-c", "max_worker_processes=4",
"-c", "max_parallel_workers_per_gather=2",
"-c", "max_parallel_workers=4",
]
.postgres-12: &postgres-12
name: hivemind/postgres:12
alias: db
command: [
"postgres",
"-c", "shared_preload_libraries=pg_stat_statements,pg_qualstats",
"-c", "track_functions=pl",
"-c", "track_io_timing=on",
"-c", "track_activity_query_size=2048",
"-c", "pg_stat_statements.max=10000",
"-c", "pg_stat_statements.track=all",
"-c", "max_connections=100",
"-c", "shared_buffers=2GB",
"-c", "effective_cache_size=6GB",
"-c", "maintenance_work_mem=512MB",
"-c", "checkpoint_completion_target=0.9",
"-c", "wal_buffers=16MB",
"-c", "default_statistics_target=100",
"-c", "random_page_cost=1.1",
"-c", "effective_io_concurrency=200",
"-c", "work_mem=5242kB",
"-c", "min_wal_size=2GB",
"-c", "max_wal_size=8GB",
"-c", "max_worker_processes=4",
"-c", "max_parallel_workers_per_gather=2",
"-c", "max_parallel_workers=4",
]
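Both service definitions above preload pg_stat_statements with pg_stat_statements.track=all, so the CI database accumulates per-statement timings that the stats-collection step can read back. A minimal illustrative query, assuming the extension has also been created in the target database (column names as in PostgreSQL 10/12, where the column is still total_time):

-- Illustrative only: the five most expensive statements by total execution time.
SELECT calls, total_time, rows, query
FROM pg_stat_statements
ORDER BY total_time DESC
LIMIT 5;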
.setup-pip: &setup-pip
- python -m venv .venv
- source .venv/bin/activate
- time pip install --upgrade pip setuptools wheel
- pip --version
- easy_install --version
- wheel version
- pipenv --version
- poetry --version
- time pip install --editable .
.setup-setuptools: &setup-setuptools
- python -m venv .venv
- source .venv/bin/activate
- time pip install --upgrade pip setuptools wheel
- pip --version
- easy_install --version
- wheel version
- pipenv --version
- poetry --version
- time python setup.py develop
# no virtual environment
.setuptools: &setup-setuptools-no-venv
# setuptools will install all dependencies to this directory.
- export PYTHONUSERBASE=./local-site
- time pip install --upgrade pip setuptools wheel
- pip --version
- easy_install --version
- wheel version
- pipenv --version
- poetry --version
- mkdir -p `python -m site --user-site`
- python setup.py install --user --force
# we can probably also run via: ./hive/cli.py
- ln -sf ./local-site/bin/hive "$HIVEMIND_EXEC_NAME"
.setup-pipenv: &setup-pipenv
## Note that a Pipfile must exist.
## `--sequential` is slower, but doesn't emit error messages or
## a need to repeat the install.
## - pipenv sync --dev --bare --sequential
## The non-sequential form is faster, but sometimes emits error
## messages and a need to repeat the install. However, these
## errors seem negligible.
- time pipenv sync --dev --bare
- source .venv/bin/activate
- pip --version
- easy_install --version
- wheel version
- pipenv --version
- poetry --version
.set-variables: &set-variables
- whoami
# list all variables predefined by Gitlab CI
# - export
- echo "CI_PIPELINE_URL is $CI_PIPELINE_URL"
- echo "CI_PIPELINE_ID is $CI_PIPELINE_ID"
- echo "CI_COMMIT_SHORT_SHA is $CI_COMMIT_SHORT_SHA"
- echo "CI_COMMIT_REF_SLUG is $CI_COMMIT_REF_SLUG"
- export HIVEMIND_DB_NAME=${HIVEMIND_DB_NAME//[^a-zA-Z0-9_]/_}
- echo "HIVEMIND_DB_NAME is $HIVEMIND_DB_NAME"
- export HIVEMIND_POSTGRESQL_CONNECTION_STRING=postgresql://${HIVEMIND_POSTGRES_USER}:${HIVEMIND_POSTGRES_USER_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${HIVEMIND_DB_NAME}
.fetch-git-tags: &fetch-git-tags
# - git fetch --tags
- git tag -f ci_implicit_tag # Needed to build python package
.start_timer: &start-timer
- ./scripts/ci/timer.sh start
.stop-timer: &stop-timer
- ./scripts/ci/timer.sh check
.hive-sync-script-common: &hive-sync-script-common
- ./scripts/ci/wait-for-postgres.sh ${POSTGRES_HOST} ${POSTGRES_PORT}
- export POSTGRES_MAJOR_VERSION=$(./scripts/ci/get-postgres-version.sh)
- ./scripts/ci/create-db.sh
- ./scripts/ci/hive-sync.sh
- ./scripts/ci/collect-db-stats.sh
.default-rules: &default-rules
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "develop"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
default:
image: hivemind/python:3.6
# image: hivemind/python:3.8
interruptible: false
timeout: 2h
cache: &global-cache
# Per-branch caching. CI_COMMIT_REF_SLUG would work the same way.
# key: "$CI_COMMIT_REF_NAME"
# Per-project caching – use any key. Change this key if you need
# to clear the cache.
key: common-1
paths:
- .cache/
- .venv/
- .tox/
before_script:
- *start-timer
- *fetch-git-tags
- *set-variables
- *setup-pip
after_script:
- *stop-timer
##### Jobs #####
.build-egg:
stage: build
needs: []
script:
- python setup.py bdist_egg
- ls -l dist/*
artifacts:
paths:
- dist/
expire_in: 7 days
tags:
- hivemind-light-job
.build-wheel:
stage: build
needs: []
script:
- python setup.py bdist_wheel
- ls -l dist/*
artifacts:
paths:
- dist/
expire_in: 7 days
tags:
- hivemind-light-job
# Postgres shared
hivemind-sync:
<<: *default-rules
stage: data-supply
needs: []
script:
- *hive-sync-script-common
artifacts:
paths:
- hivemind-sync.log
- pg-stats
expire_in: 7 days
tags:
- hivemind-heavy-job
# Postgres as service
.hivemind-sync:
<<: *default-rules
stage: data-supply
services:
- *postgres-10
# - *postgres-12
needs: []
script:
- *hive-sync-script-common
# - ./scripts/ci/dump-db.sh
artifacts:
paths:
- hivemind-sync.log
- pg-stats
- pg-dump-${HIVEMIND_DB_NAME}
expire_in: 7 hours
tags:
- hivemind-heavy-job
.e2e-test-common:
rules:
- when: on_success
needs:
- job: hivemind-sync
artifacts: false
before_script:
- *start-timer
- *fetch-git-tags
- *set-variables
- *setup-pip
- ./scripts/ci/wait-for-postgres.sh ${POSTGRES_HOST} ${POSTGRES_PORT}
- ./scripts/ci/hive-server.sh start
after_script:
- ./scripts/ci/hive-server.sh stop
- *stop-timer
tags:
- hivemind-light-job
bridge_api_smoketest:
stage: e2e-test
extends: .e2e-test-common
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
bridge_api_patterns/ api_smoketest_bridge.xml
artifacts:
reports:
junit: api_smoketest_bridge.xml
bridge_api_smoketest_negative:
stage: e2e-test
extends: .e2e-test-common
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
bridge_api_negative/ api_smoketest_bridge_negative.xml
artifacts:
reports:
junit: api_smoketest_bridge_negative.xml
condenser_api_smoketest:
stage: e2e-test
extends: .e2e-test-common
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
condenser_api_patterns/ api_smoketest_condenser_api.xml
artifacts:
reports:
junit: api_smoketest_condenser_api.xml
condenser_api_smoketest_negative:
stage: e2e-test
extends: .e2e-test-common
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
condenser_api_negative/ api_smoketest_condenser_api_negative.xml
artifacts:
reports:
junit: api_smoketest_condenser_api_negative.xml
database_api_smoketest:
stage: e2e-test
extends: .e2e-test-common
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
database_api_patterns/ api_smoketest_database_api.xml
artifacts:
reports:
junit: api_smoketest_database_api.xml
database_api_smoketest_negative:
stage: e2e-test
extends: .e2e-test-common
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
database_api_negative/ api_smoketest_database_api_negative.xml
artifacts:
reports:
junit: api_smoketest_database_api_negative.xml
follow_api_smoketest:
stage: e2e-test
extends: .e2e-test-common
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
follow_api_patterns/ api_smoketest_follow_api.xml
artifacts:
reports:
junit: api_smoketest_follow_api.xml
follow_api_smoketest_negative:
stage: e2e-test
extends: .e2e-test-common
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
follow_api_negative/ api_smoketest_follow_api_negative.xml
artifacts:
reports:
junit: api_smoketest_follow_api_negative.xml
tags_api_smoketest:
stage: e2e-test
extends: .e2e-test-common
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
tags_api_patterns/ api_smoketest_tags_api.xml
artifacts:
reports:
junit: api_smoketest_tags_api.xml
tags_api_smoketest_negative:
stage: e2e-test
extends: .e2e-test-common
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
tags_api_negative/ api_smoketest_tags_api_negative.xml
artifacts:
reports:
junit: api_smoketest_tags_api_negative.xml
# https://hub.docker.com/r/library/python/tags/
image: "python:3.7"
stages:
- build
- test
- data-supply
- deploy
- e2e-test
- post-deploy
variables:
GIT_DEPTH: 1
LC_ALL: "C"
GIT_STRATEGY: clone
GIT_SUBMODULE_STRATEGY: recursive
GIT_CLONE_PATH: $CI_BUILDS_DIR/$CI_COMMIT_REF_SLUG/$CI_CONCURRENT_ID/project-name
HIVEMIND_SOURCE_HIVED_URL: $HIVEMIND_SOURCE_HIVED_URL
HIVEMIND_DB_NAME: "hive_$CI_COMMIT_REF_SLUG"
HIVEMIND_HTTP_PORT: $((HIVEMIND_HTTP_PORT + CI_CONCURRENT_ID))
# Configured in the GitLab repository settings.
POSTGRES_USER: $HIVEMIND_POSTGRES_USER
POSTGRES_PASSWORD: $HIVEMIND_POSTGRES_PASSWORD
POSTGRES_HOST_AUTH_METHOD: trust
# official way to provide password to psql: http://www.postgresql.org/docs/9.3/static/libpq-envars.html
PGPASSWORD: $HIVEMIND_POSTGRES_PASSWORD
before_script:
- pwd
- echo "CI_NODE_TOTAL is $CI_NODE_TOTAL"
- echo "CI_NODE_INDEX is $CI_NODE_INDEX"
- echo "CI_CONCURRENT_ID is $CI_CONCURRENT_ID"
- echo "CI_COMMIT_REF_SLUG is $CI_COMMIT_REF_SLUG"
hivemind_build:
stage: build
script:
- pip3 install --user --upgrade pip setuptools
- git fetch --tags
- git tag -f ci_implicit_tag
- echo $PYTHONUSERBASE
- "python3 setup.py bdist_egg"
- ls -l dist/*
artifacts:
paths:
- dist/
expire_in: 1 week
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "develop"'
when: always
- when: always
tags:
- hivemind
hivemind_sync:
stage: data-supply
environment:
name: "hive sync built from branch $CI_COMMIT_REF_NAME targeting database $HIVEMIND_DB_NAME"
needs:
- job: hivemind_build
artifacts: true
variables:
GIT_STRATEGY: none
PYTHONUSERBASE: ./local-site
script:
- pip3 install --user --upgrade pip setuptools
- scripts/ci_sync.sh "$HIVEMIND_DB_NAME" "$HIVEMIND_POSTGRESQL_CONNECTION_STRING" "$HIVEMIND_SOURCE_HIVED_URL" $HIVEMIND_MAX_BLOCK $HIVEMIND_HTTP_PORT
artifacts:
paths:
- hivemind-sync.log
expire_in: 1 week
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "develop"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
tags:
- hivemind
hivemind_start_server:
stage: deploy
environment:
name: "hive serve built from branch $CI_COMMIT_REF_NAME exposed on port $HIVEMIND_HTTP_PORT"
url: "http://hive-4.pl.syncad.com:$HIVEMIND_HTTP_PORT"
on_stop: hivemind_stop_server
needs:
- job: hivemind_build
artifacts: true
# - job: hivemind_sync
# artifacts: true
variables:
GIT_STRATEGY: none
PYTHONUSERBASE: ./local-site
script:
- scripts/ci_start_server.sh "$HIVEMIND_DB_NAME" "$HIVEMIND_POSTGRESQL_CONNECTION_STRING" "$HIVEMIND_SOURCE_HIVED_URL" $HIVEMIND_HTTP_PORT
artifacts:
paths:
- hive_server.pid
expire_in: 1 week
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "develop"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
tags:
- hivemind
hivemind_stop_server:
stage: post-deploy
environment:
name: "hive serve built from branch $CI_COMMIT_REF_NAME exposed on port $HIVEMIND_HTTP_PORT"
action: stop
variables:
GIT_STRATEGY: none
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- when: manual
script:
- scripts/ci_stop_server.sh hive_server.pid
needs:
- job: hivemind_start_server
artifacts: true
tags:
- hivemind
artifacts:
paths:
- hive_server.log
.hivemind_start_api_smoketest: &common_api_smoketest_job
stage: e2e-test
environment: hive-4.pl.syncad.com
needs:
- job: hivemind_start_server
artifacts: true
variables:
GIT_STRATEGY: none
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
tags:
- hivemind
bridge_api_smoketest:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" bridge_api_patterns/ api_smoketest_bridge.xml
artifacts:
reports:
junit: api_smoketest_bridge.xml
bridge_api_smoketest_negative:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" bridge_api_negative/ api_smoketest_bridge_negative.xml
artifacts:
reports:
junit: api_smoketest_bridge_negative.xml
condenser_api_smoketest:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" condenser_api_patterns/ api_smoketest_condenser_api.xml
artifacts:
reports:
junit: api_smoketest_condenser_api.xml
condenser_api_smoketest_negative:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" condenser_api_negative/ api_smoketest_condenser_api_negative.xml
artifacts:
reports:
junit: api_smoketest_condenser_api_negative.xml
database_api_smoketest:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" database_api_patterns/ api_smoketest_database_api.xml
artifacts:
reports:
junit: api_smoketest_database_api.xml
database_api_smoketest_negative:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" database_api_negative/ api_smoketest_database_api_negative.xml
artifacts:
reports:
junit: api_smoketest_database_api_negative.xml
follow_api_smoketest:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" follow_api_patterns/ api_smoketest_follow_api.xml
artifacts:
reports:
junit: api_smoketest_follow_api.xml
follow_api_smoketest_negative:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" follow_api_negative/ api_smoketest_follow_api_negative.xml
artifacts:
reports:
junit: api_smoketest_follow_api_negative.xml
tags_api_smoketest:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" tags_api_patterns/ api_smoketest_tags_api.xml
artifacts:
reports:
junit: api_smoketest_tags_api.xml
tags_api_smoketest_negative:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" tags_api_negative/ api_smoketest_tags_api_negative.xml
artifacts:
reports:
junit: api_smoketest_tags_api_negative.xml
stages:
- build
- data-supply
- e2e-test
# https://hub.docker.com/r/library/python/tags/
image: "python:3.7"
stages:
- build
- test
- data-supply
- deploy
- e2e-test
- post-deploy
variables:
PGPASSWORD: $HIVEMIND_POSTGRES_PASSWORD
# GIT_DEPTH: 10
GIT_DEPTH: 1
# GIT_STRATEGY: fetch # Noticed errors with that.
LC_ALL: "C"
GIT_STRATEGY: clone
# GIT_STRATEGY: none
GIT_SUBMODULE_STRATEGY: recursive
GIT_CLONE_PATH: $CI_BUILDS_DIR/$CI_COMMIT_REF_SLUG/$CI_CONCURRENT_ID/project-name
PIPENV_VENV_IN_PROJECT: 1
PIPENV_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pipenv"
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
POSTGRES_CLIENT_TOOLS_PATH: /usr/lib/postgresql
# POSTGRES_HOST: 172.17.0.1 # Host
# POSTGRES_HOST: postgres-10 # Docker service
POSTGRES_PORT: 5432
# Set on project level in Gitlab CI.
# We need CREATEROLE and CREATEDB privileges.
# ADMIN_POSTGRES_USER: postgres
# ADMIN_POSTGRES_USER_PASSWORD: postgres
# Needed by old runner ssh-executor, probably.
HIVEMIND_SOURCE_HIVED_URL: $HIVEMIND_SOURCE_HIVED_URL
HIVEMIND_DB_NAME: "hive_$CI_COMMIT_REF_SLUG"
HIVEMIND_HTTP_PORT: $((HIVEMIND_HTTP_PORT + CI_CONCURRENT_ID))
# Configured in the GitLab repository settings.
POSTGRES_USER: $HIVEMIND_POSTGRES_USER
POSTGRES_PASSWORD: $HIVEMIND_POSTGRES_PASSWORD
POSTGRES_HOST_AUTH_METHOD: trust
# official way to provide password to psql: http://www.postgresql.org/docs/9.3/static/libpq-envars.html
PGPASSWORD: $HIVEMIND_POSTGRES_PASSWORD
HIVEMIND_DB_NAME: "hive_${CI_COMMIT_REF_SLUG}_pipeline_id_${CI_PIPELINE_ID}"
HIVEMIND_EXEC_NAME: $DB_NAME
# Set on project level in Gitlab CI.
# HIVEMIND_POSTGRES_USER: hivemind_ci
# Set on project level in Gitlab CI.
HIVEMIND_POSTGRES_USER_PASSWORD: $HIVEMIND_POSTGRES_PASSWORD
# Set on project level in Gitlab CI.
# HIVEMIND_HTTP_PORT: 18080
# Set on project level in Gitlab CI.
# HIVEMIND_MAX_BLOCK: 10001
# HIVEMIND_MAX_BLOCK: 5000001
# Set on project level in Gitlab CI.
# HIVEMIND_SOURCE_HIVED_URL: {"default":"http://hive-4.pl.syncad.com:8091"}
# HIVEMIND_SOURCE_HIVED_URL: {"default":"192.168.6.136:8091"}
# HIVEMIND_SOURCE_HIVED_URL: {"default":"http://172.17.0.1:8091"}
.postgres-10: &postgres-10
name: hivemind/postgres:10
alias: db
command: [
"postgres",
"-c", "shared_preload_libraries=pg_stat_statements,pg_qualstats",
"-c", "track_functions=pl",
"-c", "track_io_timing=on",
"-c", "track_activity_query_size=2048",
"-c", "pg_stat_statements.max=10000",
"-c", "pg_stat_statements.track=all",
"-c", "max_connections=100",
"-c", "shared_buffers=2GB",
"-c", "effective_cache_size=6GB",
"-c", "maintenance_work_mem=512MB",
"-c", "checkpoint_completion_target=0.9",
"-c", "wal_buffers=16MB",
"-c", "default_statistics_target=100",
"-c", "random_page_cost=1.1",
"-c", "effective_io_concurrency=200",
"-c", "work_mem=5242kB",
"-c", "min_wal_size=2GB",
"-c", "max_wal_size=8GB",
"-c", "max_worker_processes=4",
"-c", "max_parallel_workers_per_gather=2",
"-c", "max_parallel_workers=4",
]
.postgres-12: &postgres-12
name: hivemind/postgres:12
alias: db
command: [
"postgres",
"-c", "shared_preload_libraries=pg_stat_statements,pg_qualstats",
"-c", "track_functions=pl",
"-c", "track_io_timing=on",
"-c", "track_activity_query_size=2048",
"-c", "pg_stat_statements.max=10000",
"-c", "pg_stat_statements.track=all",
"-c", "max_connections=100",
"-c", "shared_buffers=2GB",
"-c", "effective_cache_size=6GB",
"-c", "maintenance_work_mem=512MB",
"-c", "checkpoint_completion_target=0.9",
"-c", "wal_buffers=16MB",
"-c", "default_statistics_target=100",
"-c", "random_page_cost=1.1",
"-c", "effective_io_concurrency=200",
"-c", "work_mem=5242kB",
"-c", "min_wal_size=2GB",
"-c", "max_wal_size=8GB",
"-c", "max_worker_processes=4",
"-c", "max_parallel_workers_per_gather=2",
"-c", "max_parallel_workers=4",
]
.setup-pip: &setup-pip
- python -m venv .venv
- source .venv/bin/activate
- time pip install --upgrade pip setuptools wheel
- pip --version
- easy_install --version
- wheel version
- pipenv --version
- poetry --version
- time pip install --editable .
.setup-setuptools: &setup-setuptools
- python -m venv .venv
- source .venv/bin/activate
- time pip install --upgrade pip setuptools wheel
- pip --version
- easy_install --version
- wheel version
- pipenv --version
- poetry --version
- time python setup.py develop
# no virtual environment
.setuptools: &setup-setuptools-no-venv
# setuptools will install all dependencies to this directory.
- export PYTHONUSERBASE=./local-site
- time pip install --upgrade pip setuptools wheel
- pip --version
- easy_install --version
- wheel version
- pipenv --version
- poetry --version
- mkdir -p `python -m site --user-site`
- python setup.py install --user --force
# we can probably also run via: ./hive/cli.py
- ln -sf ./local-site/bin/hive "$HIVEMIND_EXEC_NAME"
.setup-pipenv: &setup-pipenv
## Note that a Pipfile must exist.
## `--sequential` is slower, but doesn't emit error messages or
## a need to repeat the install.
## - pipenv sync --dev --bare --sequential
## The non-sequential form is faster, but sometimes emits error
## messages and a need to repeat the install. However, these
## errors seem negligible.
- time pipenv sync --dev --bare
- source .venv/bin/activate
- pip --version
- easy_install --version
- wheel version
- pipenv --version
- poetry --version
.set-variables: &set-variables
- whoami
# list all variables predefined by Gitlab CI
# - export
- echo "CI_PIPELINE_URL is $CI_PIPELINE_URL"
- echo "CI_PIPELINE_ID is $CI_PIPELINE_ID"
- echo "CI_COMMIT_SHORT_SHA is $CI_COMMIT_SHORT_SHA"
before_script:
- pwd
- echo "CI_NODE_TOTAL is $CI_NODE_TOTAL"
- echo "CI_NODE_INDEX is $CI_NODE_INDEX"
- echo "CI_CONCURRENT_ID is $CI_CONCURRENT_ID"
- echo "CI_COMMIT_REF_SLUG is $CI_COMMIT_REF_SLUG"
- export HIVEMIND_DB_NAME=${HIVEMIND_DB_NAME//[^a-zA-Z0-9_]/_}
- echo "HIVEMIND_DB_NAME is $HIVEMIND_DB_NAME"
- export HIVEMIND_POSTGRESQL_CONNECTION_STRING=postgresql://${HIVEMIND_POSTGRES_USER}:${HIVEMIND_POSTGRES_USER_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${HIVEMIND_DB_NAME}
.fetch-git-tags: &fetch-git-tags
# - git fetch --tags
- git tag -f ci_implicit_tag # Needed to build python package
hivemind_build:
stage: build
script:
- pip3 install --user --upgrade pip setuptools
- git fetch --tags
- git tag -f ci_implicit_tag
- echo $PYTHONUSERBASE
- "python3 setup.py bdist_egg"
- ls -l dist/*
artifacts:
paths:
- dist/
expire_in: 1 week
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "develop"'
when: always
- when: always
.start_timer: &start-timer
- ./scripts/ci/timer.sh start
tags:
- hivemind
.stop-timer: &stop-timer
- ./scripts/ci/timer.sh check
hivemind_sync:
stage: data-supply
.hive-sync-script-common: &hive-sync-script-common
- ./scripts/ci/wait-for-postgres.sh ${POSTGRES_HOST} ${POSTGRES_PORT}
- export POSTGRES_MAJOR_VERSION=$(./scripts/ci/get-postgres-version.sh)
- ./scripts/ci/create-db.sh
- ./scripts/ci/hive-sync.sh
- ./scripts/ci/collect-db-stats.sh
environment:
name: "hive sync built from branch $CI_COMMIT_REF_NAME targeting database $HIVEMIND_DB_NAME"
needs:
- job: hivemind_build
artifacts: true
variables:
GIT_STRATEGY: none
PYTHONUSERBASE: ./local-site
script:
- pip3 install --user --upgrade pip setuptools
- scripts/ci_sync.sh "$HIVEMIND_DB_NAME" "$HIVEMIND_POSTGRESQL_CONNECTION_STRING" "$HIVEMIND_SOURCE_HIVED_URL" $HIVEMIND_MAX_BLOCK $HIVEMIND_HTTP_PORT
artifacts:
paths:
- hivemind-sync.log
expire_in: 1 week
.default-rules: &default-rules
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
@@ -207,217 +89,187 @@ variables:
when: manual
- when: on_success
default:
image: hivemind/python:3.6
# image: hivemind/python:3.8
interruptible: false
timeout: 2h
cache: &global-cache
# Per-branch caching. CI_COMMIT_REF_SLUG would work the same way.
# key: "$CI_COMMIT_REF_NAME"
# Per-project caching – use any key. Change this key if you need
# to clear the cache.
key: common-1
paths:
- .cache/
- .venv/
- .tox/
before_script:
- *start-timer
- *fetch-git-tags
- *set-variables
- *setup-pip
after_script:
- *stop-timer
##### Jobs #####
.build-egg:
stage: build
needs: []
script:
- python setup.py bdist_egg
- ls -l dist/*
artifacts:
paths:
- dist/
expire_in: 7 days
tags:
- hivemind-light-job
- hivemind
hivemind_start_server:
stage: deploy
environment:
name: "hive serve built from branch $CI_COMMIT_REF_NAME exposed on port $HIVEMIND_HTTP_PORT"
url: "http://hive-4.pl.syncad.com:$HIVEMIND_HTTP_PORT"
on_stop: hivemind_stop_server
needs:
- job: hivemind_build
artifacts: true
# - job: hivemind_sync
# artifacts: true
variables:
GIT_STRATEGY: none
PYTHONUSERBASE: ./local-site
.build-wheel:
stage: build
needs: []
script:
- python setup.py bdist_wheel
- ls -l dist/*
- scripts/ci_start_server.sh "$HIVEMIND_DB_NAME" "$HIVEMIND_POSTGRESQL_CONNECTION_STRING" "$HIVEMIND_SOURCE_HIVED_URL" $HIVEMIND_HTTP_PORT
artifacts:
paths:
- dist/
expire_in: 7 days
- hive_server.pid
expire_in: 1 week
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "develop"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
tags:
- hivemind-light-job
- hivemind
# Postgres shared
hivemind-sync:
<<: *default-rules
stage: data-supply
needs: []
hivemind_stop_server:
stage: post-deploy
environment:
name: "hive serve built from branch $CI_COMMIT_REF_NAME exposed on port $HIVEMIND_HTTP_PORT"
action: stop
variables:
GIT_STRATEGY: none
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- when: manual
script:
- *hive-sync-script-common
artifacts:
paths:
- hivemind-sync.log
- pg-stats
expire_in: 7 days
- scripts/ci_stop_server.sh hive_server.pid
needs:
- job: hivemind_start_server
artifacts: true
tags:
- hivemind-heavy-job
- hivemind
# Postgres as service
.hivemind-sync:
<<: *default-rules
stage: data-supply
services:
- *postgres-10
# - *postgres-12
needs: []
script:
- *hive-sync-script-common
# - ./scripts/ci/dump-db.sh
artifacts:
paths:
- hivemind-sync.log
- pg-stats
- pg-dump-${HIVEMIND_DB_NAME}
expire_in: 7 hours
tags:
- hivemind-heavy-job
- hive_server.log
.hivemind_start_api_smoketest: &common_api_smoketest_job
stage: e2e-test
environment: hive-4.pl.syncad.com
needs:
- job: hivemind_start_server
artifacts: true
variables:
GIT_STRATEGY: none
.e2e-test-common:
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
needs:
- job: hivemind-sync
artifacts: false
before_script:
- *start-timer
- *fetch-git-tags
- *set-variables
- *setup-pip
- ./scripts/ci/wait-for-postgres.sh ${POSTGRES_HOST} ${POSTGRES_PORT}
- ./scripts/ci/hive-server.sh start
after_script:
- ./scripts/ci/hive-server.sh stop
- *stop-timer
tags:
- hivemind-light-job
- hivemind
bridge_api_smoketest:
stage: e2e-test
extends: .e2e-test-common
<<: *common_api_smoketest_job
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
bridge_api_patterns/ api_smoketest_bridge.xml
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" bridge_api_patterns/ api_smoketest_bridge.xml
artifacts:
reports:
junit: api_smoketest_bridge.xml
bridge_api_smoketest_negative:
stage: e2e-test
extends: .e2e-test-common
<<: *common_api_smoketest_job
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
bridge_api_negative/ api_smoketest_bridge_negative.xml
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" bridge_api_negative/ api_smoketest_bridge_negative.xml
artifacts:
reports:
junit: api_smoketest_bridge_negative.xml
condenser_api_smoketest:
stage: e2e-test
extends: .e2e-test-common
<<: *common_api_smoketest_job
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
condenser_api_patterns/ api_smoketest_condenser_api.xml
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" condenser_api_patterns/ api_smoketest_condenser_api.xml
artifacts:
reports:
junit: api_smoketest_condenser_api.xml
condenser_api_smoketest_negative:
stage: e2e-test
extends: .e2e-test-common
<<: *common_api_smoketest_job
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
condenser_api_negative/ api_smoketest_condenser_api_negative.xml
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" condenser_api_negative/ api_smoketest_condenser_api_negative.xml
artifacts:
reports:
junit: api_smoketest_condenser_api_negative.xml
database_api_smoketest:
stage: e2e-test
extends: .e2e-test-common
<<: *common_api_smoketest_job
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
database_api_patterns/ api_smoketest_database_api.xml
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" database_api_patterns/ api_smoketest_database_api.xml
artifacts:
reports:
junit: api_smoketest_database_api.xml
database_api_smoketest_negative:
stage: e2e-test
extends: .e2e-test-common
<<: *common_api_smoketest_job
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
database_api_negative/ api_smoketest_database_api_negative.xml
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" database_api_negative/ api_smoketest_database_api_negative.xml
artifacts:
reports:
junit: api_smoketest_database_api_negative.xml
follow_api_smoketest:
stage: e2e-test
extends: .e2e-test-common
<<: *common_api_smoketest_job
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
follow_api_patterns/ api_smoketest_follow_api.xml
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" follow_api_patterns/ api_smoketest_follow_api.xml
artifacts:
reports:
junit: api_smoketest_follow_api.xml
follow_api_smoketest_negative:
stage: e2e-test
extends: .e2e-test-common
<<: *common_api_smoketest_job
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
follow_api_negative/ api_smoketest_follow_api_negative.xml
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" follow_api_negative/ api_smoketest_follow_api_negative.xml
artifacts:
reports:
junit: api_smoketest_follow_api_negative.xml
tags_api_smoketest:
stage: e2e-test
extends: .e2e-test-common
<<: *common_api_smoketest_job
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
tags_api_patterns/ api_smoketest_tags_api.xml
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" tags_api_patterns/ api_smoketest_tags_api.xml
artifacts:
reports:
junit: api_smoketest_tags_api.xml
tags_api_smoketest_negative:
stage: e2e-test
extends: .e2e-test-common
<<: *common_api_smoketest_job
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
tags_api_negative/ api_smoketest_tags_api_negative.xml
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" tags_api_negative/ api_smoketest_tags_api_negative.xml
artifacts:
reports:
junit: api_smoketest_tags_api_negative.xml
@@ -238,8 +238,10 @@ class DbState:
synced_blocks = current_imported_block - last_imported_block
force_index_rebuild = False
massive_sync_preconditions = False
if synced_blocks >= SYNCED_BLOCK_LIMIT:
force_index_rebuild = True
massive_sync_preconditions = True
#is_pre_process, drop, create
cls.processing_indexes( False, force_index_rebuild, True )
@@ -248,14 +250,24 @@ class DbState:
time_start = perf_counter()
# Update the count of all child posts (which was put on hold during initial sync)
sql = """
select update_hive_posts_children_count({}, {})
""".format(last_imported_block, current_imported_block)
row = DbState.db().query_row(sql)
if massive_sync_preconditions:
# Update the count of all child posts (which was put on hold during initial sync)
sql = """
select update_all_hive_posts_children_count()
"""
row = DbState.db().query_row(sql)
time_end = perf_counter()
log.info("[INIT] update_hive_posts_children_count executed in %.4fs", time_end - time_start)
time_end = perf_counter()
log.info("[INIT] update_all_hive_posts_children_count executed in %.4fs", time_end - time_start)
else:
# Update the count of child posts processed during partial sync (which was put on hold during initial sync)
sql = """
select update_hive_posts_children_count({}, {})
""".format(last_imported_block, current_imported_block)
row = DbState.db().query_row(sql)
time_end = perf_counter()
log.info("[INIT] update_hive_posts_children_count executed in %.4fs", time_end - time_start)
time_start = perf_counter()
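Both branches above delegate to SQL functions (defined in update_hive_posts_children_count.sql further down); a hedged sketch of invoking them by hand, with placeholder block numbers standing in for last_imported_block and current_imported_block:

-- Massive-sync path: rebuild children counts for every post from scratch.
SELECT update_all_hive_posts_children_count();
-- Partial-sync path: update only posts touched in a block range
-- (4000000 and 4000100 are hypothetical values).
SELECT update_hive_posts_children_count(4000000, 4000100);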
@@ -335,7 +347,7 @@ class DbState:
cls.update_work_mem(current_work_mem)
if synced_blocks >= SYNCED_BLOCK_LIMIT:
if massive_sync_preconditions:
from hive.db.schema import create_fk, set_logged_table_attribute
# intentionally disabled since it needs a lot of WAL disk space when switching back to LOGGED
#set_logged_table_attribute(cls.db(), True)
@@ -508,6 +508,25 @@ def setup(db):
"""
db.query_no_return(sql)
sql = """
CREATE TABLE IF NOT EXISTS hive_db_patch_level
(
level SERIAL NOT NULL PRIMARY KEY,
patch_date timestamp without time zone NOT NULL,
patched_to_revision TEXT
);
"""
db.query_no_return(sql)
sql = """
INSERT INTO hive_db_patch_level
(patch_date, patched_to_revision)
values
(now(), '{}');
"""
from hive.version import GIT_REVISION
db.query_no_return(sql.format(GIT_REVISION))
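With this bookkeeping in place, the revision a database has been patched to can be read back at any time; a minimal illustrative query against the table defined above:

-- Illustrative only: show the most recently applied patch.
SELECT level, patch_date, patched_to_revision
FROM hive_db_patch_level
ORDER BY level DESC
LIMIT 1;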
# max_time_stamp definition moved into utility_functions.sql
# get_discussion definition moved to bridge_get_discussion.sql
@@ -565,6 +584,8 @@ def setup(db):
dir_path = dirname(realpath(__file__))
for script in sql_scripts:
execute_sql_script(db.query_no_return, "{}/sql_scripts/{}".format(dir_path, script))
#!/bin/bash
set -e
set -o pipefail
echo "Usage ./db_upgrade.sh <user-name> <db-name>"
rm -f ./upgrade.log
for sql in postgres_handle_view_changes.sql \
upgrade/upgrade_table_schema.sql \
utility_functions.sql \
hive_accounts_view.sql \
hive_accounts_info_view.sql \
hive_posts_base_view.sql \
hive_posts_view.sql \
hive_votes_view.sql \
hive_post_operations.sql \
head_block_time.sql \
update_feed_cache.sql \
payout_stats_view.sql \
update_hive_posts_mentions.sql \
find_tag_id.sql \
bridge_get_ranked_post_type.sql \
bridge_get_ranked_post_for_communities.sql \
bridge_get_ranked_post_for_observer_communities.sql \
bridge_get_ranked_post_for_tag.sql \
bridge_get_ranked_post_for_all.sql \
calculate_account_reputations.sql \
update_communities_rank.sql \
delete_hive_posts_mentions.sql \
notifications_view.sql \
notifications_api.sql \
bridge_get_account_posts_by_comments.sql \
bridge_get_account_posts_by_payout.sql \
bridge_get_account_posts_by_posts.sql \
bridge_get_account_posts_by_replies.sql \
bridge_get_relationship_between_accounts.sql \
bridge_get_post.sql \
bridge_get_discussion.sql \
condenser_api_post_type.sql \
condenser_api_post_ex_type.sql \
condenser_get_blog.sql \
condenser_get_content.sql \
condenser_get_discussions_by_created.sql \
condenser_get_discussions_by_blog.sql \
hot_and_trends.sql \
condenser_get_discussions_by_trending.sql \
condenser_get_discussions_by_hot.sql \
condenser_get_discussions_by_promoted.sql \
condenser_get_post_discussions_by_payout.sql \
condenser_get_comment_discussions_by_payout.sql \
update_hive_posts_children_count.sql \
update_hive_posts_api_helper.sql \
database_api_list_comments.sql \
database_api_list_votes.sql \
update_posts_rshares.sql \
update_hive_post_root_id.sql
do
echo Executing psql -U $1 -d $2 -f $sql
time psql -1 -v "ON_ERROR_STOP=1" -U $1 -d $2 -c '\timing' -f $sql 2>&1 | tee -a -i upgrade.log
echo $?
done
time psql -v "ON_ERROR_STOP=1" -U $1 -d $2 -c '\timing' -f upgrade/upgrade_runtime_migration.sql 2>&1 | tee -a -i upgrade.log
DROP VIEW IF EXISTS public.hive_accounts_view;
DROP VIEW IF EXISTS public.hive_accounts_view CASCADE;
CREATE OR REPLACE VIEW public.hive_accounts_view
AS
@@ -25,7 +25,10 @@ FROM
, rank() OVER(order by ha3.reputation DESC) as rank
FROM hive_accounts ha3
ORDER BY ha3.reputation DESC LIMIT 150000
-- only ~2% of accounts share a reputation, i.e. about 2000 in 100000, but we take 150000 as if 50% repeated
-- The conditions above (related to rank.position) eliminate all records having rank > 100k. So, including some
-- additional space for redundant accounts (those having the same reputation), let's assume a limit of 150k.
-- As another reason, it can be pointed out that only ~2% of accounts share a reputation, i.e. about 2000
-- in 100000, but we take 150000 as if 50% repeated.
) as ha2 ON ha2.id = ha.id
) rank
;
@@ -58,7 +61,7 @@ $function$
LANGUAGE plpgsql IMMUTABLE
;
DROP FUNCTION IF EXISTS public.calculate_value_of_vote_on_post;
DROP FUNCTION IF EXISTS public.calculate_value_of_vote_on_post CASCADE;
CREATE OR REPLACE FUNCTION public.calculate_value_of_vote_on_post(
_post_payout hive_posts.payout%TYPE
, _post_rshares hive_posts_view.rshares%TYPE
@@ -60,3 +60,52 @@ WHERE uhp.id = data_source.queried_parent
;
END
$BODY$;
DROP FUNCTION IF EXISTS public.update_all_hive_posts_children_count;
CREATE OR REPLACE FUNCTION public.update_all_hive_posts_children_count()
RETURNS void
LANGUAGE 'plpgsql'
VOLATILE
AS $BODY$
declare __depth INT;
BEGIN
SELECT MAX(hp.depth) into __depth FROM hive_posts hp ;
CREATE UNLOGGED TABLE IF NOT EXISTS __post_children
(
id INT NOT NULL,
child_count INT NOT NULL,
CONSTRAINT __post_children_pkey PRIMARY KEY (id)
);
TRUNCATE TABLE __post_children;
WHILE __depth >= 0 LOOP
INSERT INTO __post_children
(id, child_count)
SELECT
h1.parent_id AS queried_parent,
SUM(COALESCE((SELECT pc.child_count FROM __post_children pc WHERE pc.id = h1.id),
0
) + 1
) AS count
FROM hive_posts h1
WHERE (h1.parent_id != 0 OR __depth = 0) AND h1.counter_deleted = 0 AND h1.id != 0 AND h1.depth = __depth
GROUP BY h1.parent_id
ON CONFLICT ON CONSTRAINT __post_children_pkey DO UPDATE
SET child_count = __post_children.child_count + excluded.child_count
;
__depth := __depth -1;
END LOOP;
UPDATE hive_posts uhp
SET children = s.child_count
FROM
__post_children s
WHERE s.id = uhp.id and s.child_count != uhp.children
;
END
$BODY$;
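The loop walks the post tree bottom-up: at each depth it folds every post's accumulated subtree count (plus one for the post itself) into its parent's row in __post_children, so once __depth reaches 0 the scratch table holds the full descendant count per post. A hedged consistency check, assuming the standard hivemind tables; an empty result means the counts are sane:

-- Illustrative only: a post's total children count must be at least
-- its number of direct, non-deleted replies.
SELECT hp.id, hp.children, COUNT(c.id) AS direct_replies
FROM hive_posts hp
JOIN hive_posts c ON c.parent_id = hp.id AND c.counter_deleted = 0
WHERE hp.counter_deleted = 0
GROUP BY hp.id, hp.children
HAVING hp.children < COUNT(c.id)
LIMIT 10;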
@@ -10,6 +10,7 @@ AS
$BODY$
BEGIN
SET LOCAL work_mem='2GB';
SET LOCAL enable_seqscan=False;
UPDATE hive_posts hp
SET
abs_rshares = votes_rshares.abs_rshares
@@ -34,6 +35,7 @@ FROM
WHERE hp.id = votes_rshares.post_id
AND (hp.abs_rshares != votes_rshares.abs_rshares OR hp.vote_rshares != votes_rshares.rshares);
RESET work_mem;
RESET enable_seqscan;
END;
$BODY$
;
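SET LOCAL, unlike plain SET, reverts automatically at the end of the enclosing transaction (and a function body runs inside one), so the enlarged work_mem and disabled sequential scans cannot leak into later statements even if the explicit RESETs were skipped. A minimal sketch of the pattern in isolation:

-- Illustrative only: both settings vanish at COMMIT or ROLLBACK.
BEGIN;
SET LOCAL work_mem = '2GB';
SET LOCAL enable_seqscan = off;
-- ... the heavy UPDATE would run here ...
COMMIT;
SHOW work_mem; -- back to the session/default value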
START TRANSACTION;
DO
$BODY$
BEGIN
SET work_mem='2GB';
IF EXISTS(SELECT * FROM hive_db_data_migration WHERE migration = 'Reputation calculation') THEN
RAISE NOTICE 'Performing initial account reputation calculation...';
PERFORM update_account_reputations(NULL, NULL);
ELSE
RAISE NOTICE 'Skipping initial account reputation calculation...';
END IF;
END
$BODY$;
COMMIT;
START TRANSACTION;
DO
$BODY$
BEGIN
IF EXISTS(SELECT * FROM hive_db_data_migration WHERE migration = 'hive_posts_api_helper fill') THEN
RAISE NOTICE 'Performing initial hive_posts_api_helper collection...';
SET work_mem='2GB';
TRUNCATE TABLE hive_posts_api_helper;
DROP INDEX IF EXISTS hive_posts_api_helper_author_permlink_idx;
DROP INDEX IF EXISTS hive_posts_api_helper_author_s_permlink_idx;
PERFORM update_hive_posts_api_helper(NULL, NULL);
CREATE INDEX IF NOT EXISTS hive_posts_api_helper_author_s_permlink_idx ON hive_posts_api_helper (author_s_permlink);
ELSE
RAISE NOTICE 'Skipping initial hive_posts_api_helper collection...';
END IF;
END
$BODY$;
COMMIT;
START TRANSACTION;
DO
$BODY$
BEGIN
IF EXISTS(SELECT * FROM hive_db_data_migration WHERE migration = 'hive_mentions fill') THEN
RAISE NOTICE 'Performing initial post body mentions collection...';
SET work_mem='2GB';
DROP INDEX IF EXISTS hive_mentions_block_num_idx;
PERFORM update_hive_posts_mentions(0, (SELECT hb.num FROM hive_blocks hb ORDER BY hb.num DESC LIMIT 1) );
CREATE INDEX IF NOT EXISTS hive_mentions_block_num_idx ON hive_mentions (block_num);
ELSE
RAISE NOTICE 'Skipping initial post body mentions collection...';
END IF;
END
$BODY$;
COMMIT;
START TRANSACTION;
DO
$BODY$
BEGIN
IF EXISTS (SELECT * FROM hive_db_data_migration WHERE migration = 'update_posts_rshares( 0, head_block_number) execution') THEN
RAISE NOTICE 'Performing posts rshares, hot and trend recalculation on range ( 0, head_block_number)...';
SET work_mem='2GB';
PERFORM update_posts_rshares(0, (SELECT hb.num FROM hive_blocks hb ORDER BY hb.num DESC LIMIT 1) );
DELETE FROM hive_db_data_migration WHERE migration = 'update_posts_rshares( 0, head_block_number) execution';
ELSE
RAISE NOTICE 'Skipping update_posts_rshares( 0, head_block_number) recalculation...';
END IF;
END
$BODY$;
COMMIT;
START TRANSACTION;
DO
$BODY$
BEGIN
IF EXISTS (SELECT * FROM hive_db_data_migration WHERE migration = 'update_hive_posts_children_count execution') THEN
RAISE NOTICE 'Performing initial post children count execution ( 0, head_block_number)...';
SET work_mem='2GB';
update hive_posts set children = 0 where children != 0;
PERFORM update_all_hive_posts_children_count();
DELETE FROM hive_db_data_migration WHERE migration = 'update_hive_posts_children_count execution';
ELSE
RAISE NOTICE 'Skipping initial post children count execution ( 0, head_block_number) recalculation...';
END IF;
END
$BODY$;
COMMIT;
START TRANSACTION;
DO
$BODY$
BEGIN
IF EXISTS (SELECT * FROM hive_db_data_migration WHERE migration = 'update_hive_post_mentions refill execution') THEN
RAISE NOTICE 'Performing hive_mentions refill...';
SET work_mem='2GB';
TRUNCATE TABLE hive_mentions RESTART IDENTITY;
PERFORM update_hive_posts_mentions(0, (select max(num) from hive_blocks));
DELETE FROM hive_db_data_migration WHERE migration = 'update_hive_post_mentions refill execution';
ELSE
RAISE NOTICE 'Skipping hive_mentions refill...';
END IF;
END
$BODY$;
COMMIT;
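Each transaction above follows the same queue convention: a marker row in hive_db_data_migration enables the corresponding step, and markers are cleared once processed (individually, or wholesale by the TRUNCATE just below). Enqueuing a step by hand is a plain insert; an illustrative sketch reusing one of the marker strings from this file:

-- Illustrative only: request the rshares recalculation on the next run.
INSERT INTO hive_db_data_migration (migration)
VALUES ('update_posts_rshares( 0, head_block_number) execution');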
START TRANSACTION;
TRUNCATE TABLE hive_db_data_migration;
insert into hive_db_patch_level
(patch_date, patched_to_revision)
select ds.patch_date, ds.patch_revision
from
(
values
(now(), '7b8def051be224a5ebc360465f7a1522090c7125'),
(now(), 'e17bfcb08303cbf07b3ce7d1c435d59a368b4a9e'),
(now(), '0be8e6e8b2121a8f768113e35e47725856c5da7c'), -- update_hot_and_trending_for_blocks fix, https://gitlab.syncad.com/hive/hivemind/-/merge_requests/247
(now(), '26c2f1862770178d4575ec09e9f9c225dcf3d206'), -- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/252
(now(), 'e8b65adf22654203f5a79937ff2a95c5c47e10c5'), -- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/251
(now(), '8d0b673e7c40c05d2b8ae74ccf32adcb6b11f906'), -- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/265
-- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/281
-- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/282
-- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/257
-- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/251
-- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/265
--
(now(), '45c2883131472cc14a03fe4e355ba1435020d720'),
(now(), '7cfc2b90a01b32688075b22a6ab173f210fc770f'), -- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/286
(now(), 'f2e5f656a421eb1dd71328a94a421934eda27a87') -- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/275
,(now(), '4cdf5d19f6cfcb73d3fa504cac9467c4df31c02e') -- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/295
--- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/294
--- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/298
--- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/301
--- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/297
--- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/302
,(now(), '166327bfa87beda588b20bfcfa574389f4100389')
,(now(), '88e62bdf1fcc47809fec84424cf98c71ce87ca89') -- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/310
,(now(), 'f8ecf376da5e0efef64b79f91e9803eac8b163a4') -- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/289
,(now(), '0e3c8700659d98b45f1f7146dc46a195f905fc2d') -- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/306 update posts children count fix
,(now(), '9e126e9d762755f2b9a0fd68f076c9af6bb73b76') -- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/314 mentions fix
,(now(), '033619277eccea70118a5b8dc0c73b913da0025f') -- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/326 https://gitlab.syncad.com/hive/hivemind/-/merge_requests/322 posts rshares recalc
,(now(), '1847c75702384c7e34c624fc91f24d2ef20df91d') -- latest version of develop containing included changes.
) ds (patch_date, patch_revision)
where not exists (select null from hive_db_patch_level hpl where hpl.patched_to_revision = ds.patch_revision);
COMMIT;
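The NOT EXISTS anti-join makes the whole insert idempotent: rerunning the script records only revisions that are not yet present. The same guard works for a single new entry; a sketch using a deliberately hypothetical revision hash:

-- Illustrative only: '0123456789abcdef' is a hypothetical revision hash.
INSERT INTO hive_db_patch_level (patch_date, patched_to_revision)
SELECT now(), '0123456789abcdef'
WHERE NOT EXISTS (SELECT NULL FROM hive_db_patch_level hpl
                  WHERE hpl.patched_to_revision = '0123456789abcdef');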
CREATE TABLE IF NOT EXISTS hive_db_patch_level
(
level SERIAL NOT NULL PRIMARY KEY,
patch_date timestamp without time zone NOT NULL,
patched_to_revision TEXT
);
CREATE TABLE IF NOT EXISTS hive_db_data_migration
(
migration varchar(128) not null
);
DO $$
BEGIN
EXECUTE 'ALTER DATABASE '||current_database()||' SET join_collapse_limit TO 16';
EXECUTE 'ALTER DATABASE '||current_database()||' SET from_collapse_limit TO 16';
END
$$;
SHOW join_collapse_limit;
SHOW from_collapse_limit;
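One caveat: ALTER DATABASE ... SET only affects sessions opened afterwards, so the SHOW statements above may still report the old values in the very session that ran the ALTER. For an immediate effect in the current session the limits can also be set directly; an illustrative sketch:

-- Illustrative only: session-level override, effective immediately.
SET join_collapse_limit = 16;
SET from_collapse_limit = 16;
SHOW join_collapse_limit;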
DO
$BODY$
BEGIN
IF NOT EXISTS(SELECT data_type
FROM information_schema.columns
WHERE table_name = 'hive_accounts' AND column_name = 'is_implicit') THEN
RAISE NOTICE 'Performing hive_accounts upgrade - adding new column is_implicit';
PERFORM deps_save_and_drop_dependencies('public', 'hive_accounts', true);
alter table ONLY hive_accounts
add column is_implicit boolean,
alter column is_implicit set default True;
--- reputations have to be recalculated from scratch.
update hive_accounts set reputation = 0, is_implicit = True;
alter table ONLY hive_accounts
alter column is_implicit set not null;
perform deps_restore_dependencies('public', 'hive_accounts');
INSERT INTO hive_db_data_migration VALUES ('Reputation calculation');
ELSE
RAISE NOTICE 'hive_accounts::is_implicit migration skipped';
END IF;
IF EXISTS(SELECT data_type
FROM information_schema.columns
WHERE table_name = 'hive_accounts' AND column_name = 'blacklist_description') THEN
RAISE NOTICE 'Performing hive_accounts upgrade - removing columns blacklist_description/muted_list_description';
-- drop hive_accounts_info_view since it uses removed column. It will be rebuilt after upgrade
DROP VIEW IF EXISTS hive_accounts_info_view;
PERFORM deps_save_and_drop_dependencies('public', 'hive_accounts', true);
ALTER TABLE ONLY hive_accounts
DROP COLUMN IF EXISTS blacklist_description,
DROP COLUMN IF EXISTS muted_list_description
;
ELSE
RAISE NOTICE 'hive_accounts::blacklist_description/muted_list_description migration skipped';
END IF;
END
$BODY$;
DROP TABLE IF EXISTS hive_account_reputation_status;
drop index if exists hive_posts_sc_hot_idx;
drop index if exists hive_posts_sc_trend_idx;
drop index if exists hive_reblogs_blogger_id;
drop index if exists hive_subscriptions_community_idx;
drop index if exists hive_votes_post_id_idx;
drop index if exists hive_votes_voter_id_idx;
drop index if exists hive_votes_last_update_idx;
CREATE INDEX IF NOT EXISTS hive_posts_cashout_time_id_idx ON hive_posts (cashout_time, id);
CREATE INDEX IF NOT EXISTS hive_posts_updated_at_idx ON hive_posts (updated_at DESC);
CREATE INDEX IF NOT EXISTS hive_votes_block_num_idx ON hive_votes (block_num);
DO
$BODY$
BEGIN
IF NOT EXISTS(SELECT data_type
FROM information_schema.columns
WHERE table_name = 'hive_posts_api_helper' AND column_name = 'author_s_permlink') THEN
RAISE NOTICE 'Performing hive_posts_api_helper upgrade - adding new column author_s_permlink';
PERFORM deps_save_and_drop_dependencies('public', 'hive_posts_api_helper', true);
DROP INDEX IF EXISTS hive_posts_api_helper_parent_permlink_or_category;
DROP TABLE IF EXISTS hive_posts_api_helper;
CREATE TABLE public.hive_posts_api_helper
(
id integer NOT NULL,
author_s_permlink character varying(275) COLLATE pg_catalog."C" NOT NULL,
CONSTRAINT hive_posts_api_helper_pkey PRIMARY KEY (id)
);
perform deps_restore_dependencies('public', 'hive_posts_api_helper');
CREATE INDEX IF NOT EXISTS hive_posts_api_helper_author_s_permlink_idx ON hive_posts_api_helper (author_s_permlink);
INSERT INTO hive_db_data_migration VALUES ('hive_posts_api_helper fill');
ELSE
RAISE NOTICE 'hive_posts_api_helper migration skipped';
END IF;
END
$BODY$
;
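--- Note: author_s_permlink presumably holds the concatenated 'author/permlink' key
--- (varchar(275) leaves room for an account name plus a long permlink), and the C collation
--- lets the index above serve byte-wise prefix scans, e.g. (illustrative only):
--- SELECT id FROM hive_posts_api_helper
--- WHERE author_s_permlink >= 'alice/' AND author_s_permlink < 'alice0';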
DO
$BODY$
BEGIN
IF NOT EXISTS(SELECT data_type
FROM information_schema.columns
WHERE table_name = 'hive_mentions' AND column_name = 'block_num') THEN
RAISE NOTICE 'Performing hive_mentions upgrade - adding new column block_num';
TRUNCATE TABLE public.hive_mentions RESTART IDENTITY;
PERFORM deps_save_and_drop_dependencies('public', 'hive_mentions', true);
ALTER TABLE hive_mentions
DROP CONSTRAINT IF EXISTS hive_mentions_pk,
ADD COLUMN IF NOT EXISTS id SERIAL,
ADD COLUMN IF NOT EXISTS block_num INTEGER,
ALTER COLUMN id SET NOT NULL,
ALTER COLUMN block_num SET NOT NULL,
ADD CONSTRAINT hive_mentions_pk PRIMARY KEY (id);
perform deps_restore_dependencies('public', 'hive_mentions');
INSERT INTO hive_db_data_migration VALUES ('hive_mentions fill');
END IF;
END
$BODY$
;
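--- Note: hive_mentions is truncated above because existing rows carry no block_num value to
--- backfill; the queued 'hive_mentions fill' migration step leaves it to the application to
--- rebuild the table contents afterwards.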
---------------------------------------------------------------------------------------------------
-- hive_posts table migration
---------------------------------------------------------------------------------------------------
DO
$BODY$
BEGIN
IF EXISTS(SELECT data_type
FROM information_schema.columns
WHERE table_name = 'hive_posts' AND column_name = 'is_grayed') THEN
RAISE NOTICE 'Performing hive_posts upgrade - dropping is_grayed column';
--- Warning: hive_posts_view must be dropped first, since it references the is_grayed column that is being removed.
--- If the view were saved among the dependencies, restoring it would recreate the old (wrong) definition and raise an error.
DROP VIEW IF EXISTS hive_posts_view CASCADE;
PERFORM deps_save_and_drop_dependencies('public', 'hive_posts', true);
ALTER TABLE hive_posts
DROP COLUMN IF EXISTS is_grayed;
perform deps_restore_dependencies('public', 'hive_posts');
ELSE
RAISE NOTICE 'hive_posts upgrade - SKIPPED dropping is_grayed column';
END IF;
--- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/322
IF EXISTS(SELECT data_type FROM information_schema.columns
WHERE table_name = 'hive_posts' AND column_name = 'abs_rshares' AND data_type = 'bigint') AND
EXISTS(SELECT data_type FROM information_schema.columns
WHERE table_name = 'hive_posts' AND column_name = 'vote_rshares' AND data_type = 'bigint') AND
NOT EXISTS (SELECT data_type FROM information_schema.columns
WHERE table_name = 'hive_posts' AND column_name = 'block_num_created') THEN
RAISE NOTICE 'Performing hive_posts upgrade - adding block_num_created column, type change for abs_rshares/vote_rshares columns';
PERFORM deps_save_and_drop_dependencies('public', 'hive_posts', true);
ALTER TABLE ONLY hive_posts
ALTER COLUMN abs_rshares SET DATA TYPE NUMERIC,
ALTER COLUMN vote_rshares SET DATA TYPE NUMERIC,
ADD COLUMN block_num_created INTEGER;
UPDATE hive_posts SET block_num_created = 1; -- Placeholder value; the real creation block is unknown at this point and recovering it requires a full sync.
ALTER TABLE ONLY hive_posts
ALTER COLUMN block_num_created set not null;
perform deps_restore_dependencies('public', 'hive_posts');
ELSE
RAISE NOTICE 'SKIPPING hive_posts upgrade - adding a block_num_created column, type change for abs_rshares/vote_rshares columns';
END IF;
END
$BODY$
;
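--- Note: abs_rshares/vote_rshares are widened from BIGINT to NUMERIC presumably because
--- accumulated rshares sums can overflow the 64-bit range, which NUMERIC is immune to:
--- SELECT 9223372036854775807::bigint + 1;  -- ERROR: bigint out of range
--- SELECT 9223372036854775807::numeric + 1; -- OK, returns 9223372036854775808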
DROP INDEX IF EXISTS hive_posts_created_at_idx;
CREATE INDEX IF NOT EXISTS hive_posts_created_at_author_id_idx ON hive_posts (created_at, author_id);
CREATE INDEX IF NOT EXISTS hive_posts_block_num_created_idx ON hive_posts (block_num_created);
DROP INDEX IF EXISTS hive_mentions_post_id_idx;
-- updated up to 7b8def051be224a5ebc360465f7a1522090c7125
-- updated up to 033619277eccea70118a5b8dc0c73b913da0025f
INSERT INTO hive_db_data_migration
select 'update_posts_rshares( 0, head_block_number) execution'
where not exists (select null from hive_db_patch_level where patched_to_revision = '033619277eccea70118a5b8dc0c73b913da0025f')
;
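--- Note: the INSERT ... WHERE NOT EXISTS pattern above keeps this script re-runnable: a data
--- migration is queued only when hive_db_patch_level does not yet record the revision that
--- introduced it, so an already-patched database skips the expensive refill.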
-- updated to e8b65adf22654203f5a79937ff2a95c5c47e10c5 - See merge request hive/hivemind!251
-- Commented out: superseded by the MRs processed below.
--- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/298
--- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/302
--CREATE INDEX IF NOT EXISTS hive_posts_is_paidout_idx ON hive_posts (is_paidout);
--CREATE INDEX IF NOT EXISTS hive_posts_payout_plus_pending_payout_id ON hive_posts ((payout+pending_payout), id);
INSERT INTO hive_tag_data (id, tag) VALUES (0, '')
ON CONFLICT DO NOTHING;
--- updated to f2e5f656a421eb1dd71328a94a421934eda27a87 - See MR https://gitlab.syncad.com/hive/hivemind/-/merge_requests/275
DO
$BODY$
BEGIN
IF NOT EXISTS(SELECT data_type
FROM information_schema.columns
WHERE table_name = 'hive_follows' AND column_name = 'follow_muted') THEN
RAISE NOTICE 'Performing hive_follows upgrade - adding new column follow_muted';
PERFORM deps_save_and_drop_dependencies('public', 'hive_follows', true);
alter table ONLY hive_follows
add column follow_muted boolean,
alter column follow_muted set default False;
--- Fill the default value for all existing records.
update hive_follows set follow_muted = False;
alter table ONLY hive_follows
alter column follow_muted set not null;
perform deps_restore_dependencies('public', 'hive_follows');
ELSE
RAISE NOTICE 'hive_follows::follow_muted migration skipped';
END IF;
END
$BODY$;
--- 4cdf5d19f6cfcb73d3fa504cac9467c4df31c02e - https://gitlab.syncad.com/hive/hivemind/-/merge_requests/295
--- 9e126e9d762755f2b9a0fd68f076c9af6bb73b76 - https://gitlab.syncad.com/hive/hivemind/-/merge_requests/314 mentions fix
INSERT INTO hive_db_data_migration
select 'update_hive_post_mentions refill execution'
where not exists (select null from hive_db_patch_level where patched_to_revision = '9e126e9d762755f2b9a0fd68f076c9af6bb73b76' )
;
--- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/298
DROP INDEX IF EXISTS hive_posts_is_paidout_idx;
DROP INDEX IF EXISTS hive_posts_sc_trend_id_idx;
DROP INDEX IF EXISTS hive_posts_sc_hot_id_idx;
CREATE INDEX IF NOT EXISTS hive_posts_sc_trend_id_is_paidout_idx ON hive_posts (sc_trend, id, is_paidout);
CREATE INDEX IF NOT EXISTS hive_posts_sc_hot_id_is_paidout_idx ON hive_posts (sc_hot, id, is_paidout);
--- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/302
DROP INDEX IF EXISTS hive_posts_payout_plus_pending_payout_id;
CREATE INDEX IF NOT EXISTS hive_posts_payout_plus_pending_payout_id_is_paidout_idx ON hive_posts ((payout+pending_payout), id, is_paidout);
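--- Note: the expression index above can presumably serve payout-ranked queries that order by
--- the total payout and filter on payout state, e.g. (illustrative only):
--- SELECT id FROM hive_posts
--- WHERE NOT is_paidout
--- ORDER BY (payout + pending_payout) DESC, id DESC
--- LIMIT 20;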
--- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/310
CREATE INDEX IF NOT EXISTS hive_votes_voter_id_last_update_idx ON hive_votes (voter_id, last_update);
--- https://gitlab.syncad.com/hive/hivemind/-/merge_requests/306 update posts children count fix
--- 0e3c8700659d98b45f1f7146dc46a195f905fc2d
INSERT INTO hive_db_data_migration
select 'update_hive_posts_children_count execution'
where not exists (select null from hive_db_patch_level where patched_to_revision = '0e3c8700659d98b45f1f7146dc46a195f905fc2d' )
;
--- 1847c75702384c7e34c624fc91f24d2ef20df91d is the latest develop revision included in this migration script.
--- Rename the hive_votes_ux1 unique constraint to hive_votes_voter_id_author_id_permlink_id_uk.
DO $$
BEGIN
IF EXISTS (SELECT * FROM pg_constraint WHERE conname='hive_votes_ux1') THEN
RAISE NOTICE 'Attempting to rename hive_votes_ux1 to hive_votes_voter_id_author_id_permlink_id_uk...';
ALTER TABLE hive_votes RENAME CONSTRAINT hive_votes_ux1 to hive_votes_voter_id_author_id_permlink_id_uk;
END IF;
END
$$;
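--- The rename can be verified against the catalog (illustrative check):
--- SELECT conname FROM pg_constraint
--- WHERE conrelid = 'hive_votes'::regclass AND contype = 'u';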
@@ -436,8 +436,8 @@ class Blocks:
 "SELECT update_hive_posts_root_id({},{})".format(first_block, last_block),
 "SELECT update_hive_posts_api_helper({},{})".format(first_block, last_block),
 "SELECT update_feed_cache({}, {})".format(first_block, last_block),
-"SELECT update_hive_posts_mentions({}, {})".format(first_block, last_block),
-"SELECT update_account_reputations({}, {})".format(first_block, last_block)
+"SELECT update_hive_posts_mentions({}, {})".format(first_block, last_block)
+#,"SELECT update_account_reputations({}, {})".format(first_block, last_block)
 ]
 for query in queries:
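Note: the per-block "SELECT update_account_reputations(...)" call is commented out above; reputations are presumably rebuilt by the 'Reputation calculation' data migration queued earlier in the upgrade script instead of being recomputed for every synced block range.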
@@ -277,7 +277,7 @@ class Follow(DbAdapterHolder):
 UPDATE
 hive_follows hf
 SET
-hf.blacklisted = false
+blacklisted = false
 FROM
 (
 SELECT
@@ -298,7 +298,7 @@ class Follow(DbAdapterHolder):
 UPDATE
 hive_follows hf
 SET
-hf.state = 0
+state = 0
 FROM
 (
 SELECT
@@ -320,7 +320,7 @@ class Follow(DbAdapterHolder):
 UPDATE
 hive_follows hf
 SET
-hf.state = 0
+state = 0
 FROM
 (
 SELECT
@@ -342,7 +342,7 @@ class Follow(DbAdapterHolder):
 UPDATE
 hive_follows hf
 SET
-hf.follow_blacklists = false
+follow_blacklists = false
 FROM
 (
 SELECT
@@ -383,7 +383,7 @@ class Follow(DbAdapterHolder):
 UPDATE
 hive_follows hf
 SET
-hf.follow_muted = false
+follow_muted = false
 FROM
 (
 SELECT
@@ -401,7 +401,7 @@ class Follow(DbAdapterHolder):
 UPDATE
 hive_follows hf
 SET
-hf.follow_muted = true
+follow_muted = true
 FROM
 (
 SELECT
@@ -423,10 +423,10 @@ class Follow(DbAdapterHolder):
 UPDATE
 hive_follows hf
 SET
-hf.blacklisted = false,
-hf.follow_blacklists = false,
-hf.follow_muted = false,
-hf.state = 0
+blacklisted = false,
+follow_blacklists = false,
+follow_muted = false,
+state = 0
 FROM
 (
 SELECT
@@ -444,8 +444,8 @@ class Follow(DbAdapterHolder):
 UPDATE
 hive_follows hf
 SET
-hf.follow_blacklists = true,
-hf.follow_muted = true
+follow_blacklists = true,
+follow_muted = true
 FROM
 (
 SELECT
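Note on the hunks above: PostgreSQL rejects a table-qualified target column in the SET clause of UPDATE (the alias may only appear on the right-hand side and in other clauses), so the hf.-prefixed assignments are replaced with bare column names. A minimal sketch of the valid shape, using a hypothetical data-source subquery (illustrative only):

UPDATE hive_follows hf
SET state = ds.new_state        -- target column must be unqualified
FROM (SELECT 0 AS new_state) ds -- the alias hf is fine everywhere else
WHERE hf.state <> ds.new_state;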
#!/bin/bash
# TODO This script needs review.
set -euo pipefail
HIVEMIND_PID=0
@@ -9,16 +11,27 @@ START_DELAY=5
# For debug only!
# HIVED_URL='{"default":"http://hived-node:8091"}'
# HIVED_URL='{"default":"http://172.17.0.1:8091"}'
# HIVED_URL='{"default":"http://127.0.0.1:8091"}'
# HIVEMIND_HTTP_PORT="8080"
# HIVEMIND_POSTGRESQL_CONNECTION_STRING="postgresql://syncad:devdev@localhost:5432/hive_test"
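# check_pid: read hive_server.pid when present and verify that the recorded process is
# still alive; a stale pid file (process already gone) is removed and HIVEMIND_PID reset to 0.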
check_pid() {
if [ -f hive_server.pid ]; then
HIVEMIND_PID=`cat hive_server.pid`
if ps -p $HIVEMIND_PID > /dev/null
then
# Process is running
echo "Process pid $HIVEMIND_PID is running"
else
# Process is not running
rm hive_server.pid
HIVEMIND_PID=0
fi
else
HIVEMIND_PID=0
fi
}
stop() {
if [ "$HIVEMIND_PID" -gt "0" ]; then
HIVEMIND_PID=`cat hive_server.pid`
@@ -61,16 +74,34 @@ start() {
for i in `seq 1 10`; do
if [ -f hive_server.pid ]; then
echo "Hive server has been started (pid $HIVEMIND_PID)"
echo "Starting hive server (pid $HIVEMIND_PID)"
# Wait some time to allow its initialization.
sleep $START_DELAY
exit 0
# Check if process is still running.
if ps -p $HIVEMIND_PID > /dev/null
then
echo "Hive server is running (pid $HIVEMIND_PID)"
exit 0
else
# Check if process executed successfully or not.
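# NOTE: `wait` can only report an exit status for a child of this shell, so this
# check presumably assumes the server was launched by this same script invocation.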
if wait $HIVEMIND_PID; then
echo "Hive server has been started (pid $HIVEMIND_PID)"
exit 0
else
RESULT=$?
echo "Hive server terminated abnormally (returned $RESULT)"
rm hive_server.pid;
exit $RESULT
fi
fi
else
sleep 1
fi
done
# If we are here something went wrong
# If we are here, something went wrong.
echo "Timeout reached. Hive server has not been started, exiting."
rm hive_server.pid;
exit 1
}