Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • hive/hivemind
1 result
Show changes
Commits on Source (18)
Showing
with 1981 additions and 385 deletions
......@@ -144,3 +144,5 @@ Pipfile.lock
pghero.yml
*~
.tmp
.private
stages:
- build
- test
- data-supply
- deploy
- e2e-test
- benchmark-tests
- post-deploy
variables:
GIT_DEPTH: 1
LC_ALL: "C"
GIT_STRATEGY: clone
GIT_SUBMODULE_STRATEGY: recursive
GIT_CLONE_PATH: $CI_BUILDS_DIR/$CI_COMMIT_REF_SLUG/$CI_CONCURRENT_ID/project-name
HIVEMIND_SOURCE_HIVED_URL: $HIVEMIND_SOURCE_HIVED_URL
HIVEMIND_DB_NAME: "hive_$CI_COMMIT_REF_SLUG"
HIVEMIND_HTTP_PORT: $((HIVEMIND_HTTP_PORT + CI_CONCURRENT_ID))
# Configured at gitlab repository settings side
POSTGRES_USER: $HIVEMIND_POSTGRES_USER
POSTGRES_PASSWORD: $HIVEMIND_POSTGRES_PASSWORD
POSTGRES_HOST_AUTH_METHOD: trust
# official way to provide password to psql: http://www.postgresql.org/docs/9.3/static/libpq-envars.html
PGPASSWORD: $HIVEMIND_POSTGRES_PASSWORD
default:
before_script:
- pwd
- echo "CI_NODE_TOTAL is $CI_NODE_TOTAL"
- echo "CI_NODE_INDEX is $CI_NODE_INDEX"
- echo "CI_CONCURRENT_ID is $CI_CONCURRENT_ID"
- echo "CI_COMMIT_REF_SLUG is $CI_COMMIT_REF_SLUG"
hivemind_build:
stage: build
script:
- pip3 install --user --upgrade pip setuptools
- git fetch --tags
- git tag -f ci_implicit_tag
- echo $PYTHONUSERBASE
- "python3 setup.py bdist_egg"
- ls -l dist/*
artifacts:
paths:
- dist/
expire_in: 1 week
tags:
- hivemind
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
when: always
- if: '$CI_COMMIT_BRANCH == "develop"'
when: always
hivemind_sync:
stage: data-supply
environment:
name: "hive sync built from branch $CI_COMMIT_REF_NAME targeting database $HIVEMIND_DB_NAME"
needs:
- job: hivemind_build
artifacts: true
variables:
GIT_STRATEGY: none
PYTHONUSERBASE: ./local-site
script:
- pip3 install --user --upgrade pip setuptools
# WARNING!!! temporarily hardcoded 5000017 instead $HIVEMIND_MAX_BLOCK
# revert that change when $HIVEMIND_MAX_BLOCK will be set to 5000017
- scripts/ci_sync.sh "$HIVEMIND_DB_NAME" "$HIVEMIND_POSTGRESQL_CONNECTION_STRING" "$HIVEMIND_SOURCE_HIVED_URL" 5000017 $HIVEMIND_HTTP_PORT
artifacts:
paths:
- hivemind-sync.log
expire_in: 1 week
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
when: always
- if: '$CI_COMMIT_BRANCH == "develop"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
tags:
- hivemind
hivemind_start_server:
stage: deploy
environment:
name: "hive serve built from branch $CI_COMMIT_REF_NAME exposed on port $HIVEMIND_HTTP_PORT"
url: "http://hive-4.pl.syncad.com:$HIVEMIND_HTTP_PORT"
on_stop: hivemind_stop_server
needs:
- job: hivemind_build
artifacts: true
# - job: hivemind_sync
# artifacts: true
variables:
GIT_STRATEGY: none
PYTHONUSERBASE: ./local-site
script:
- scripts/ci_start_server.sh "$HIVEMIND_DB_NAME" "$HIVEMIND_POSTGRESQL_CONNECTION_STRING" "$HIVEMIND_SOURCE_HIVED_URL" $HIVEMIND_HTTP_PORT
artifacts:
paths:
- hive_server.pid
expire_in: 1 week
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
when: always
- if: '$CI_COMMIT_BRANCH == "develop"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
tags:
- hivemind
hivemind_stop_server:
stage: post-deploy
environment:
name: "hive serve built from branch $CI_COMMIT_REF_NAME exposed on port $HIVEMIND_HTTP_PORT"
action: stop
variables:
GIT_STRATEGY: none
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
script:
- scripts/ci_stop_server.sh hive_server.pid
needs:
- job: hivemind_start_server
artifacts: true
tags:
- hivemind
artifacts:
paths:
- hive_server.log
.hivemind_start_api_smoketest: &common_api_smoketest_job
stage: e2e-test
environment: hive-4.pl.syncad.com
needs:
- job: hivemind_start_server
artifacts: true
variables:
GIT_STRATEGY: none
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
tags:
- hivemind
bridge_api_smoketest:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" bridge_api_patterns/ api_smoketest_bridge.xml
artifacts:
reports:
junit: api_smoketest_bridge.xml
bridge_api_smoketest_negative:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" bridge_api_negative/ api_smoketest_bridge_negative.xml
artifacts:
reports:
junit: api_smoketest_bridge_negative.xml
condenser_api_smoketest:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" condenser_api_patterns/ api_smoketest_condenser_api.xml
artifacts:
reports:
junit: api_smoketest_condenser_api.xml
condenser_api_smoketest_negative:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" condenser_api_negative/ api_smoketest_condenser_api_negative.xml
artifacts:
reports:
junit: api_smoketest_condenser_api_negative.xml
database_api_smoketest:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" database_api_patterns/ api_smoketest_database_api.xml
artifacts:
reports:
junit: api_smoketest_database_api.xml
database_api_smoketest_negative:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" database_api_negative/ api_smoketest_database_api_negative.xml
artifacts:
reports:
junit: api_smoketest_database_api_negative.xml
follow_api_smoketest:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" follow_api_patterns/ api_smoketest_follow_api.xml
artifacts:
reports:
junit: api_smoketest.xml
follow_api_smoketest_negative:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" follow_api_negative/ api_smoketest_follow_api_negative.xml
artifacts:
reports:
junit: api_smoketest_follow_api_negative.xml
tags_api_smoketest:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" tags_api_patterns/ api_smoketest_tags_api.xml
artifacts:
reports:
junit: api_smoketest_tags_api.xml
tags_api_smoketest_negative:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" tags_api_negative/ api_smoketest_tags_api_negative.xml
mock_tests:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" mock_tests/ api_smoketest_mock_tests.xml
api_smoketest_benchmark:
stage: benchmark-tests
environment: hive-4.pl.syncad.com
needs:
- job: hivemind_start_server
artifacts: true
allow_failure: true
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
tags:
- hivemind
script:
- ./scripts/ci_start_api_benchmarks.sh localhost $HIVEMIND_HTTP_PORT 5
artifacts:
when: always
paths:
- tavern_benchmarks_report.html
# https://hub.docker.com/r/library/python/tags/
image: "python:3.7"
stages:
- build
- test
- data-supply
- deploy
- e2e-test
- benchmark-tests
- post-deploy
variables:
GIT_DEPTH: 1
LC_ALL: "C"
GIT_STRATEGY: clone
GIT_SUBMODULE_STRATEGY: recursive
GIT_CLONE_PATH: $CI_BUILDS_DIR/$CI_COMMIT_REF_SLUG/$CI_CONCURRENT_ID/project-name
HIVEMIND_SOURCE_HIVED_URL: $HIVEMIND_SOURCE_HIVED_URL
HIVEMIND_DB_NAME: "hive_$CI_COMMIT_REF_SLUG"
HIVEMIND_HTTP_PORT: $((HIVEMIND_HTTP_PORT + CI_CONCURRENT_ID))
# Configured at gitlab repository settings side
POSTGRES_USER: $HIVEMIND_POSTGRES_USER
POSTGRES_PASSWORD: $HIVEMIND_POSTGRES_PASSWORD
POSTGRES_HOST_AUTH_METHOD: trust
# official way to provide password to psql: http://www.postgresql.org/docs/9.3/static/libpq-envars.html
PGPASSWORD: $HIVEMIND_POSTGRES_PASSWORD
before_script:
- pwd
- echo "CI_NODE_TOTAL is $CI_NODE_TOTAL"
- echo "CI_NODE_INDEX is $CI_NODE_INDEX"
- echo "CI_CONCURRENT_ID is $CI_CONCURRENT_ID"
- build
- test
- data-supply
- deploy
- e2e-test
- benchmark-tests
- post-deploy
.dk-setup-pip: &dk-setup-pip
- python -m venv .venv
- source .venv/bin/activate
- time pip install --upgrade pip setuptools wheel
- pip --version
- easy_install --version
- wheel version
- pipenv --version
- poetry --version
- time pip install --editable .[dev]
.dk-setup-runner-env: &dk-setup-runner-env
# Setup runner environment (to connect to correct postgres server, mainly).
- TMP_VAR=$(cat hive-sync-runner-id.txt 2>/dev/null || true); export HIVE_SYNC_RUNNER_ID=${TMP_VAR:-0}
- eval $(cat "$RUNNER_CONF" | ./scripts/ci/setup_env.py --current-runner-id=${CI_RUNNER_ID} --hive-sync-runner-id=${HIVE_SYNC_RUNNER_ID})
.dk-set-variables: &dk-set-variables
# - export # List all variables and its values set by Gitlab CI.
- whoami
- echo "CI_RUNNER_ID is $CI_RUNNER_ID"
- echo "CI_PIPELINE_URL is $CI_PIPELINE_URL"
- echo "CI_PIPELINE_ID is $CI_PIPELINE_ID"
- echo "CI_COMMIT_SHORT_SHA is $CI_COMMIT_SHORT_SHA"
- echo "CI_COMMIT_REF_SLUG is $CI_COMMIT_REF_SLUG"
- export HIVEMIND_DB_NAME=${HIVEMIND_DB_NAME//[^a-zA-Z0-9_]/_}
- echo "HIVEMIND_DB_NAME is $HIVEMIND_DB_NAME"
hivemind_build:
stage: build
script:
- pip3 install --user --upgrade pip setuptools
- git fetch --tags
- git tag -f ci_implicit_tag
- echo $PYTHONUSERBASE
- "python3 setup.py bdist_egg"
- ls -l dist/*
artifacts:
paths:
- dist/
expire_in: 1 week
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "develop"'
when: always
- when: always
tags:
- hivemind
hivemind_sync:
stage: data-supply
environment:
name: "hive sync built from branch $CI_COMMIT_REF_NAME targeting database $HIVEMIND_DB_NAME"
needs:
- job: hivemind_build
artifacts: true
variables:
GIT_STRATEGY: none
PYTHONUSERBASE: ./local-site
.dk-fetch-git-tags: &dk-fetch-git-tags
# - git fetch --tags # Looks to be unnecessary.
- git tag -f ci_implicit_tag # Needed to build python package
script:
- pip3 install --user --upgrade pip setuptools
# WARNING: hardcoded 5000017 for max block
- scripts/ci_sync.sh "$HIVEMIND_DB_NAME" "$HIVEMIND_POSTGRESQL_CONNECTION_STRING" "$HIVEMIND_SOURCE_HIVED_URL" 5000017 $HIVEMIND_HTTP_PORT
.dk-start-timer: &dk-start-timer
- ./scripts/ci/timer.sh start
artifacts:
paths:
- hivemind-sync.log
.dk-stop-timer: &dk-stop-timer
- ./scripts/ci/timer.sh check
expire_in: 1 week
.dk-hive-sync-script-common: &dk-hive-sync-script-common
- echo "${CI_RUNNER_ID}" > hive-sync-runner-id.txt
- ./scripts/ci/wait-for-postgres.sh "$RUNNER_POSTGRES_HOST" "$RUNNER_POSTGRES_PORT"
- export POSTGRES_MAJOR_VERSION=$(./scripts/ci/get-postgres-version.sh)
- ./scripts/ci/create-db.sh
- ./scripts/ci/hive-sync.sh
- ./scripts/ci/collect-db-stats.sh
.dk-rules-for-sync: &dk-rules-for-sync
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "develop"'
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
when: always
- if: '$CI_COMMIT_BRANCH == "develop"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
tags:
- hivemind
hivemind_start_server:
stage: deploy
environment:
name: "hive serve built from branch $CI_COMMIT_REF_NAME exposed on port $HIVEMIND_HTTP_PORT"
url: "http://hive-4.pl.syncad.com:$HIVEMIND_HTTP_PORT"
on_stop: hivemind_stop_server
needs:
- job: hivemind_build
artifacts: true
# - job: hivemind_sync
# artifacts: true
variables:
GIT_STRATEGY: none
PYTHONUSERBASE: ./local-site
script:
- scripts/ci_start_server.sh "$HIVEMIND_DB_NAME" "$HIVEMIND_POSTGRESQL_CONNECTION_STRING" "$HIVEMIND_SOURCE_HIVED_URL" $HIVEMIND_HTTP_PORT
artifacts:
paths:
- hive_server.pid
expire_in: 1 week
- when: manual
.dk-rules-for-test: &dk-rules-for-test
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "develop"'
when: always
when: on_success
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
when: on_success
- when: on_success
tags:
- hivemind
hivemind_stop_server:
stage: post-deploy
environment:
name: "hive serve built from branch $CI_COMMIT_REF_NAME exposed on port $HIVEMIND_HTTP_PORT"
action: stop
.dk-default:
image: hivemind/python:3.6
interruptible: true
inherit:
default: false
variables: false
variables:
GIT_STRATEGY: none
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- when: manual
GIT_DEPTH: 10
GIT_STRATEGY: fetch
GIT_SUBMODULE_STRATEGY: recursive
PIPENV_VENV_IN_PROJECT: 1
PIPENV_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pipenv"
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
POSTGRES_CLIENT_TOOLS_PATH: /usr/lib/postgresql
HIVEMIND_DB_NAME: "hive_${CI_COMMIT_REF_SLUG}"
cache: &dk-global-cache
# Per-branch caching. CI_COMMIT_REF_SLUG is the same thing.
# key: "$CI_COMMIT_REF_NAME"
# Per project caching – use any key.
# Change this key, if you need to clear cache.
key: common-1
paths:
- .cache/
- .venv/
- .tox/
before_script:
- *dk-start-timer
- *dk-fetch-git-tags
- *dk-set-variables
- *dk-setup-pip
- *dk-setup-runner-env
after_script:
- *dk-stop-timer
##### Jobs #####
dk-hivemind-sync:
# Postgres shared on host.
extends: .dk-default
<<: *dk-rules-for-sync
stage: data-supply
needs: []
script:
- scripts/ci_stop_server.sh hive_server.pid
needs:
- job: hivemind_start_server
artifacts: true
tags:
- hivemind
- *dk-hive-sync-script-common
artifacts:
paths:
- hive_server.log
- hivemind-sync.log
- pg-stats
- hive-sync-runner-id.txt
expire_in: 7 days
tags:
- hivemind-heavy-job
.hivemind_start_api_smoketest: &common_api_smoketest_job
stage: e2e-test
environment: hive-4.pl.syncad.com
.dk-test-common:
extends: .dk-default
<<: *dk-rules-for-test
needs:
- job: hivemind_start_server
- job: dk-hivemind-sync
artifacts: true
variables:
GIT_STRATEGY: none
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
allow_failure: false
before_script:
- *dk-start-timer
- *dk-fetch-git-tags
- *dk-set-variables
- *dk-setup-pip
- *dk-setup-runner-env
- ./scripts/ci/wait-for-postgres.sh "$RUNNER_POSTGRES_HOST" "$RUNNER_POSTGRES_PORT"
- ./scripts/ci/hive-server.sh start
after_script:
- *dk-stop-timer
tags:
- hivemind
- hivemind-light-job
bridge_api_smoketest:
<<: *common_api_smoketest_job
dk-bridge_api_smoketest:
stage: e2e-test
extends: .dk-test-common
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" bridge_api_patterns/ api_smoketest_bridge.xml
- |
./scripts/ci/start-api-smoketest.sh \
localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
bridge_api_patterns/ api_smoketest_bridge.xml \
$RUNNER_TEST_JOBS
artifacts:
when: always
reports:
junit: api_smoketest_bridge.xml
bridge_api_smoketest_negative:
<<: *common_api_smoketest_job
dk-bridge_api_smoketest_negative:
stage: e2e-test
extends: .dk-test-common
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" bridge_api_negative/ api_smoketest_bridge_negative.xml
- |
./scripts/ci/start-api-smoketest.sh \
localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
bridge_api_negative/ api_smoketest_bridge_negative.xml \
$RUNNER_TEST_JOBS
artifacts:
when: always
reports:
junit: api_smoketest_bridge_negative.xml
condenser_api_smoketest:
<<: *common_api_smoketest_job
dk-condenser_api_smoketest:
stage: e2e-test
extends: .dk-test-common
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" condenser_api_patterns/ api_smoketest_condenser_api.xml
- |
./scripts/ci/start-api-smoketest.sh \
localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
condenser_api_patterns/ api_smoketest_condenser_api.xml \
$RUNNER_TEST_JOBS
artifacts:
when: always
reports:
junit: api_smoketest_condenser_api.xml
condenser_api_smoketest_negative:
<<: *common_api_smoketest_job
dk-condenser_api_smoketest_negative:
stage: e2e-test
extends: .dk-test-common
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" condenser_api_negative/ api_smoketest_condenser_api_negative.xml
- |
./scripts/ci/start-api-smoketest.sh \
localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
condenser_api_negative/ api_smoketest_condenser_api_negative.xml \
$RUNNER_TEST_JOBS
artifacts:
when: always
reports:
junit: api_smoketest_condenser_api_negative.xml
database_api_smoketest:
<<: *common_api_smoketest_job
dk-database_api_smoketest:
stage: e2e-test
extends: .dk-test-common
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" database_api_patterns/ api_smoketest_database_api.xml
- |
./scripts/ci/start-api-smoketest.sh \
localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
database_api_patterns/ api_smoketest_database_api.xml \
$RUNNER_TEST_JOBS
artifacts:
when: always
reports:
junit: api_smoketest_database_api.xml
database_api_smoketest_negative:
<<: *common_api_smoketest_job
dk-database_api_smoketest_negative:
stage: e2e-test
extends: .dk-test-common
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" database_api_negative/ api_smoketest_database_api_negative.xml
- |
./scripts/ci/start-api-smoketest.sh \
localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
database_api_negative/ api_smoketest_database_api_negative.xml \
$RUNNER_TEST_JOBS
artifacts:
when: always
reports:
junit: api_smoketest_database_api_negative.xml
follow_api_smoketest:
<<: *common_api_smoketest_job
dk-follow_api_smoketest:
stage: e2e-test
extends: .dk-test-common
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" follow_api_patterns/ api_smoketest_follow_api.xml
- |
./scripts/ci/start-api-smoketest.sh \
localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
follow_api_patterns/ api_smoketest_follow_api.xml \
$RUNNER_TEST_JOBS
artifacts:
when: always
reports:
junit: api_smoketest.xml
follow_api_smoketest_negative:
<<: *common_api_smoketest_job
dk-follow_api_smoketest_negative:
stage: e2e-test
extends: .dk-test-common
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" follow_api_negative/ api_smoketest_follow_api_negative.xml
- |
./scripts/ci/start-api-smoketest.sh \
localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
follow_api_negative/ api_smoketest_follow_api_negative.xml \
$RUNNER_TEST_JOBS
artifacts:
when: always
reports:
junit: api_smoketest_follow_api_negative.xml
tags_api_smoketest:
<<: *common_api_smoketest_job
dk-tags_api_smoketest:
stage: e2e-test
extends: .dk-test-common
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" tags_api_patterns/ api_smoketest_tags_api.xml
- |
./scripts/ci/start-api-smoketest.sh \
localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
tags_api_negative/ api_smoketest_tags_api_negative.xml \
$RUNNER_TEST_JOBS
artifacts:
when: always
reports:
junit: api_smoketest_tags_api_negative.xml
dk-tags_api_smoketest_negative:
stage: e2e-test
extends: .dk-test-common
script:
- |
./scripts/ci/start-api-smoketest.sh \
localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
tags_api_patterns/ api_smoketest_tags_api.xml \
$RUNNER_TEST_JOBS
artifacts:
when: always
reports:
junit: api_smoketest_tags_api.xml
tags_api_smoketest_negative:
<<: *common_api_smoketest_job
dk-mock_tests:
stage: e2e-test
extends: .dk-test-common
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" tags_api_negative/ api_smoketest_tags_api_negative.xml
- |
scripts/ci/start-api-smoketest.sh \
localhost "$RUNNER_HIVEMIND_SERVER_HTTP_PORT" \
mock_tests/ api_smoketest_mock_tests.xml \
$RUNNER_TEST_JOBS
api_smoketest_benchmark:
dk-api-smoketest-benchmark:
stage: benchmark-tests
environment: hive-4.pl.syncad.com
needs:
- job: hivemind_start_server
artifacts: true
extends: .dk-test-common
# Temporary failure (when any call is longer than 1s is allowed)
allow_failure: true
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
tags:
- hivemind
script:
- ./scripts/ci_start_api_benchmarks.sh localhost $HIVEMIND_HTTP_PORT 5
- |
./scripts/ci/start-api-benchmarks.sh \
localhost $RUNNER_HIVEMIND_SERVER_HTTP_PORT 5 \
$RUNNER_TEST_JOBS
- ./scripts/xml_report_parser.py . ./tests/tests_api/hivemind/tavern
artifacts:
when: always
paths:
......
version: "3"
version: "3.2"
services:
python-3.6:
image: hivemind/python:3.6
python-3.6-dev:
image: hivemind/python:3.6-dev
build:
context: .
dockerfile: ./scripts/ci/python/3.6/Dockerfile
dockerfile: ./scripts/ci/python/3.6/dev.dockerfile
args:
- user=${USER}
- workdir=/home/${USER}
- workdir=/home/${USER}/hivemind
user: ${USER}
shm_size: 0
# Below command makes your container running forever.
# security_opt:
# # Significant performance boost (about 5%), but very insecure.
# # See https://medium.com/better-programming/faster-python-in-docker-d1a71a9b9917
# # See https://docs.docker.com/engine/security/seccomp/
# - seccomp:unconfined
shm_size: 2g
# command: ["tail", "-f", "/dev/null"]
volumes:
# Sockets of postgres servers on dockers.
- "postgres-10-run:/var/run/postgres-10"
- "postgres-12-run:/var/run/postgres-12"
# Sockets of postgres servers on host.
- "/var/run/postgresql:/var/run/postgresql"
# For keeping python dependencies created in docker.
- "python-3.6-dev:/home/${USER}"
# Application stuff from host.
- "$PWD/hive:$PWD/hive"
- "$PWD/tests:$PWD/tests"
- "$PWD/hive.conf:$PWD/hive.conf"
- "$PWD/pyproject.toml:$PWD/pyproject.toml"
- "$PWD/README.md:$PWD/README.md"
- "$PWD/setup.cfg:$PWD/setup.cfg"
- "$PWD/setup.py:$PWD/setup.py"
- "$PWD/tox.ini:$PWD/tox.ini"
python-3.6:
image: hivemind/python:3.6
build:
context: .
dockerfile: ./scripts/ci/python/3.6/Dockerfile
args:
- user=worker
user: worker
shm_size: 2g
volumes:
# Sockets of postgres servers on host.
- "/var/run/postgresql:/var/run/postgresql"
python-3.8:
image: hivemind/python:3.8
shm_size: 0
build:
context: .
dockerfile: ./scripts/ci/python/3.8/Dockerfile
args:
- user=${USER}
- workdir=/home/${USER}
user: ${USER}
# Below command makes your container running forever.
# command: ["tail", "-f", "/dev/null"]
- user=worker
user: worker
shm_size: 2g
volumes:
# Sockets of postgres servers on host.
- "/var/run/postgresql:/var/run/postgresql"
postgres-10:
image: hivemind/postgres:10
......@@ -37,35 +75,17 @@ services:
environment:
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
volumes:
- postgres-10-pgdata:/var/lib/postgresql/data
- $PWD/$POSTGRES_10_CONF_FILE:/etc/postgresql/postgresql.conf:ro
- postgres-10-run:/var/run/postgresql
ports:
- "${POSTGRES_10_PUBLISHED_PORT}:5432"
shm_size: 0
shm_size: 12g
command: [
"postgres",
"-c", "shared_preload_libraries=pg_stat_statements,pg_qualstats",
"-c", "track_functions=pl",
"-c", "track_io_timing=on",
"-c", "track_activity_query_size=2048",
"-c", "pg_stat_statements.max=10000",
"-c", "pg_stat_statements.track=all",
"-c", "max_connections=100",
"-c", "shared_buffers=12GB",
"-c", "effective_cache_size=36GB",
"-c", "maintenance_work_mem=2GB",
"-c", "checkpoint_completion_target=0.9",
"-c", "wal_buffers=16MB",
"-c", "default_statistics_target=100",
"-c", "random_page_cost=1.1",
"-c", "effective_io_concurrency=200",
"-c", "work_mem=31457kB",
"-c", "min_wal_size=2GB",
"-c", "max_wal_size=8GB",
"-c", "max_worker_processes=12",
"-c", "max_parallel_workers_per_gather=4",
"-c", "max_parallel_workers=12",
"-c", "config_file=/etc/postgresql/postgresql.conf"
]
postgres-12:
image: hivemind/postgres:12
restart: unless-stopped
......@@ -75,56 +95,38 @@ services:
environment:
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
volumes:
- postgres-12-pgdata:/var/lib/postgresql/data
- $PWD/$POSTGRES_12_CONF_FILE:/etc/postgresql/postgresql.conf:ro
- postgres-12-run:/var/run/postgresql
ports:
- "${POSTGRES_12_PUBLISHED_PORT}:5432"
shm_size: 0
# https://pgtune.leopard.in.ua/#/ oltp 48G ram, 12 cpus, ssd
shm_size: 12g
command: [
"postgres",
"-c", "shared_preload_libraries=pg_stat_statements,pg_qualstats",
"-c", "track_functions=pl",
"-c", "track_io_timing=on",
"-c", "track_activity_query_size=2048",
"-c", "pg_stat_statements.max=10000",
"-c", "pg_stat_statements.track=all",
"-c", "max_connections=100",
"-c", "shared_buffers=12GB",
"-c", "effective_cache_size=36GB",
"-c", "maintenance_work_mem=2GB",
"-c", "checkpoint_completion_target=0.9",
"-c", "wal_buffers=16MB",
"-c", "default_statistics_target=100",
"-c", "random_page_cost=1.1",
"-c", "effective_io_concurrency=200",
"-c", "work_mem=31457kB",
"-c", "min_wal_size=2GB",
"-c", "max_wal_size=8GB",
"-c", "max_worker_processes=12",
"-c", "max_parallel_workers_per_gather=4",
"-c", "max_parallel_workers=12",
"-c", "max_parallel_maintenance_workers=4",
"-c", "config_file=/etc/postgresql/postgresql.conf"
]
hived-node:
image: registry.gitlab.syncad.com/hive/hive/consensus_node:00b5ff55
image: $HIVED_IMAGE
restart: unless-stopped
# ports:
# - "2001:2001"
# - "8090:8090"
# - "8091:8091"
shm_size: 0
ports:
- "$HIVED_PUBLISHED_WS_PORT:8090" # websocket
- "$HIVED_PUBLISHED_HTTP_PORT:8091"
shm_size: 12g
entrypoint: /usr/local/hive/consensus/entrypoint.sh
command: >-
--replay-blockchain
--stop-replay-at-block 5000000
command: [
"--replay-blockchain",
"--stop-replay-at-block 5000000"
]
volumes:
- $PWD/scripts/ci/hived-node/entrypoint.sh:/usr/local/hive/consensus/entrypoint.sh
- $PWD/scripts/ci/hived-node/config.ini:/usr/local/hive/consensus/datadir/config.ini
- ${HIVED_BLOCK_LOG_FILE}:/usr/local/hive/consensus/datadir/blockchain/block_log
- hived-node-datadir:/usr/local/hive/consensus/datadir
volumes:
postgres-10-pgdata:
postgres-12-pgdata:
postgres-10-run:
postgres-12-run:
hived-node-datadir:
python-3.6-dev:
......@@ -21,13 +21,13 @@ def setup_logging(conf):
fmt = '%(asctime)s.%(msecs)03d{} %(created).6f ' \
'%(levelname)s - %(name)s - %(message)s'.format(timezone)
logging.basicConfig(format=fmt, datefmt=datefmt)
if timestamp:
elif timestamp:
datefmt='%Y-%m-%d %H:%M:%S'
timezone = time.strftime('%z')
fmt = '%(asctime)s.%(msecs)03d{} ' \
'%(levelname)s - %(name)s - %(message)s'.format(timezone)
logging.basicConfig(format=fmt, datefmt=datefmt)
if epoch:
elif epoch:
fmt = '%(created).6f %(levelname)s - %(name)s - %(message)s'
logging.basicConfig(format=fmt)
else:
......
......@@ -840,7 +840,70 @@
"transaction_merkle_root": "0000000000000000000000000000000000000000",
"extensions": [],
"witness_signature": "",
"transactions": [],
"transactions": [
{
"ref_block_num": 100001,
"ref_block_prefix": 1,
"expiration": "2020-03-23T12:17:00",
"operations": [
{
"type": "custom_json_operation",
"value": {
"required_auths": [],
"required_posting_auths": [
"tester1"
],
"id": "follow",
"json": "[\"follow\",{\"follower\":\"tester1\",\"following\":\"tester2\",\"what\":[\"blog\"]}]"
}
},
{
"type": "custom_json_operation",
"value": {
"required_auths": [],
"required_posting_auths": [
"tester2"
],
"id": "follow",
"json": "[\"follow\",{\"follower\":\"tester2\",\"following\":[\"tester3\", \"tester4\"],\"what\":[\"blog\"]}]"
}
},
{
"type": "custom_json_operation",
"value": {
"required_auths": [],
"required_posting_auths": [
"tester3"
],
"id": "follow",
"json": "[\"follow\",{\"follower\":\"tester3\",\"following\":[\"tester4\"],\"what\":[\"blog\"]}]"
}
},
{
"type": "custom_json_operation",
"value": {
"required_auths": [],
"required_posting_auths": [
"tester4"
],
"id": "follow",
"json": "[\"follow\",{\"follower\":\"tester4\",\"following\":[\"tester5\", \"tester1\"],\"what\":[\"blog\"]}]"
}
},
{
"type": "custom_json_operation",
"value": {
"required_auths": [],
"required_posting_auths": [
"tester5"
],
"id": "follow",
"json": "[\"follow\",{\"follower\":\"tester5\",\"following\":[\"tester1\", \"tester2\"],\"what\":[\"blog\"]}]"
}
}
]
}
],
"block_id": "004c4b4e00000000000000000000000000000000",
"signing_key": "",
"transaction_ids": []
......
stages:
- build
- test
- data-supply
- deploy
- e2e-test
- benchmark-tests
- post-deploy
variables:
GIT_DEPTH: 1
LC_ALL: "C"
GIT_STRATEGY: clone
GIT_SUBMODULE_STRATEGY: recursive
GIT_CLONE_PATH: $CI_BUILDS_DIR/$CI_COMMIT_REF_SLUG/$CI_CONCURRENT_ID/project-name
HIVEMIND_SOURCE_HIVED_URL: $HIVEMIND_SOURCE_HIVED_URL
HIVEMIND_DB_NAME: "hive_$CI_COMMIT_REF_SLUG"
HIVEMIND_HTTP_PORT: $((HIVEMIND_HTTP_PORT + CI_CONCURRENT_ID))
# Configured at gitlab repository settings side
POSTGRES_USER: $HIVEMIND_POSTGRES_USER
POSTGRES_PASSWORD: $HIVEMIND_POSTGRES_PASSWORD
POSTGRES_HOST_AUTH_METHOD: trust
# official way to provide password to psql: http://www.postgresql.org/docs/9.3/static/libpq-envars.html
PGPASSWORD: $HIVEMIND_POSTGRES_PASSWORD
default:
before_script:
- pwd
- echo "CI_NODE_TOTAL is $CI_NODE_TOTAL"
- echo "CI_NODE_INDEX is $CI_NODE_INDEX"
- echo "CI_CONCURRENT_ID is $CI_CONCURRENT_ID"
- echo "CI_COMMIT_REF_SLUG is $CI_COMMIT_REF_SLUG"
hivemind_build:
stage: build
script:
- pip3 install --user --upgrade pip setuptools
- git fetch --tags
- git tag -f ci_implicit_tag
- echo $PYTHONUSERBASE
- "python3 setup.py bdist_egg"
- ls -l dist/*
artifacts:
paths:
- dist/
expire_in: 1 week
tags:
- hivemind
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
when: always
- if: '$CI_COMMIT_BRANCH == "develop"'
when: always
hivemind_sync:
stage: data-supply
environment:
name: "hive sync built from branch $CI_COMMIT_REF_NAME targeting database $HIVEMIND_DB_NAME"
needs:
- job: hivemind_build
artifacts: true
variables:
GIT_STRATEGY: none
PYTHONUSERBASE: ./local-site
script:
- pip3 install --user --upgrade pip setuptools
# WARNING!!! temporarily hardcoded 5000017 instead $HIVEMIND_MAX_BLOCK
# revert that change when $HIVEMIND_MAX_BLOCK will be set to 5000017
- scripts/ci_sync.sh "$HIVEMIND_DB_NAME" "$HIVEMIND_POSTGRESQL_CONNECTION_STRING" "$HIVEMIND_SOURCE_HIVED_URL" 5000017 $HIVEMIND_HTTP_PORT
artifacts:
paths:
- hivemind-sync.log
expire_in: 1 week
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
when: always
- if: '$CI_COMMIT_BRANCH == "develop"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
tags:
- hivemind
hivemind_start_server:
stage: deploy
environment:
name: "hive serve built from branch $CI_COMMIT_REF_NAME exposed on port $HIVEMIND_HTTP_PORT"
url: "http://hive-4.pl.syncad.com:$HIVEMIND_HTTP_PORT"
on_stop: hivemind_stop_server
needs:
- job: hivemind_build
artifacts: true
# - job: hivemind_sync
# artifacts: true
variables:
GIT_STRATEGY: none
PYTHONUSERBASE: ./local-site
script:
- scripts/ci_start_server.sh "$HIVEMIND_DB_NAME" "$HIVEMIND_POSTGRESQL_CONNECTION_STRING" "$HIVEMIND_SOURCE_HIVED_URL" $HIVEMIND_HTTP_PORT
artifacts:
paths:
- hive_server.pid
expire_in: 1 week
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
when: always
- if: '$CI_COMMIT_BRANCH == "develop"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
tags:
- hivemind
hivemind_stop_server:
stage: post-deploy
environment:
name: "hive serve built from branch $CI_COMMIT_REF_NAME exposed on port $HIVEMIND_HTTP_PORT"
action: stop
variables:
GIT_STRATEGY: none
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
script:
- scripts/ci_stop_server.sh hive_server.pid
needs:
- job: hivemind_start_server
artifacts: true
tags:
- hivemind
artifacts:
paths:
- hive_server.log
.hivemind_start_api_smoketest: &common_api_smoketest_job
stage: e2e-test
environment: hive-4.pl.syncad.com
needs:
- job: hivemind_start_server
artifacts: true
variables:
GIT_STRATEGY: none
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
tags:
- hivemind
# API smoke tests: each job runs one pattern directory against the server
# started by hivemind_start_server and publishes a JUnit report.
bridge_api_smoketest:
  <<: *common_api_smoketest_job
  script:
    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" bridge_api_patterns/ api_smoketest_bridge.xml
  artifacts:
    reports:
      junit: api_smoketest_bridge.xml

bridge_api_smoketest_negative:
  <<: *common_api_smoketest_job
  script:
    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" bridge_api_negative/ api_smoketest_bridge_negative.xml
  artifacts:
    reports:
      junit: api_smoketest_bridge_negative.xml

condenser_api_smoketest:
  <<: *common_api_smoketest_job
  script:
    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" condenser_api_patterns/ api_smoketest_condenser_api.xml
  artifacts:
    reports:
      junit: api_smoketest_condenser_api.xml

condenser_api_smoketest_negative:
  <<: *common_api_smoketest_job
  script:
    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" condenser_api_negative/ api_smoketest_condenser_api_negative.xml
  artifacts:
    reports:
      junit: api_smoketest_condenser_api_negative.xml

database_api_smoketest:
  <<: *common_api_smoketest_job
  script:
    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" database_api_patterns/ api_smoketest_database_api.xml
  artifacts:
    reports:
      junit: api_smoketest_database_api.xml

database_api_smoketest_negative:
  <<: *common_api_smoketest_job
  script:
    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" database_api_negative/ api_smoketest_database_api_negative.xml
  artifacts:
    reports:
      junit: api_smoketest_database_api_negative.xml
follow_api_smoketest:
  <<: *common_api_smoketest_job
  script:
    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" follow_api_patterns/ api_smoketest_follow_api.xml
  artifacts:
    reports:
      # Fixed: the report path must match the file the script actually
      # writes (was api_smoketest.xml, which this job never produces).
      junit: api_smoketest_follow_api.xml
follow_api_smoketest_negative:
  <<: *common_api_smoketest_job
  script:
    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" follow_api_negative/ api_smoketest_follow_api_negative.xml
  artifacts:
    reports:
      junit: api_smoketest_follow_api_negative.xml
tags_api_smoketest:
  <<: *common_api_smoketest_job
  script:
    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" tags_api_patterns/ api_smoketest_tags_api.xml
  artifacts:
    reports:
      junit: api_smoketest_tags_api.xml
tags_api_smoketest_negative:
  <<: *common_api_smoketest_job
  script:
    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" tags_api_negative/ api_smoketest_tags_api_negative.xml
  artifacts:
    reports:
      # Added for consistency with the other smoketest jobs so results
      # appear in the GitLab test-report UI.
      junit: api_smoketest_tags_api_negative.xml
mock_tests:
  <<: *common_api_smoketest_job
  script:
    - scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" mock_tests/ api_smoketest_mock_tests.xml
  artifacts:
    reports:
      # Added for consistency with the other smoketest jobs so results
      # appear in the GitLab test-report UI.
      junit: api_smoketest_mock_tests.xml
# Benchmarks the running API server; allow_failure keeps a slow run
# from blocking the pipeline.
api_smoketest_benchmark:
  stage: benchmark-tests
  environment: hive-4.pl.syncad.com
  needs:
    - job: hivemind_start_server
      artifacts: true
  allow_failure: true
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
      when: always
    - if: '$CI_PIPELINE_SOURCE == "push"'
      when: manual
    - when: on_success
  tags:
    - hivemind
  script:
    - ./scripts/ci_start_api_benchmarks.sh localhost $HIVEMIND_HTTP_PORT 5
  artifacts:
    when: always  # keep the report even when benchmarks fail
    paths:
      - tavern_benchmarks_report.html
# Child pipeline no. 1, triggered from the parent pipeline.
stages:
  - run

variables:
  GIT_DEPTH: 10
  GIT_STRATEGY: fetch # It's quick, but noticed errors with that, sometimes.
  # GIT_STRATEGY: clone
  # GIT_STRATEGY: none
  GIT_SUBMODULE_STRATEGY: recursive
  MY_VARIABLE: "bamboo"

default:
  image: hivemind/python:3.6
  interruptible: false
  cache: &global-cache
    # Per-branch caching. CI_COMMIT_REF_SLUG is the same thing.
    # key: "$CI_COMMIT_REF_NAME"
    # Per project caching – use any key.
    # Change this key, if you need to clear cache.
    key: common-1
    paths:
      - .cache/
      - .venv/
      - .tox/
  before_script:
    - echo "I am before_script in child-1. MY_VARIABLE is $MY_VARIABLE"
  after_script:
    # Fixed duplicated word in the log message ("in in child-1").
    - echo "I am after_script in child-1. MY_VARIABLE is $MY_VARIABLE"

# Manual job that deliberately fails after a delay
# (exercises parent/child pipeline status reporting).
child-1-job:
  stage: run
  rules:
    - when: manual
  script:
    - echo "I am script in child-1-job. MY_VARIABLE is $MY_VARIABLE"
    - sleep 30
    - exit 1
  tags:
    - hivemind-light-job
# Child pipeline no. 2, triggered from the parent pipeline.
stages:
  - run

variables:
  GIT_DEPTH: 10
  GIT_STRATEGY: fetch # It's quick, but noticed errors with that, sometimes.
  # GIT_STRATEGY: clone
  # GIT_STRATEGY: none
  GIT_SUBMODULE_STRATEGY: recursive
  MY_VARIABLE: "bamboo"

default:
  image: hivemind/python:3.6
  interruptible: false
  cache: &global-cache
    # Per-branch caching. CI_COMMIT_REF_SLUG is the same thing.
    # key: "$CI_COMMIT_REF_NAME"
    # Per project caching – use any key.
    # Change this key, if you need to clear cache.
    key: common-1
    paths:
      - .cache/
      - .venv/
      - .tox/
  before_script:
    - echo "I am before_script in child-2. MY_VARIABLE is $MY_VARIABLE"
  after_script:
    - echo "I am after_script in child-2. MY_VARIABLE is $MY_VARIABLE"

child-2-job:
  stage: run
  script:
    - echo "I am script in child-2-job. MY_VARIABLE is $MY_VARIABLE"
  tags:
    - hivemind-light-job
# See https://gitlab.com/fgrimshaw/dynamic-ci
# See https://gitlab.com/gitlab-org/gitlab/-/issues/212373
# I tested this feature, but our current version of Gitlab 13.2.2
# doesn't support it well. Child pipelines run with no problem,
# but UI displays wrong badges, for instance job was marked as
# still running, though it was finished. Also jobs with rule
# "when: manual" were started without user's permission.
# We need to wait for better support in Gitlab UI.
# Parent pipeline that triggers the two child pipelines above.
stages:
  - run

variables:
  GIT_STRATEGY: none

# Runs only when the commit message mentions "child-1".
trigger-child-1:
  stage: run
  rules:
    - if: '$CI_COMMIT_MESSAGE =~ /child-1/'
      when: always
  trigger:
    include: .gitlab-ci-child-pipeline-1.yaml
    strategy: depend  # parent job mirrors the child pipeline's status

# Runs only when the commit message mentions "child-2".
trigger-child-2:
  stage: run
  rules:
    - if: '$CI_COMMIT_MESSAGE =~ /child-2/'
      when: always
  trigger:
    include: .gitlab-ci-child-pipeline-2.yaml
    strategy: depend
......@@ -2,25 +2,27 @@
set -euo pipefail
collect_stats() {
collect_db_stats() {
echo "Collecting statistics from database ${HIVEMIND_DB_NAME}"
mkdir -p pg-stats
DIR=$PWD/pg-stats
PGPASSWORD=${POSTGRES_PASSWORD} psql \
--username "${POSTGRES_USER}" \
--host ${POSTGRES_HOST} \
--port ${POSTGRES_PORT} \
PGPASSWORD=${RUNNER_POSTGRES_APP_USER_PASSWORD} psql \
--username "${RUNNER_POSTGRES_APP_USER}" \
--host ${RUNNER_POSTGRES_HOST} \
--port ${RUNNER_POSTGRES_PORT} \
--dbname ${HIVEMIND_DB_NAME} << EOF
\timing
\copy (select * from pg_settings) to '$DIR/pg_settings.csv' WITH CSV HEADER
\copy (select * from pg_stat_user_tables) to '$DIR/pg_stat_user_tables.csv' WITH CSV HEADER
-- Disabled, because this table is too big.
--\copy (select * from pg_stat_statements) to '$DIR/pg_stat_statements.csv' WITH CSV HEADER
-- \copy (select * from pg_stat_statements) to '$DIR/pg_stat_statements.csv' WITH CSV HEADER
/*
-- Doesn't look useful.
-- See https://github.com/powa-team/pg_qualstats
\echo pg_qualstats index advisor
SELECT v
......@@ -33,8 +35,9 @@ SELECT v
FROM json_array_elements(
pg_qualstats_index_advisor(min_filter => 50)->'unoptimised') v
ORDER BY v::text COLLATE "C";
*/
EOF
}
collect_stats
collect_db_stats
......@@ -2,46 +2,48 @@
set -euo pipefail
# TODO We have troubles with user, when postgresql is run from docker.
# We need user name `postgres`, not other, I'm afraid.
# ADMIN_POSTGRES_USER=postgres
# ADMIN_POSTGRES_USER_PASSWORD=postgres
create_db() {
echo "Creating user ${HIVEMIND_POSTGRES_USER} and database ${HIVEMIND_DB_NAME}, owned by this user"
echo "Creating user ${RUNNER_POSTGRES_APP_USER} and database ${HIVEMIND_DB_NAME}, owned by this user"
TEMPLATE="template_monitoring"
PGPASSWORD=${ADMIN_POSTGRES_USER_PASSWORD} psql \
--username "${ADMIN_POSTGRES_USER}" \
--host ${POSTGRES_HOST} \
--port ${POSTGRES_PORT} \
PGPASSWORD=${RUNNER_POSTGRES_ADMIN_USER_PASSWORD} psql \
--username "${RUNNER_POSTGRES_ADMIN_USER}" \
--host ${RUNNER_POSTGRES_HOST} \
--port ${RUNNER_POSTGRES_PORT} \
--dbname postgres << EOF
\echo Creating role ${HIVEMIND_POSTGRES_USER}
\echo Creating role ${RUNNER_POSTGRES_APP_USER}
DO \$$
BEGIN
IF EXISTS (SELECT * FROM pg_user
WHERE pg_user.usename = '${HIVEMIND_POSTGRES_USER}') THEN
raise warning 'Role % already exists', '${HIVEMIND_POSTGRES_USER}';
WHERE pg_user.usename = '${RUNNER_POSTGRES_APP_USER}') THEN
raise warning 'Role % already exists', '${RUNNER_POSTGRES_APP_USER}';
ELSE
CREATE ROLE ${HIVEMIND_POSTGRES_USER}
WITH LOGIN PASSWORD '${HIVEMIND_POSTGRES_USER_PASSWORD}';
CREATE ROLE ${RUNNER_POSTGRES_APP_USER}
WITH LOGIN PASSWORD '${RUNNER_POSTGRES_APP_USER_PASSWORD}';
END IF;
END
\$$;
\echo Creating database ${HIVEMIND_DB_NAME}
-- We drop database to enable retry of CI job.
\echo Dropping database ${HIVEMIND_DB_NAME}
DROP DATABASE IF EXISTS ${HIVEMIND_DB_NAME};
CREATE DATABASE ${HIVEMIND_DB_NAME} TEMPLATE template_monitoring
OWNER ${HIVEMIND_POSTGRES_USER};
\echo Creating database ${HIVEMIND_DB_NAME}
CREATE DATABASE ${HIVEMIND_DB_NAME} TEMPLATE ${TEMPLATE}
OWNER ${RUNNER_POSTGRES_APP_USER};
COMMENT ON DATABASE ${HIVEMIND_DB_NAME} IS
'Database for Gitlab CI pipeline ${CI_PIPELINE_URL}, commit ${CI_COMMIT_SHORT_SHA}';
\c ${HIVEMIND_DB_NAME}
drop schema if exists hivemind_admin cascade;
create schema hivemind_admin
authorization ${HIVEMIND_POSTGRES_USER};
authorization ${RUNNER_POSTGRES_APP_USER};
CREATE SEQUENCE hivemind_admin.database_metadata_id_seq
INCREMENT 1
......@@ -63,10 +65,10 @@ CREATE TABLE hivemind_admin.database_metadata
);
alter sequence hivemind_admin.database_metadata_id_seq
OWNER TO ${HIVEMIND_POSTGRES_USER};
OWNER TO ${RUNNER_POSTGRES_APP_USER};
alter table hivemind_admin.database_metadata
OWNER TO ${HIVEMIND_POSTGRES_USER};
OWNER TO ${RUNNER_POSTGRES_APP_USER};
insert into hivemind_admin.database_metadata
(database_name, ci_pipeline_url, ci_pipeline_id, commit_sha)
......@@ -75,6 +77,8 @@ values (
${CI_PIPELINE_ID}, '${CI_COMMIT_SHORT_SHA}'
);
-- VACUUM VERBOSE ANALYZE;
\q
EOF
......
......@@ -2,26 +2,30 @@
set -euo pipefail
echo "Dumping database ${HIVEMIND_DB_NAME}"
dump_db() {
echo "Dumping database ${HIVEMIND_DB_NAME}"
export PGPASSWORD=${POSTGRES_PASSWORD}
exec_path=$POSTGRES_CLIENT_TOOLS_PATH/$POSTGRES_MAJOR_VERSION/bin
export PGPASSWORD=${RUNNER_POSTGRES_APP_USER_PASSWORD}
exec_path=$POSTGRES_CLIENT_TOOLS_PATH/$POSTGRES_MAJOR_VERSION/bin
echo "Using pg_dump version $($exec_path/pg_dump --version)"
echo "Using pg_dump version $($exec_path/pg_dump --version)"
time $exec_path/pg_dump \
--username="${POSTGRES_USER}" \
--host="${POSTGRES_HOST}" \
--port="${POSTGRES_PORT}" \
--dbname="${HIVEMIND_DB_NAME}" \
--schema=public \
--format=directory \
--jobs=4 \
--compress=6 \
--quote-all-identifiers \
--lock-wait-timeout=30000 \
--no-privileges --no-acl \
--verbose \
--file="pg-dump-${HIVEMIND_DB_NAME}"
time $exec_path/pg_dump \
--username="${RUNNER_POSTGRES_APP_USER}" \
--host="${RUNNER_POSTGRES_HOST}" \
--port="${RUNNER_POSTGRES_PORT}" \
--dbname="${HIVEMIND_DB_NAME}" \
--schema=public \
--format=directory \
--jobs=4 \
--compress=6 \
--quote-all-identifiers \
--lock-wait-timeout=30000 \
--no-privileges --no-acl \
--verbose \
--file="pg-dump-${HIVEMIND_DB_NAME}"
unset PGPASSWORD
unset PGPASSWORD
}
dump_db
......@@ -5,17 +5,16 @@
set -euo pipefail
get_postgres_version() {
# Get major version of postgres server.
version=$(
PGPASSWORD=$POSTGRES_PASSWORD psql -X -A -t \
--username $POSTGRES_USER \
--host $POSTGRES_HOST \
--port ${POSTGRES_PORT} \
PGPASSWORD=$RUNNER_POSTGRES_APP_USER_PASSWORD psql -X -A -t \
--username $RUNNER_POSTGRES_APP_USER \
--host $RUNNER_POSTGRES_HOST \
--port ${RUNNER_POSTGRES_PORT} \
--dbname postgres \
-c "show server_version_num;"
)
echo $(echo $version | cut -c1-2)
}
get_postgres_version
stages:
- build
- data-supply
- e2e-test
variables:
PGPASSWORD: $HIVEMIND_POSTGRES_PASSWORD
# GIT_DEPTH: 10
GIT_DEPTH: 1
# GIT_STRATEGY: fetch # Noticed errors with that.
GIT_STRATEGY: clone
# GIT_STRATEGY: none
GIT_SUBMODULE_STRATEGY: recursive
PIPENV_VENV_IN_PROJECT: 1
PIPENV_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pipenv"
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
POSTGRES_CLIENT_TOOLS_PATH: /usr/lib/postgresql
# POSTGRES_HOST: 172.17.0.1 # Host
# POSTGRES_HOST: postgres-10 # Docker service
POSTGRES_PORT: 5432
# Set on project level in Gitlab CI.
# We need create role and create db privileges.
# ADMIN_POSTGRES_USER: postgres
# ADMIN_POSTGRES_USER_PASSWORD: postgres
# Needed by old runner ssh-executor, probably.
POSTGRES_USER: $HIVEMIND_POSTGRES_USER
POSTGRES_PASSWORD: $HIVEMIND_POSTGRES_PASSWORD
POSTGRES_HOST_AUTH_METHOD: trust
HIVEMIND_DB_NAME: "hive_${CI_COMMIT_REF_SLUG}_pipeline_id_${CI_PIPELINE_ID}"
HIVEMIND_EXEC_NAME: $DB_NAME
# Set on project level in Gitlab CI.
# HIVEMIND_POSTGRES_USER: hivemind_ci
# Set on project level in Gitlab CI.
HIVEMIND_POSTGRES_USER_PASSWORD: $HIVEMIND_POSTGRES_PASSWORD
# Set on project level in Gitlab CI.
# HIVEMIND_HTTP_PORT: 18080
# Set on project level in Gitlab CI.
# HIVEMIND_MAX_BLOCK: 10001
# HIVEMIND_MAX_BLOCK: 5000001
# Set on project level in Gitlab CI.
# HIVEMIND_SOURCE_HIVED_URL: {"default":"http://hive-4.pl.syncad.com:8091"}
# HIVEMIND_SOURCE_HIVED_URL: {"default":"192.168.6.136:8091"}
# HIVEMIND_SOURCE_HIVED_URL: {"default":"http://172.17.0.1:8091"}
# Useful snippets for Gitlab CI, but not used currently.
.postgres-10: &postgres-10
name: hivemind/postgres:10
......@@ -114,17 +56,6 @@ variables:
"-c", "max_parallel_workers=4",
]
.setup-pip: &setup-pip
- python -m venv .venv
- source .venv/bin/activate
- time pip install --upgrade pip setuptools wheel
- pip --version
- easy_install --version
- wheel version
- pipenv --version
- poetry --version
- time pip install --editable .
.setup-setuptools: &setup-setuptools
- python -m venv .venv
- source .venv/bin/activate
......@@ -136,9 +67,9 @@ variables:
- poetry --version
- time python setup.py develop
# no virtual environment
.setuptools: &setup-setuptools-no-venv
# setuptools will install all dependencies to this directory.
.setup-setuptools-no-venv: &setup-setuptools-no-venv
# No virtual environment here.
# Setuptools will install all dependencies to PYTHONUSERBASE directory.
- export PYTHONUSERBASE=./local-site
- time pip install --upgrade pip setuptools wheel
- pip --version
......@@ -148,7 +79,6 @@ variables:
- poetry --version
- mkdir -p `python -m site --user-site`
- python setup.py install --user --force
# we can probably also run via: ./hive/cli.py
- ln -sf ./local-site/bin/hive "$HIVEMIND_EXEC_NAME"
.setup-pipenv: &setup-pipenv
......@@ -167,67 +97,6 @@ variables:
- pipenv --version
- poetry --version
.set-variables: &set-variables
- whoami
# list all variables predefined by Gitlab CI
# - export
- echo "CI_PIPELINE_URL is $CI_PIPELINE_URL"
- echo "CI_PIPELINE_ID is $CI_PIPELINE_ID"
- echo "CI_COMMIT_SHORT_SHA is $CI_COMMIT_SHORT_SHA"
- echo "CI_COMMIT_REF_SLUG is $CI_COMMIT_REF_SLUG"
- export HIVEMIND_DB_NAME=${HIVEMIND_DB_NAME//[^a-zA-Z0-9_]/_}
- echo "HIVEMIND_DB_NAME is $HIVEMIND_DB_NAME"
- export HIVEMIND_POSTGRESQL_CONNECTION_STRING=postgresql://${HIVEMIND_POSTGRES_USER}:${HIVEMIND_POSTGRES_USER_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${HIVEMIND_DB_NAME}
.fetch-git-tags: &fetch-git-tags
# - git fetch --tags
- git tag -f ci_implicit_tag # Needed to build python package
.start_timer: &start-timer
- ./scripts/ci/timer.sh start
.stop-timer: &stop-timer
- ./scripts/ci/timer.sh check
.hive-sync-script-common: &hive-sync-script-common
- ./scripts/ci/wait-for-postgres.sh ${POSTGRES_HOST} ${POSTGRES_PORT}
- export POSTGRES_MAJOR_VERSION=$(./scripts/ci/get-postgres-version.sh)
- ./scripts/ci/create-db.sh
- ./scripts/ci/hive-sync.sh
- ./scripts/ci/collect-db-stats.sh
.default-rules: &default-rules
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "develop"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
default:
image: hivemind/python:3.6
# image: hivemind/python:3.8
interruptible: false
timeout: 2h
cache: &global-cache
# Per-branch caching. CI_COMMIT_REF_SLUG is the same thing.
# key: "$CI_COMMIT_REF_NAME"
# Per project caching – use any key. Change this key, if you need
# to clear cache
key: common-1
paths:
- .cache/
- .venv/
- .tox/
before_script:
- *start-timer
- *fetch-git-tags
- *set-variables
- *setup-pip
after_script:
- *stop-timer
##### Jobs #####
......@@ -257,31 +126,16 @@ default:
tags:
- hivemind-light-job
# Postgres shared
hivemind-sync:
<<: *default-rules
stage: data-supply
needs: []
script:
- *hive-sync-script-common
artifacts:
paths:
- hivemind-sync.log
- pg-stats
expire_in: 7 days
tags:
- hivemind-heavy-job
# Postgres as service
.hivemind-sync:
<<: *default-rules
# Postgres as docker service
.hivemind-sync-postgres-as-service:
# <<: *default-rules
stage: data-supply
services:
- *postgres-10
# - *postgres-12
needs: []
script:
- *hive-sync-script-common
# - *hive-sync-script-common
# - ./scripts/ci/dump-db.sh
artifacts:
paths:
......@@ -292,131 +146,9 @@ hivemind-sync:
tags:
- hivemind-heavy-job
.e2e-test-common:
rules:
- when: on_success
needs:
- job: hivemind-sync
artifacts: false
before_script:
- *start-timer
- *fetch-git-tags
- *set-variables
- *setup-pip
- ./scripts/ci/wait-for-postgres.sh ${POSTGRES_HOST} ${POSTGRES_PORT}
- ./scripts/ci/hive-server.sh start
after_script:
- ./scripts/ci/hive-server.sh stop
- *stop-timer
tags:
- hivemind-light-job
bridge_api_smoketest:
# Test job doing nothing (for debugging CI)
.just-a-test:
stage: e2e-test
extends: .e2e-test-common
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
bridge_api_patterns/ api_smoketest_bridge.xml
artifacts:
reports:
junit: api_smoketest_bridge.xml
bridge_api_smoketest_negative:
stage: e2e-test
extends: .e2e-test-common
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
bridge_api_negative/ api_smoketest_bridge_negative.xml
artifacts:
reports:
junit: api_smoketest_bridge_negative.xml
condenser_api_smoketest:
stage: e2e-test
extends: .e2e-test-common
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
condenser_api_patterns/ api_smoketest_condenser_api.xml
artifacts:
reports:
junit: api_smoketest_condenser_api.xml
condenser_api_smoketest_negative:
stage: e2e-test
extends: .e2e-test-common
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
condenser_api_negative/ api_smoketest_condenser_api_negative.xml
artifacts:
reports:
junit: api_smoketest_condenser_api_negative.xml
database_api_smoketest:
stage: e2e-test
extends: .e2e-test-common
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
database_api_patterns/ api_smoketest_database_api.xml
artifacts:
reports:
junit: api_smoketest_database_api.xml
database_api_smoketest_negative:
stage: e2e-test
extends: .e2e-test-common
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
database_api_negative/ api_smoketest_database_api_negative.xml
artifacts:
reports:
junit: api_smoketest_database_api_negative.xml
follow_api_smoketest:
stage: e2e-test
extends: .e2e-test-common
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
follow_api_patterns/ api_smoketest_follow_api.xml
artifacts:
reports:
junit: api_smoketest_follow_api.xml
follow_api_smoketest_negative:
stage: e2e-test
extends: .e2e-test-common
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
follow_api_negative/ api_smoketest_follow_api_negative.xml
artifacts:
reports:
junit: api_smoketest_follow_api_negative.xml
tags_api_smoketest:
stage: e2e-test
extends: .e2e-test-common
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
tags_api_patterns/ api_smoketest_tags_api.xml
artifacts:
reports:
junit: api_smoketest_tags_api.xml
tags_api_smoketest_negative:
stage: e2e-test
extends: .e2e-test-common
script:
- >
scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
tags_api_negative/ api_smoketest_tags_api_negative.xml
artifacts:
reports:
junit: api_smoketest_tags_api_negative.xml
- echo "Run some tests"
......@@ -4,17 +4,11 @@
set -euo pipefail
JOB=$1
HIVEMIND_PID=0
MERCY_KILL_TIMEOUT=5
START_DELAY=5
# For debug only!
# HIVED_URL='{"default":"http://hived-node:8091"}'
# HIVED_URL='{"default":"http://172.17.0.1:8091"}'
# HIVED_URL='{"default":"http://127.0.0.1:8091"}'
# HIVEMIND_HTTP_PORT="8080"
# HIVEMIND_POSTGRESQL_CONNECTION_STRING="postgresql://syncad:devdev@localhost:5432/hive_test"
check_pid() {
if [ -f hive_server.pid ]; then
HIVEMIND_PID=`cat hive_server.pid`
......@@ -24,6 +18,7 @@ check_pid() {
echo "Process pid $HIVEMIND_PID is running"
else
# Process is not running
echo "Process pid $HIVEMIND_PID is not running"
rm hive_server.pid
HIVEMIND_PID=0
fi
......@@ -33,7 +28,7 @@ check_pid() {
}
stop() {
if [ "$HIVEMIND_PID" -gt "0" ]; then
if [ "$HIVEMIND_PID" -gt 0 ]; then
HIVEMIND_PID=`cat hive_server.pid`
# Send INT signal and give it some time to stop.
......@@ -52,22 +47,25 @@ stop() {
fi
}
start() {
if [ "$HIVEMIND_PID" -gt "0" ]; then
if [ "$HIVEMIND_PID" -gt 0 ]; then
echo "Hive server is already running (pid $HIVEMIND_PID)"
exit 0
fi
echo "Starting hive server on port ${HIVEMIND_HTTP_PORT}"
echo "Starting hive server on port ${RUNNER_HIVEMIND_SERVER_HTTP_PORT}"
USER=${RUNNER_POSTGRES_APP_USER}:${RUNNER_POSTGRES_APP_USER_PASSWORD}
OPTIONS="host=${RUNNER_POSTGRES_HOST}&port=${RUNNER_POSTGRES_PORT}"
DATABASE_URL="postgresql://${USER}@/${HIVEMIND_DB_NAME}?${OPTIONS}"
hive server \
--log-mask-sensitive-data \
--pid-file hive_server.pid \
--http-server-port $HIVEMIND_HTTP_PORT \
--steemd-url "$HIVED_URL" \
--database-url "$HIVEMIND_POSTGRESQL_CONNECTION_STRING" 2>&1 \
--http-server-port ${RUNNER_HIVEMIND_SERVER_HTTP_PORT} \
--steemd-url "${RUNNER_HIVED_URL}" \
--database-url "${DATABASE_URL}" 2>&1 \
| tee -ia hivemind-server.log &
HIVEMIND_PID=$!
......@@ -81,11 +79,14 @@ start() {
if ps -p $HIVEMIND_PID > /dev/null
then
echo "Hive server is running (pid $HIVEMIND_PID)"
# Write pid to file, sometimes there's wrong pid there.
echo $HIVEMIND_PID > hive_server.pid
exit 0
else
# Check if process executed successfully or not.
if wait $HIVEMIND_PID; then
echo "Hive server has been started (pid $HIVEMIND_PID)"
echo $HIVEMIND_PID > hive_server.pid
exit 0
else
RESULT=$?
......@@ -107,5 +108,16 @@ start() {
}
check_pid
"$1"
main() {
check_pid
if [ "$JOB" = "start" ]; then
start
elif [ "$JOB" = "stop" ]; then
stop
else
echo "Invalid argument"
exit 1
fi
}
main
......@@ -2,25 +2,35 @@
set -euo pipefail
cat << EOF
Starting hive sync using hived url: ${HIVED_URL}.
Max sync block is: ${HIVEMIND_MAX_BLOCK}.
# For debug only!
# RUNNER_HIVEMIND_SYNC_MAX_BLOCK=10000
# RUNNER_HIVED_URL='{"default":"http://hived-node:8091"}'
# RUNNER_HIVED_URL='{"default":"http://172.17.0.1:8091"}'
hive_sync() {
# Start hive sync process
cat << EOF
Starting hive sync using hived url: ${RUNNER_HIVED_URL}.
Max sync block is: ${RUNNER_HIVEMIND_SYNC_MAX_BLOCK}.
EOF
# For debug only!
# HIVEMIND_MAX_BLOCK=10001
# HIVED_URL='{"default":"http://hived-node:8091"}'
# HIVED_URL='{"default":"http://172.17.0.1:8091"}'
DATABASE_URL="postgresql://${HIVEMIND_POSTGRES_USER}:${HIVEMIND_POSTGRES_USER_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${HIVEMIND_DB_NAME}"
hive sync \
--log-mask-sensitive-data \
--pid-file hive_sync.pid \
--test-max-block=${HIVEMIND_MAX_BLOCK} \
--exit-after-sync \
--test-profile=False \
--steemd-url "$HIVED_URL" \
--prometheus-port 11011 \
--database-url "$DATABASE_URL" \
2>&1 | tee -i hivemind-sync.log
USER=${RUNNER_POSTGRES_APP_USER}:${RUNNER_POSTGRES_APP_USER_PASSWORD}
OPTIONS="host=${RUNNER_POSTGRES_HOST}&port=${RUNNER_POSTGRES_PORT}"
DATABASE_URL="postgresql://${USER}@/${HIVEMIND_DB_NAME}?${OPTIONS}"
hive sync \
--log-mask-sensitive-data \
--pid-file hive_sync.pid \
--test-max-block=${RUNNER_HIVEMIND_SYNC_MAX_BLOCK} \
--exit-after-sync \
--test-profile=False \
--steemd-url "${RUNNER_HIVED_URL}" \
--prometheus-port 11011 \
--database-url "${DATABASE_URL}" \
--mock-block-data-path mock_data/block_data/follow_op/mock_block_data_follow.json \
2>&1 | tee -i hivemind-sync.log
}
hive_sync
......@@ -8,7 +8,7 @@ plugin = webserver p2p json_rpc
plugin = database_api
# condenser_api enabled per abw request
plugin = condenser_api
plugin = block_api
plugin = block_api
# gandalf enabled witness + rc
plugin = witness
plugin = rc
......@@ -34,7 +34,7 @@ plugin = block_api network_broadcast_api rc_api
history-disable-pruning = 1
account-history-rocksdb-path = "blockchain/account-history-rocksdb-storage"
#shared-file-dir = "/run/hive"
# shared-file-dir = "/run/hive"
shared-file-size = 20G
shared-file-full-threshold = 9500
shared-file-scale-rate = 1000
......@@ -45,8 +45,8 @@ market-history-bucket-size = [15,60,300,3600,86400]
market-history-buckets-per-size = 5760
p2p-endpoint = 0.0.0.0:2001
p2p-seed-node =
#gtg.openhive.network:2001
p2p-seed-node =
# gtg.openhive.network:2001
transaction-status-block-depth = 64000
transaction-status-track-after-block = 42000000
......
......@@ -4,6 +4,7 @@
MYDIR="$PWD"
WORKDIR="/usr/local/hive/consensus"
IMAGE="registry.gitlab.syncad.com/hive/hive/consensus_node:00b5ff55"
docker run -d \
--name hived-replay-5000000 \
......@@ -14,5 +15,5 @@ docker run -d \
-v $MYDIR/blockchain/block_log:$WORKDIR/datadir/blockchain/block_log \
-v $MYDIR/entrypoint.sh:$WORKDIR/entrypoint.sh \
--entrypoint $WORKDIR/entrypoint.sh \
registry.gitlab.syncad.com/hive/hive/consensus_node:00b5ff55 \
$IMAGE \
--replay-blockchain --stop-replay-at-block 5000000
This diff is collapsed.