Commit 23b2cbae authored by Wojciech Barcik, committed by Bartek Wrona

Implemented concurrent pipelines on Gitlab CI

parent 62e741f3
Merge request !456: Release candidate v1.24
Showing 1320 additions and 181 deletions
@@ -37,6 +37,7 @@ var/
*.egg-info/
.installed.cfg
*.egg
pip-wheel-metadata
# PyInstaller
# Usually these files are written by a python script from a template
@@ -134,10 +135,12 @@ tests/failed_blocks/
# version.py
hive/version.py
# hivemind.port
hivemind.port
hive_server.pid
hivemind-server.pid
Pipfile.lock
pghero.yml
*~
.tmp
# https://hub.docker.com/r/library/python/tags/
image: "python:3.7"
stages:
- build
- test
- data-supply
- deploy
- e2e-test
- post-deploy
variables:
GIT_DEPTH: 1
LC_ALL: "C"
GIT_STRATEGY: clone
GIT_SUBMODULE_STRATEGY: recursive
GIT_CLONE_PATH: $CI_BUILDS_DIR/$CI_COMMIT_REF_SLUG/$CI_CONCURRENT_ID/project-name
HIVEMIND_SOURCE_HIVED_URL: $HIVEMIND_SOURCE_HIVED_URL
HIVEMIND_DB_NAME: "hive_$CI_COMMIT_REF_SLUG"
HIVEMIND_HTTP_PORT: $((HIVEMIND_HTTP_PORT + CI_CONCURRENT_ID))
# Configured at gitlab repository settings side
POSTGRES_USER: $HIVEMIND_POSTGRES_USER
POSTGRES_PASSWORD: $HIVEMIND_POSTGRES_PASSWORD
POSTGRES_HOST_AUTH_METHOD: trust
# official way to provide password to psql: http://www.postgresql.org/docs/9.3/static/libpq-envars.html
PGPASSWORD: $HIVEMIND_POSTGRES_PASSWORD
before_script:
- pwd
- echo "CI_NODE_TOTAL is $CI_NODE_TOTAL"
- echo "CI_NODE_INDEX is $CI_NODE_INDEX"
- echo "CI_CONCURRENT_ID is $CI_CONCURRENT_ID"
- echo "CI_COMMIT_REF_SLUG is $CI_COMMIT_REF_SLUG"
hivemind_build:
stage: build
script:
- pip3 install --user --upgrade pip setuptools
- git fetch --tags
- git tag -f ci_implicit_tag
- echo $PYTHONUSERBASE
- "python3 setup.py bdist_egg"
- ls -l dist/*
artifacts:
paths:
- dist/
expire_in: 1 week
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "develop"'
when: always
- when: always
tags:
- hivemind
hivemind_sync:
stage: data-supply
environment:
name: "hive sync built from branch $CI_COMMIT_REF_NAME targeting database $HIVEMIND_DB_NAME"
needs:
- job: hivemind_build
artifacts: true
variables:
GIT_STRATEGY: none
PYTHONUSERBASE: ./local-site
script:
- pip3 install --user --upgrade pip setuptools
- scripts/ci_sync.sh "$HIVEMIND_DB_NAME" "$HIVEMIND_POSTGRESQL_CONNECTION_STRING" "$HIVEMIND_SOURCE_HIVED_URL" $HIVEMIND_MAX_BLOCK $HIVEMIND_HTTP_PORT
artifacts:
paths:
- hivemind-sync.log
expire_in: 1 week
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "develop"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
tags:
- hivemind
hivemind_start_server:
stage: deploy
environment:
name: "hive serve built from branch $CI_COMMIT_REF_NAME exposed on port $HIVEMIND_HTTP_PORT"
url: "http://hive-4.pl.syncad.com:$HIVEMIND_HTTP_PORT"
on_stop: hivemind_stop_server
needs:
- job: hivemind_build
artifacts: true
# - job: hivemind_sync
# artifacts: true
variables:
GIT_STRATEGY: none
PYTHONUSERBASE: ./local-site
script:
- scripts/ci_start_server.sh "$HIVEMIND_DB_NAME" "$HIVEMIND_POSTGRESQL_CONNECTION_STRING" "$HIVEMIND_SOURCE_HIVED_URL" $HIVEMIND_HTTP_PORT
artifacts:
paths:
- hive_server.pid
expire_in: 1 week
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "develop"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
tags:
- hivemind
hivemind_stop_server:
stage: post-deploy
environment:
name: "hive serve built from branch $CI_COMMIT_REF_NAME exposed on port $HIVEMIND_HTTP_PORT"
action: stop
variables:
GIT_STRATEGY: none
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- when: manual
script:
- scripts/ci_stop_server.sh hive_server.pid
needs:
- job: hivemind_start_server
artifacts: true
tags:
- hivemind
artifacts:
paths:
- hive_server.log
.hivemind_start_api_smoketest: &common_api_smoketest_job
stage: e2e-test
environment: hive-4.pl.syncad.com
needs:
- job: hivemind_start_server
artifacts: true
variables:
GIT_STRATEGY: none
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
tags:
- hivemind
bridge_api_smoketest:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" bridge_api_patterns/ api_smoketest_bridge.xml
artifacts:
reports:
junit: api_smoketest_bridge.xml
bridge_api_smoketest_negative:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" bridge_api_negative/ api_smoketest_bridge_negative.xml
artifacts:
reports:
junit: api_smoketest_bridge_negative.xml
condenser_api_smoketest:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" condenser_api_patterns/ api_smoketest_condenser_api.xml
artifacts:
reports:
junit: api_smoketest_condenser_api.xml
condenser_api_smoketest_negative:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" condenser_api_negative/ api_smoketest_condenser_api_negative.xml
artifacts:
reports:
junit: api_smoketest_condenser_api_negative.xml
database_api_smoketest:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" database_api_patterns/ api_smoketest_database_api.xml
artifacts:
reports:
junit: api_smoketest_database_api.xml
database_api_smoketest_negative:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" database_api_negative/ api_smoketest_database_api_negative.xml
artifacts:
reports:
junit: api_smoketest_database_api_negative.xml
follow_api_smoketest:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" follow_api_patterns/ api_smoketest_follow_api.xml
artifacts:
reports:
junit: api_smoketest_follow_api.xml
follow_api_smoketest_negative:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" follow_api_negative/ api_smoketest_follow_api_negative.xml
artifacts:
reports:
junit: api_smoketest_follow_api_negative.xml
tags_api_smoketest:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" tags_api_patterns/ api_smoketest_tags_api.xml
artifacts:
reports:
junit: api_smoketest_tags_api.xml
tags_api_smoketest_negative:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" tags_api_negative/ api_smoketest_tags_api_negative.xml
artifacts:
reports:
junit: api_smoketest_tags_api_negative.xml
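Note on the configuration above: GitLab does not itself evaluate arithmetic inside the variables: section, so the $((HIVEMIND_HTTP_PORT + CI_CONCURRENT_ID)) expression is presumably expanded later by the job shell, where it is used unquoted. A minimal sketch of the intended per-job port derivation, with made-up example values:

#!/bin/bash
# Sketch only: derive a distinct port per concurrent job.
HIVEMIND_HTTP_PORT=8080   # hypothetical base port
CI_CONCURRENT_ID=2        # hypothetical concurrency id
PORT=$((HIVEMIND_HTTP_PORT + CI_CONCURRENT_ID))
echo "This job would expose hivemind on port $PORT"   # prints 8082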
stages:
  - build
  - data-supply
  - e2e-test

variables:

  PGPASSWORD: $HIVEMIND_POSTGRES_PASSWORD

  # GIT_DEPTH: 10
  GIT_DEPTH: 1
  LC_ALL: "C"
  # GIT_STRATEGY: fetch # Noticed errors with that.
  GIT_STRATEGY: clone
  # GIT_STRATEGY: none
  GIT_SUBMODULE_STRATEGY: recursive

  PIPENV_VENV_IN_PROJECT: 1
  PIPENV_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pipenv"
  PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"

  POSTGRES_CLIENT_TOOLS_PATH: /usr/lib/postgresql
  # POSTGRES_HOST: 172.17.0.1 # Host
  # POSTGRES_HOST: postgres-10 # Docker service
  POSTGRES_PORT: 5432

  # Set on project level in Gitlab CI.
  # We need create role and create db privileges.
  # ADMIN_POSTGRES_USER: postgres
  # ADMIN_POSTGRES_USER_PASSWORD: postgres

  # Needed by old runner ssh-executor, probably.
  POSTGRES_USER: $HIVEMIND_POSTGRES_USER
  POSTGRES_PASSWORD: $HIVEMIND_POSTGRES_PASSWORD
  POSTGRES_HOST_AUTH_METHOD: trust

  HIVEMIND_DB_NAME: "hive_${CI_COMMIT_REF_SLUG}_pipeline_id_${CI_PIPELINE_ID}"
  HIVEMIND_EXEC_NAME: $DB_NAME

  # Set on project level in Gitlab CI.
  # HIVEMIND_POSTGRES_USER: hivemind_ci

  # Set on project level in Gitlab CI.
  HIVEMIND_POSTGRES_USER_PASSWORD: $HIVEMIND_POSTGRES_PASSWORD

  # Set on project level in Gitlab CI.
  # HIVEMIND_HTTP_PORT: 18080

  # Set on project level in Gitlab CI.
  # HIVEMIND_MAX_BLOCK: 10001
  # HIVEMIND_MAX_BLOCK: 5000001

  # Set on project level in Gitlab CI.
  # HIVEMIND_SOURCE_HIVED_URL: {"default":"http://hive-4.pl.syncad.com:8091"}
  # HIVEMIND_SOURCE_HIVED_URL: {"default":"192.168.6.136:8091"}
  # HIVEMIND_SOURCE_HIVED_URL: {"default":"http://172.17.0.1:8091"}
.postgres-10: &postgres-10
name: hivemind/postgres:10
alias: db
command: [
"postgres",
"-c", "shared_preload_libraries=pg_stat_statements,pg_qualstats",
"-c", "track_functions=pl",
"-c", "track_io_timing=on",
"-c", "track_activity_query_size=2048",
"-c", "pg_stat_statements.max=10000",
"-c", "pg_stat_statements.track=all",
"-c", "max_connections=100",
"-c", "shared_buffers=2GB",
"-c", "effective_cache_size=6GB",
"-c", "maintenance_work_mem=512MB",
"-c", "checkpoint_completion_target=0.9",
"-c", "wal_buffers=16MB",
"-c", "default_statistics_target=100",
"-c", "random_page_cost=1.1",
"-c", "effective_io_concurrency=200",
"-c", "work_mem=5242kB",
"-c", "min_wal_size=2GB",
"-c", "max_wal_size=8GB",
"-c", "max_worker_processes=4",
"-c", "max_parallel_workers_per_gather=2",
"-c", "max_parallel_workers=4",
]
.postgres-12: &postgres-12
name: hivemind/postgres:12
alias: db
command: [
"postgres",
"-c", "shared_preload_libraries=pg_stat_statements,pg_qualstats",
"-c", "track_functions=pl",
"-c", "track_io_timing=on",
"-c", "track_activity_query_size=2048",
"-c", "pg_stat_statements.max=10000",
"-c", "pg_stat_statements.track=all",
"-c", "max_connections=100",
"-c", "shared_buffers=2GB",
"-c", "effective_cache_size=6GB",
"-c", "maintenance_work_mem=512MB",
"-c", "checkpoint_completion_target=0.9",
"-c", "wal_buffers=16MB",
"-c", "default_statistics_target=100",
"-c", "random_page_cost=1.1",
"-c", "effective_io_concurrency=200",
"-c", "work_mem=5242kB",
"-c", "min_wal_size=2GB",
"-c", "max_wal_size=8GB",
"-c", "max_worker_processes=4",
"-c", "max_parallel_workers_per_gather=2",
"-c", "max_parallel_workers=4",
]
.setup-pip: &setup-pip
- python -m venv .venv
- source .venv/bin/activate
- time pip install --upgrade pip setuptools wheel
- pip --version
- easy_install --version
- wheel version
- pipenv --version
- poetry --version
- time pip install --editable .
.setup-setuptools: &setup-setuptools
- python -m venv .venv
- source .venv/bin/activate
- time pip install --upgrade pip setuptools wheel
- pip --version
- easy_install --version
- wheel version
- pipenv --version
- poetry --version
- time python setup.py develop
# no virtual environment
.setuptools: &setup-setuptools-no-venv
# setuptools will install all dependencies to this directory.
- export PYTHONUSERBASE=./local-site
- time pip install --upgrade pip setuptools wheel
- pip --version
- easy_install --version
- wheel version
- pipenv --version
- poetry --version
- mkdir -p `python -m site --user-site`
- python setup.py install --user --force
# we can probably also run via: ./hive/cli.py
- ln -sf ./local-site/bin/hive "$HIVEMIND_EXEC_NAME"
.setup-pipenv: &setup-pipenv
## Note that Pipfile must exist.
## `--sequential` is slower, but doesn't emit error messages about
## the need to repeat the install:
## - pipenv sync --dev --bare --sequential
## The default is faster than `--sequential`, but sometimes emits errors
## and asks to repeat the install. However, these errors seem negligible.
- time pipenv sync --dev --bare
- source .venv/bin/activate
- pip --version
- easy_install --version
- wheel version
- pipenv --version
- poetry --version
.set-variables: &set-variables
- whoami
# list all variables predefined by Gitlab CI
# - export
- echo "CI_PIPELINE_URL is $CI_PIPELINE_URL"
- echo "CI_PIPELINE_ID is $CI_PIPELINE_ID"
- echo "CI_COMMIT_SHORT_SHA is $CI_COMMIT_SHORT_SHA"
- echo "CI_COMMIT_REF_SLUG is $CI_COMMIT_REF_SLUG"
- export HIVEMIND_DB_NAME=${HIVEMIND_DB_NAME//[^a-zA-Z0-9_]/_}
- echo "HIVEMIND_DB_NAME is $HIVEMIND_DB_NAME"
- export HIVEMIND_POSTGRESQL_CONNECTION_STRING=postgresql://${HIVEMIND_POSTGRES_USER}:${HIVEMIND_POSTGRES_USER_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${HIVEMIND_DB_NAME}
.fetch-git-tags: &fetch-git-tags
  # - git fetch --tags
  - git tag -f ci_implicit_tag # Needed to build python package

.start_timer: &start-timer
  - ./scripts/ci/timer.sh start

.stop-timer: &stop-timer
  - ./scripts/ci/timer.sh check

.hive-sync-script-common: &hive-sync-script-common
  - ./scripts/ci/wait-for-postgres.sh ${POSTGRES_HOST} ${POSTGRES_PORT}
  - export POSTGRES_MAJOR_VERSION=$(./scripts/ci/get-postgres-version.sh)
  - ./scripts/ci/create-db.sh
  - ./scripts/ci/hive-sync.sh
  - ./scripts/ci/collect-db-stats.sh

.default-rules: &default-rules
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
      when: always
    - if: '$CI_PIPELINE_SOURCE == "push"'
      when: manual
    - when: on_success
default:
image: hivemind/python:3.6
# image: hivemind/python:3.8
interruptible: false
timeout: 2h
cache: &global-cache
# Per-branch caching. CI_COMMIT_REF_SLUG is the same thing.
# key: "$CI_COMMIT_REF_NAME"
# Per project caching – use any key. Change this key, if you need
# to clear cache
key: common-1
paths:
- .cache/
- .venv/
- .tox/
before_script:
- *start-timer
- *fetch-git-tags
- *set-variables
- *setup-pip
after_script:
- *stop-timer
##### Jobs #####
.build-egg:
stage: build
needs: []
script:
- python setup.py bdist_egg
- ls -l dist/*
artifacts:
paths:
- dist/
expire_in: 7 days
tags:
    - hivemind-light-job
.build-wheel:
  stage: build
  needs: []
  script:
    - python setup.py bdist_wheel
    - ls -l dist/*
  artifacts:
    paths:
      - dist/
    expire_in: 7 days
  tags:
    - hivemind-light-job
# Postgres shared
hivemind-sync:
  <<: *default-rules
  stage: data-supply
  needs: []
  script:
    - *hive-sync-script-common
  artifacts:
    paths:
      - hivemind-sync.log
      - pg-stats
    expire_in: 7 days
  tags:
    - hivemind-heavy-job

# Postgres as service
.hivemind-sync:
  <<: *default-rules
  stage: data-supply
  services:
    - *postgres-10
    # - *postgres-12
  needs: []
  script:
    - *hive-sync-script-common
    # - ./scripts/ci/dump-db.sh
  artifacts:
    paths:
      - hivemind-sync.log
      - pg-stats
      - pg-dump-${HIVEMIND_DB_NAME}
    expire_in: 7 hours
  tags:
    - hivemind-heavy-job
.e2e-test-common:
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
needs:
- job: hivemind-sync
artifacts: false
before_script:
- *start-timer
- *fetch-git-tags
- *set-variables
- *setup-pip
- ./scripts/ci/wait-for-postgres.sh ${POSTGRES_HOST} ${POSTGRES_PORT}
- ./scripts/ci/hive-server.sh start
after_script:
- ./scripts/ci/hive-server.sh stop
- *stop-timer
tags:
    - hivemind-light-job
bridge_api_smoketest:
  stage: e2e-test
  extends: .e2e-test-common
  script:
    - >
      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
      bridge_api_patterns/ api_smoketest_bridge.xml
  artifacts:
    reports:
      junit: api_smoketest_bridge.xml

bridge_api_smoketest_negative:
  stage: e2e-test
  extends: .e2e-test-common
  script:
    - >
      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
      bridge_api_negative/ api_smoketest_bridge_negative.xml
  artifacts:
    reports:
      junit: api_smoketest_bridge_negative.xml

condenser_api_smoketest:
  stage: e2e-test
  extends: .e2e-test-common
  script:
    - >
      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
      condenser_api_patterns/ api_smoketest_condenser_api.xml
  artifacts:
    reports:
      junit: api_smoketest_condenser_api.xml

condenser_api_smoketest_negative:
  stage: e2e-test
  extends: .e2e-test-common
  script:
    - >
      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
      condenser_api_negative/ api_smoketest_condenser_api_negative.xml
  artifacts:
    reports:
      junit: api_smoketest_condenser_api_negative.xml

database_api_smoketest:
  stage: e2e-test
  extends: .e2e-test-common
  script:
    - >
      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
      database_api_patterns/ api_smoketest_database_api.xml
  artifacts:
    reports:
      junit: api_smoketest_database_api.xml

database_api_smoketest_negative:
  stage: e2e-test
  extends: .e2e-test-common
  script:
    - >
      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
      database_api_negative/ api_smoketest_database_api_negative.xml
  artifacts:
    reports:
      junit: api_smoketest_database_api_negative.xml

follow_api_smoketest:
  stage: e2e-test
  extends: .e2e-test-common
  script:
    - >
      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
      follow_api_patterns/ api_smoketest_follow_api.xml
  artifacts:
    reports:
      junit: api_smoketest_follow_api.xml

follow_api_smoketest_negative:
  stage: e2e-test
  extends: .e2e-test-common
  script:
    - >
      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
      follow_api_negative/ api_smoketest_follow_api_negative.xml
  artifacts:
    reports:
      junit: api_smoketest_follow_api_negative.xml

tags_api_smoketest:
  stage: e2e-test
  extends: .e2e-test-common
  script:
    - >
      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
      tags_api_patterns/ api_smoketest_tags_api.xml
  artifacts:
    reports:
      junit: api_smoketest_tags_api.xml

tags_api_smoketest_negative:
  stage: e2e-test
  extends: .e2e-test-common
  script:
    - >
      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
      tags_api_negative/ api_smoketest_tags_api_negative.xml
  artifacts:
    reports:
      junit: api_smoketest_tags_api_negative.xml
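The .set-variables anchor above builds a per-pipeline database name and then replaces every character that is not alphanumeric or an underscore. A short sketch of that bash substitution, with a hypothetical branch slug:

#!/bin/bash
# Sketch: sanitize the per-pipeline database name as .set-variables does.
HIVEMIND_DB_NAME="hive_my-feature-branch_pipeline_id_12345"   # hypothetical
HIVEMIND_DB_NAME=${HIVEMIND_DB_NAME//[^a-zA-Z0-9_]/_}
echo "$HIVEMIND_DB_NAME"   # hive_my_feature_branch_pipeline_id_12345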
version: "3"
services:
python-3.6:
image: hivemind/python:3.6
build:
context: .
dockerfile: ./scripts/ci/python/3.6/Dockerfile
args:
- user=${USER}
- workdir=/home/${USER}
user: ${USER}
shm_size: 0
# The command below keeps the container running forever.
# command: ["tail", "-f", "/dev/null"]
python-3.8:
image: hivemind/python:3.8
shm_size: 0
build:
context: .
dockerfile: ./scripts/ci/python/3.8/Dockerfile
args:
- user=${USER}
- workdir=/home/${USER}
user: ${USER}
# The command below keeps the container running forever.
# command: ["tail", "-f", "/dev/null"]
postgres-10:
image: hivemind/postgres:10
restart: unless-stopped
build:
context: .
dockerfile: ./scripts/ci/postgres/10/Dockerfile
environment:
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
volumes:
- postgres-10-pgdata:/var/lib/postgresql/data
ports:
- "${POSTGRES_10_PUBLISHED_PORT}:5432"
shm_size: 0
command: [
"postgres",
"-c", "shared_preload_libraries=pg_stat_statements,pg_qualstats",
"-c", "track_functions=pl",
"-c", "track_io_timing=on",
"-c", "track_activity_query_size=2048",
"-c", "pg_stat_statements.max=10000",
"-c", "pg_stat_statements.track=all",
"-c", "max_connections=100",
"-c", "shared_buffers=12GB",
"-c", "effective_cache_size=36GB",
"-c", "maintenance_work_mem=2GB",
"-c", "checkpoint_completion_target=0.9",
"-c", "wal_buffers=16MB",
"-c", "default_statistics_target=100",
"-c", "random_page_cost=1.1",
"-c", "effective_io_concurrency=200",
"-c", "work_mem=31457kB",
"-c", "min_wal_size=2GB",
"-c", "max_wal_size=8GB",
"-c", "max_worker_processes=12",
"-c", "max_parallel_workers_per_gather=4",
"-c", "max_parallel_workers=12",
]
postgres-12:
image: hivemind/postgres:12
restart: unless-stopped
build:
context: .
dockerfile: ./scripts/ci/postgres/12/Dockerfile
environment:
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
volumes:
- postgres-12-pgdata:/var/lib/postgresql/data
ports:
- "${POSTGRES_12_PUBLISHED_PORT}:5432"
shm_size: 0
# https://pgtune.leopard.in.ua/#/ oltp 48G ram, 12 cpus, ssd
command: [
"postgres",
"-c", "shared_preload_libraries=pg_stat_statements,pg_qualstats",
"-c", "track_functions=pl",
"-c", "track_io_timing=on",
"-c", "track_activity_query_size=2048",
"-c", "pg_stat_statements.max=10000",
"-c", "pg_stat_statements.track=all",
"-c", "max_connections=100",
"-c", "shared_buffers=12GB",
"-c", "effective_cache_size=36GB",
"-c", "maintenance_work_mem=2GB",
"-c", "checkpoint_completion_target=0.9",
"-c", "wal_buffers=16MB",
"-c", "default_statistics_target=100",
"-c", "random_page_cost=1.1",
"-c", "effective_io_concurrency=200",
"-c", "work_mem=31457kB",
"-c", "min_wal_size=2GB",
"-c", "max_wal_size=8GB",
"-c", "max_worker_processes=12",
"-c", "max_parallel_workers_per_gather=4",
"-c", "max_parallel_workers=12",
"-c", "max_parallel_maintenance_workers=4",
]
hived-node:
image: registry.gitlab.syncad.com/hive/hive/consensus_node:00b5ff55
restart: unless-stopped
# ports:
# - "2001:2001"
# - "8090:8090"
# - "8091:8091"
shm_size: 0
entrypoint: /usr/local/hive/consensus/entrypoint.sh
command: >-
--replay-blockchain
--stop-replay-at-block 5000000
volumes:
- $PWD/scripts/ci/hived-node/entrypoint.sh:/usr/local/hive/consensus/entrypoint.sh
- $PWD/scripts/ci/hived-node/config.ini:/usr/local/hive/consensus/datadir/config.ini
- ${HIVED_BLOCK_LOG_FILE}:/usr/local/hive/consensus/datadir/blockchain/block_log
- hived-node-datadir:/usr/local/hive/consensus/datadir
volumes:
postgres-10-pgdata:
postgres-12-pgdata:
hived-node-datadir:
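Assuming this compose file sits where docker-compose finds it by default (its file name is not visible in this diff), the services can be exercised locally along these lines:

# Sketch: start the PostgreSQL 12 service with a chosen published port.
POSTGRES_PASSWORD=postgres POSTGRES_12_PUBLISHED_PORT=25432 \
    docker-compose up -d postgres-12
# Open a shell in the CI python image as a non-root user,
# as the Dockerfile comments suggest.
docker-compose run --rm --user=$USER python-3.6 /bin/bash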
hive/cli.py 100644 → 100755
#!/usr/bin/env python
"""CLI service router"""
import os
import logging
import time
from hive.conf import Conf
from hive.db.adapter import Db
from hive.utils.stats import PrometheusClient
logging.basicConfig()
def setup_logging(conf):
"""Setup logging with timestamps"""
timestamp = conf.get('log_timestamp')
epoch = conf.get('log_epoch')
if timestamp and epoch:
datefmt='%Y-%m-%d %H:%M:%S'
timezone = time.strftime('%z')
fmt = '%(asctime)s.%(msecs)03d{} %(created).6f ' \
'%(levelname)s - %(name)s - %(message)s'.format(timezone)
logging.basicConfig(format=fmt, datefmt=datefmt)
elif timestamp:
datefmt='%Y-%m-%d %H:%M:%S'
timezone = time.strftime('%z')
fmt = '%(asctime)s.%(msecs)03d{} ' \
'%(levelname)s - %(name)s - %(message)s'.format(timezone)
logging.basicConfig(format=fmt, datefmt=datefmt)
elif epoch:
fmt = '%(created).6f %(levelname)s - %(name)s - %(message)s'
logging.basicConfig(format=fmt)
else:
fmt = '%(levelname)s - %(name)s - %(message)s'
logging.basicConfig(format=fmt)
def run():
"""Run the service specified in the `--mode` argument."""
@@ -17,6 +42,8 @@ def run():
mode = conf.mode()
PrometheusClient( conf.get('prometheus_port') )
setup_logging(conf)
if mode == 'completion':
conf.generate_completion()
return
@@ -29,9 +56,9 @@ def run():
if fh is None:
print("Cannot write into specified pid_file: %s", pid_file_name)
else:
pid = os.getpid()
fh.write(str(pid))
fh.close()
if conf.get('test_profile'):
...
@@ -54,6 +54,11 @@ class Conf():
add('--test-profile', type=strtobool, env_var='TEST_PROFILE', help='(debug) profile execution', default=False)
add('--log-virtual-op-calls', type=strtobool, env_var='LOG_VIRTUAL_OP_CALLS', help='(debug) log virtual op calls and responses', default=False)
# logging
add('--log-timestamp', help='Output timestamp in log', action='store_true')
add('--log-epoch', help='Output unix epoch in log', action='store_true')
add('--log-mask-sensitive-data', help='Mask sensitive data, e.g. passwords', action='store_true')
add('--pid-file', type=str, env_var='PID_FILE', help='Allows to dump current process pid into specified file', default=None)
add('--auto-http-server-port', nargs='+', type=int, help='Hivemind will listen on first available port from this range')
@@ -80,8 +85,23 @@ class Conf():
root.error("Value error: {}".format(ex))
exit(1)
# Print command line args, but on a continuous integration server
# hide the db connection string.
from sys import argv
root.info("Used command line args: %s", " ".join(argv[1:]))
if conf.get('log_mask_sensitive_data'):
my_args = []
upcoming_connection_string = False
for elem in argv[1:]:
if upcoming_connection_string:
upcoming_connection_string = False
my_args.append('MASKED')
continue
if elem == '--database-url':
upcoming_connection_string = True
my_args.append(elem)
root.info("Used command line args: %s", " ".join(my_args))
else:
root.info("Used command line args: %s", " ".join(argv[1:]))
# uncomment for full list of program args
#args_list = ["--" + k + " " + str(v) for k,v in vars(args).items()]
...
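An illustration of the masking behaviour above; the invocation and the exact log format are hypothetical:

# With masking on, the value following --database-url is logged as MASKED:
hive sync --log-mask-sensitive-data \
    --database-url postgresql://hivemind:secret@localhost:5432/hive
# expected log fragment (sketch):
#   Used command line args: --log-mask-sensitive-data --database-url MASKED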
#!/bin/bash
set -euo pipefail
collect_stats() {
echo "Collecting statistics from database ${HIVEMIND_DB_NAME}"
mkdir -p pg-stats
DIR=$PWD/pg-stats
PGPASSWORD=${POSTGRES_PASSWORD} psql \
--username "${POSTGRES_USER}" \
--host ${POSTGRES_HOST} \
--port ${POSTGRES_PORT} \
--dbname ${HIVEMIND_DB_NAME} << EOF
\timing
\copy (select * from pg_settings) to '$DIR/pg_settings.csv' WITH CSV HEADER
\copy (select * from pg_stat_user_tables) to '$DIR/pg_stat_user_tables.csv' WITH CSV HEADER
-- Disabled, because this table is too big.
--\copy (select * from pg_stat_statements) to '$DIR/pg_stat_statements.csv' WITH CSV HEADER
-- See https://github.com/powa-team/pg_qualstats
\echo pg_qualstats index advisor
SELECT v
FROM json_array_elements(
pg_qualstats_index_advisor(min_filter => 50)->'indexes') v
ORDER BY v::text COLLATE "C";
\echo pg_qualstats unoptimised
SELECT v
FROM json_array_elements(
pg_qualstats_index_advisor(min_filter => 50)->'unoptimised') v
ORDER BY v::text COLLATE "C";
EOF
}
collect_stats
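The script takes its connection parameters from the environment, as the CI jobs provide them. A hypothetical local invocation:

# All values here are examples, not taken from the project's CI settings.
export POSTGRES_USER=postgres POSTGRES_PASSWORD=postgres
export POSTGRES_HOST=localhost POSTGRES_PORT=5432
export HIVEMIND_DB_NAME=hive_test
./scripts/ci/collect-db-stats.sh   # writes CSV files into ./pg-stats/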
#!/bin/bash
set -euo pipefail
# TODO We have trouble with the user when postgresql is run from docker.
# We need the user name `postgres`, not another one, I'm afraid.
# ADMIN_POSTGRES_USER=postgres
# ADMIN_POSTGRES_USER_PASSWORD=postgres
create_db() {
echo "Creating user ${HIVEMIND_POSTGRES_USER} and database ${HIVEMIND_DB_NAME}, owned by this user"
PGPASSWORD=${ADMIN_POSTGRES_USER_PASSWORD} psql \
--username "${ADMIN_POSTGRES_USER}" \
--host ${POSTGRES_HOST} \
--port ${POSTGRES_PORT} \
--dbname postgres << EOF
\echo Creating role ${HIVEMIND_POSTGRES_USER}
DO \$$
BEGIN
IF EXISTS (SELECT * FROM pg_user
WHERE pg_user.usename = '${HIVEMIND_POSTGRES_USER}') THEN
raise warning 'Role % already exists', '${HIVEMIND_POSTGRES_USER}';
ELSE
CREATE ROLE ${HIVEMIND_POSTGRES_USER}
WITH LOGIN PASSWORD '${HIVEMIND_POSTGRES_USER_PASSWORD}';
END IF;
END
\$$;
\echo Creating database ${HIVEMIND_DB_NAME}
CREATE DATABASE ${HIVEMIND_DB_NAME} TEMPLATE template_monitoring
OWNER ${HIVEMIND_POSTGRES_USER};
COMMENT ON DATABASE ${HIVEMIND_DB_NAME} IS
'Database for Gitlab CI pipeline ${CI_PIPELINE_URL}, commit ${CI_COMMIT_SHORT_SHA}';
\c ${HIVEMIND_DB_NAME}
create schema hivemind_admin
authorization ${HIVEMIND_POSTGRES_USER};
CREATE SEQUENCE hivemind_admin.database_metadata_id_seq
INCREMENT 1
START 1
MINVALUE 1
MAXVALUE 2147483647
CACHE 1;
CREATE TABLE hivemind_admin.database_metadata
(
id integer NOT NULL DEFAULT
nextval('hivemind_admin.database_metadata_id_seq'::regclass),
database_name text,
ci_pipeline_url text,
ci_pipeline_id integer,
commit_sha text,
created_at timestamp with time zone DEFAULT now(),
CONSTRAINT database_metadata_pkey PRIMARY KEY (id)
);
alter sequence hivemind_admin.database_metadata_id_seq
OWNER TO ${HIVEMIND_POSTGRES_USER};
alter table hivemind_admin.database_metadata
OWNER TO ${HIVEMIND_POSTGRES_USER};
insert into hivemind_admin.database_metadata
(database_name, ci_pipeline_url, ci_pipeline_id, commit_sha)
values (
'${HIVEMIND_DB_NAME}', '${CI_PIPELINE_URL}',
${CI_PIPELINE_ID}, '${CI_COMMIT_SHORT_SHA}'
);
\q
EOF
}
create_db
#!/bin/bash
set -euo pipefail
echo "Dumping database ${HIVEMIND_DB_NAME}"
export PGPASSWORD=${POSTGRES_PASSWORD}
exec_path=$POSTGRES_CLIENT_TOOLS_PATH/$POSTGRES_MAJOR_VERSION/bin
echo "Using pg_dump version $($exec_path/pg_dump --version)"
time $exec_path/pg_dump \
--username="${POSTGRES_USER}" \
--host="${POSTGRES_HOST}" \
--port="${POSTGRES_PORT}" \
--dbname="${HIVEMIND_DB_NAME}" \
--schema=public \
--format=directory \
--jobs=4 \
--compress=6 \
--quote-all-identifiers \
--lock-wait-timeout=30000 \
--no-privileges --no-acl \
--verbose \
--file="pg-dump-${HIVEMIND_DB_NAME}"
unset PGPASSWORD
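Since the dump uses the directory format, the natural counterpart (not part of this commit) is a parallel pg_restore, for example:

# Hypothetical restore of the directory-format dump produced above;
# the target database must already exist.
pg_restore \
    --username="${POSTGRES_USER}" \
    --host="${POSTGRES_HOST}" \
    --port="${POSTGRES_PORT}" \
    --dbname="${HIVEMIND_DB_NAME}_restored" \
    --jobs=4 \
    "pg-dump-${HIVEMIND_DB_NAME}"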
#!/bin/bash
# Get postgresql server version
set -euo pipefail
get_postgres_version() {
version=$(
PGPASSWORD=$POSTGRES_PASSWORD psql -X -A -t \
--username $POSTGRES_USER \
--host $POSTGRES_HOST \
--port ${POSTGRES_PORT} \
--dbname postgres \
-c "show server_version_num;"
)
echo $(echo $version | cut -c1-2)
}
get_postgres_version
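server_version_num is a six-digit integer for PostgreSQL 10 and later, so the first two characters give the major version:

# Sketch of the mapping the script relies on:
#   100014 -> 10, 120004 -> 12
echo 120004 | cut -c1-2   # prints 12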
#!/bin/bash
set -euo pipefail
HIVEMIND_PID=0
MERCY_KILL_TIMEOUT=5
START_DELAY=5
# For debug only!
# HIVED_URL='{"default":"http://hived-node:8091"}'
# HIVED_URL='{"default":"http://172.17.0.1:8091"}'
check_pid() {
if [ -f hive_server.pid ]; then
HIVEMIND_PID=`cat hive_server.pid`
else
HIVEMIND_PID=0
fi
}
stop() {
if [ "$HIVEMIND_PID" -gt "0" ]; then
HIVEMIND_PID=`cat hive_server.pid`
# Send INT signal and give it some time to stop.
echo "Stopping hive server (pid $HIVEMIND_PID) gently (SIGINT)"
kill -SIGINT $HIVEMIND_PID || true;
sleep $MERCY_KILL_TIMEOUT
# Send KILL signal to be sure.
echo "Killing hive server (pid $HIVEMIND_PID) to be sure (SIGKILL)"
kill -9 $HIVEMIND_PID > /dev/null 2>&1 || true;
rm hive_server.pid;
echo "Hive server has been stopped"
else
echo "Hive server is not running"
fi
}
start() {
if [ "$HIVEMIND_PID" -gt "0" ]; then
echo "Hive server is already running (pid $HIVEMIND_PID)"
exit 0
fi
echo "Starting hive server on port ${HIVEMIND_HTTP_PORT}"
hive server \
--log-mask-sensitive-data \
--pid-file hive_server.pid \
--http-server-port $HIVEMIND_HTTP_PORT \
--steemd-url "$HIVED_URL" \
--database-url "$HIVEMIND_POSTGRESQL_CONNECTION_STRING" 2>&1 \
| tee -ia hivemind-server.log &
HIVEMIND_PID=$!
for i in `seq 1 10`; do
if [ -f hive_server.pid ]; then
echo "Hive server has been started (pid $HIVEMIND_PID)"
sleep $START_DELAY
exit 0
else
sleep 1
fi
done
# If we are here something went wrong
echo "Timeout reached. Hive server has not been started, exiting."
exit 1
}
check_pid
"$1"
#!/bin/bash
set -euo pipefail
cat << EOF
Starting hive sync using hived url: ${HIVED_URL}.
Max sync block is: ${HIVEMIND_MAX_BLOCK}.
EOF
# For debug only!
# HIVEMIND_MAX_BLOCK=10001
# HIVED_URL='{"default":"http://hived-node:8091"}'
# HIVED_URL='{"default":"http://172.17.0.1:8091"}'
DATABASE_URL="postgresql://${HIVEMIND_POSTGRES_USER}:${HIVEMIND_POSTGRES_USER_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${HIVEMIND_DB_NAME}"
hive sync \
--log-mask-sensitive-data \
--pid-file hive_sync.pid \
--test-max-block=${HIVEMIND_MAX_BLOCK} \
--exit-after-sync \
--test-profile=False \
--steemd-url "$HIVED_URL" \
--prometheus-port 11011 \
--database-url "$DATABASE_URL" \
2>&1 | tee -i hivemind-sync.log
log-appender = {"appender":"stderr","stream":"std_error"}
log-logger = {"name":"default","level":"info","appender":"stderr"}
backtrace = yes
plugin = webserver p2p json_rpc
plugin = database_api
# condenser_api enabled per abw request
plugin = condenser_api
plugin = block_api
# gandalf enabled witness + rc
plugin = witness
plugin = rc
# market_history enabled per abw request
plugin = market_history
plugin = market_history_api
plugin = account_history_rocksdb
plugin = account_history_api
# gandalf enabled transaction status
plugin = transaction_status
plugin = transaction_status_api
# gandalf enabled account by key
plugin = account_by_key
plugin = account_by_key_api
# and a few APIs
plugin = block_api network_broadcast_api rc_api
history-disable-pruning = 1
account-history-rocksdb-path = "blockchain/account-history-rocksdb-storage"
#shared-file-dir = "/run/hive"
shared-file-size = 20G
shared-file-full-threshold = 9500
shared-file-scale-rate = 1000
flush-state-interval = 0
market-history-bucket-size = [15,60,300,3600,86400]
market-history-buckets-per-size = 5760
p2p-endpoint = 0.0.0.0:2001
p2p-seed-node =
#gtg.openhive.network:2001
transaction-status-block-depth = 64000
transaction-status-track-after-block = 42000000
webserver-http-endpoint = 0.0.0.0:8091
webserver-ws-endpoint = 0.0.0.0:8090
webserver-thread-pool-size = 8
#!/usr/bin/env bash
SCRIPT=`realpath $0`
SCRIPTPATH=`dirname $SCRIPT`
DATADIR="${SCRIPTPATH}/datadir"
HIVED="${SCRIPTPATH}/bin/hived"
ARGS="$@"
ARGS+=" "
if [[ ! -z "$TRACK_ACCOUNT" ]]; then
ARGS+=" --plugin=account_history --plugin=account_history_api"
ARGS+=" --account-history-track-account-range=[\"$TRACK_ACCOUNT\",\"$TRACK_ACCOUNT\"]"
fi
if [[ "$USE_PUBLIC_BLOCKLOG" ]]; then
if [[ ! -e ${DATADIR}/blockchain/block_log ]]; then
if [[ ! -d ${DATADIR}/blockchain ]]; then
mkdir -p ${DATADIR}/blockchain
fi
echo "Hived: Downloading a block_log and replaying the blockchain"
echo "This may take a little while..."
wget -O ${DATADIR}/blockchain/block_log https://gtg.steem.house/get/blockchain/block_log
ARGS+=" --replay-blockchain"
fi
fi
"$HIVED" \
--data-dir="${DATADIR}" \
$ARGS \
2>&1
#!/usr/bin/env bash
# Start hived in docker container, replay up to 5000000 blocks
MYDIR="$PWD"
WORKDIR="/usr/local/hive/consensus"
docker run -d \
--name hived-replay-5000000 \
-p 127.0.0.1:2001:2001 \
-p 127.0.0.1:8090:8090 \
-p 127.0.0.1:8091:8091 \
-v $MYDIR/config.ini:$WORKDIR/datadir/config.ini \
-v $MYDIR/blockchain/block_log:$WORKDIR/datadir/blockchain/block_log \
-v $MYDIR/entrypoint.sh:$WORKDIR/entrypoint.sh \
--entrypoint $WORKDIR/entrypoint.sh \
registry.gitlab.syncad.com/hive/hive/consensus_node:00b5ff55 \
--replay-blockchain --stop-replay-at-block 5000000
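Once the container is up, replay progress can be checked over the published webserver port; a sketch (the node answers only after its API plugins are initialized):

# Query the replaying node's current head block via JSON-RPC.
curl -s --data \
    '{"jsonrpc":"2.0","method":"database_api.get_dynamic_global_properties","params":{},"id":1}' \
    http://127.0.0.1:8091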
FROM postgres:10.14
LABEL description="Available non-standard extensions: plpython3, pg_qualstats."
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
nano \
postgresql-plpython3-10 \
python3-psutil \
postgresql-10-pg-qualstats \
&& rm -rf /var/lib/apt/lists/*
RUN mkdir -p /docker-entrypoint-initdb.d
# Create stuff for monitoring with pgwatch2 and pghero.
COPY ./scripts/db-monitoring/setup/setup_monitoring.sh \
/docker-entrypoint-initdb.d/
COPY ./scripts/db-monitoring/setup/sql-monitoring /sql-monitoring/
FROM postgres:12.4
LABEL description="Available non-standard extensions: plpython3, pg_qualstats."
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
nano \
postgresql-plpython3-12 \
python3-psutil \
postgresql-12-pg-qualstats \
&& rm -rf /var/lib/apt/lists/*
RUN mkdir -p /docker-entrypoint-initdb.d
# Create stuff for monitoring with pgwatch2 and pghero.
COPY ./scripts/db-monitoring/setup/setup_monitoring.sh \
/docker-entrypoint-initdb.d/
COPY ./scripts/db-monitoring/setup/sql-monitoring /sql-monitoring/
FROM python:3.6.12-buster
# Setup python environment.
ENV LANG C.UTF-8
ENV LC_ALL C.UTF-8
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONFAULTHANDLER 1
# Install debian packages.
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
curl \
ca-certificates \
gnupg \
&& rm -rf /var/lib/apt/lists/*
# Install debian pgdg repository.
RUN curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -
RUN echo "deb http://apt.postgresql.org/pub/repos/apt buster-pgdg main" \
> /etc/apt/sources.list.d/pgdg.list
RUN apt-get update
# Install postgresql client programs for various postgresql versions.
RUN apt-get install -y --no-install-recommends \
postgresql-client-10 \
postgresql-client-11 \
postgresql-client-12 \
postgresql-client-13 \
&& rm -rf /var/lib/apt/lists/*
# Upgrade some crucial python packages.
RUN pip install --upgrade pip setuptools wheel
# Install python dependencies via pip.
RUN pip install pipenv poetry
ARG user
ENV user ${user}
## Add user ##
RUN groupadd --gid 1000 ${user} \
&& useradd --create-home --uid 1000 --gid ${user} ${user}
# Gitlab CI accepts only root user, so we don't set user here.
# You can (and should) run command in container as user `alice` this way:
# docker-compose run --rm --user=alice --name=myrunner runner /bin/bash
# USER ${user}
WORKDIR /home/${user}
RUN chown -R ${user}:${user} /home/${user}
CMD [ "python3" ]
FROM python:3.8.3-buster
# Setup python environment.
ENV LANG C.UTF-8
ENV LC_ALL C.UTF-8
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONFAULTHANDLER 1
# Install debian packages.
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
curl \
ca-certificates \
gnupg \
&& rm -rf /var/lib/apt/lists/*
# Install debian pgdg repository.
RUN curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -
RUN echo "deb http://apt.postgresql.org/pub/repos/apt buster-pgdg main" \
> /etc/apt/sources.list.d/pgdg.list
RUN apt-get update
# Install postgresql client programs for various postgresql versions.
RUN apt-get install -y --no-install-recommends \
postgresql-client-10 \
postgresql-client-11 \
postgresql-client-12 \
postgresql-client-13 \
&& rm -rf /var/lib/apt/lists/*
# Upgrade some crucial python packages.
RUN pip install --upgrade pip setuptools wheel
# Install python dependencies via pip.
RUN pip install pipenv poetry
ARG user
ENV user ${user}
## Add user ##
RUN groupadd --gid 1000 ${user} \
&& useradd --create-home --uid 1000 --gid ${user} ${user}
# Gitlab CI accepts only root user, so we don't set user here.
# You can (and should) run command in container as user `alice` this way:
# docker-compose run --rm --user=alice --name=myrunner runner /bin/bash
# USER ${user}
WORKDIR /home/${user}
RUN chown -R ${user}:${user} /home/${user}
CMD [ "python3" ]
#!/bin/bash
set -e
pip install tox
export HIVEMIND_ADDRESS=$1
export HIVEMIND_PORT=$2
echo "Starting tests on hivemind server running on ${HIVEMIND_ADDRESS}:${HIVEMIND_PORT}"
echo "Selected test group (if empty all will be executed): $3"
tox -- -W ignore::pytest.PytestDeprecationWarning -n auto --durations=0 \
--junitxml=../../../../$4 $3
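The CI jobs drive this script as scripts/ci/start-api-smoketest.sh with the server address, port, pattern directory, and junit output path; run by hand that looks like:

# Hypothetical manual run against a locally started hivemind server.
./scripts/ci/start-api-smoketest.sh localhost 8080 \
    bridge_api_patterns/ api_smoketest_bridge.xml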