Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Target project: hive/hivemind
Commits on Source (79)
Showing with 1947 additions and 1507 deletions
@@ -37,6 +37,7 @@ var/
 *.egg-info/
 .installed.cfg
 *.egg
+pip-wheel-metadata

 # PyInstaller
 # Usually these files are written by a python script from a template
@@ -134,10 +135,12 @@ tests/failed_blocks/
 # version.py
 hive/version.py

+# hivemind.port
 hivemind.port
-hive_server.pid
+hivemind-server.pid

 Pipfile.lock
 pghero.yml
 *~
+.tmp
# https://hub.docker.com/r/library/python/tags/
image: "python:3.7"
stages:
- build
- test
- data-supply
- deploy
- e2e-test
- post-deploy
variables:
GIT_DEPTH: 1
LC_ALL: "C"
GIT_STRATEGY: clone
GIT_SUBMODULE_STRATEGY: recursive
GIT_CLONE_PATH: $CI_BUILDS_DIR/$CI_COMMIT_REF_SLUG/$CI_CONCURRENT_ID/project-name
HIVEMIND_SOURCE_HIVED_URL: $HIVEMIND_SOURCE_HIVED_URL
HIVEMIND_DB_NAME: "hive_$CI_COMMIT_REF_SLUG"
HIVEMIND_HTTP_PORT: $((HIVEMIND_HTTP_PORT + CI_CONCURRENT_ID))
# Configured at gitlab repository settings side
POSTGRES_USER: $HIVEMIND_POSTGRES_USER
POSTGRES_PASSWORD: $HIVEMIND_POSTGRES_PASSWORD
POSTGRES_HOST_AUTH_METHOD: trust
# official way to provide password to psql: http://www.postgresql.org/docs/9.3/static/libpq-envars.html
PGPASSWORD: $HIVEMIND_POSTGRES_PASSWORD
before_script:
- pwd
- echo "CI_NODE_TOTAL is $CI_NODE_TOTAL"
- echo "CI_NODE_INDEX is $CI_NODE_INDEX"
- echo "CI_CONCURRENT_ID is $CI_CONCURRENT_ID"
- echo "CI_COMMIT_REF_SLUG is $CI_COMMIT_REF_SLUG"
hivemind_build:
stage: build
script:
- pip3 install --user --upgrade pip setuptools
- git fetch --tags
- git tag -f ci_implicit_tag
- echo $PYTHONUSERBASE
- "python3 setup.py bdist_egg"
- ls -l dist/*
artifacts:
paths:
- dist/
expire_in: 1 week
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "develop"'
when: always
- when: always
tags:
- hivemind
hivemind_sync:
stage: data-supply
environment:
name: "hive sync built from branch $CI_COMMIT_REF_NAME targeting database $HIVEMIND_DB_NAME"
needs:
- job: hivemind_build
artifacts: true
variables:
GIT_STRATEGY: none
PYTHONUSERBASE: ./local-site
script:
- pip3 install --user --upgrade pip setuptools
- scripts/ci_sync.sh "$HIVEMIND_DB_NAME" "$HIVEMIND_POSTGRESQL_CONNECTION_STRING" "$HIVEMIND_SOURCE_HIVED_URL" $HIVEMIND_MAX_BLOCK $HIVEMIND_HTTP_PORT
artifacts:
paths:
- hivemind-sync.log
expire_in: 1 week
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "develop"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
tags:
- hivemind
hivemind_start_server:
stage: deploy
environment:
name: "hive serve built from branch $CI_COMMIT_REF_NAME exposed on port $HIVEMIND_HTTP_PORT"
url: "http://hive-4.pl.syncad.com:$HIVEMIND_HTTP_PORT"
on_stop: hivemind_stop_server
needs:
- job: hivemind_build
artifacts: true
# - job: hivemind_sync
# artifacts: true
variables:
GIT_STRATEGY: none
PYTHONUSERBASE: ./local-site
script:
- scripts/ci_start_server.sh "$HIVEMIND_DB_NAME" "$HIVEMIND_POSTGRESQL_CONNECTION_STRING" "$HIVEMIND_SOURCE_HIVED_URL" $HIVEMIND_HTTP_PORT
artifacts:
paths:
- hive_server.pid
expire_in: 1 week
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "develop"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
tags:
- hivemind
hivemind_stop_server:
stage: post-deploy
environment:
name: "hive serve built from branch $CI_COMMIT_REF_NAME exposed on port $HIVEMIND_HTTP_PORT"
action: stop
variables:
GIT_STRATEGY: none
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- when: manual
script:
- scripts/ci_stop_server.sh hive_server.pid
needs:
- job: hivemind_start_server
artifacts: true
tags:
- hivemind
artifacts:
paths:
- hive_server.log
.hivemind_start_api_smoketest: &common_api_smoketest_job
stage: e2e-test
environment: hive-4.pl.syncad.com
needs:
- job: hivemind_start_server
artifacts: true
variables:
GIT_STRATEGY: none
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
- if: '$CI_PIPELINE_SOURCE == "push"'
when: manual
- when: on_success
tags:
- hivemind
bridge_api_smoketest:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" bridge_api_patterns/ api_smoketest_bridge.xml
artifacts:
reports:
junit: api_smoketest_bridge.xml
bridge_api_smoketest_negative:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" bridge_api_negative/ api_smoketest_bridge_negative.xml
artifacts:
reports:
junit: api_smoketest_bridge_negative.xml
condenser_api_smoketest:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" condenser_api_patterns/ api_smoketest_condenser_api.xml
artifacts:
reports:
junit: api_smoketest_condenser_api.xml
condenser_api_smoketest_negative:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" condenser_api_negative/ api_smoketest_condenser_api_negative.xml
artifacts:
reports:
junit: api_smoketest_condenser_api_negative.xml
database_api_smoketest:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" database_api_patterns/ api_smoketest_database_api.xml
artifacts:
reports:
junit: api_smoketest_database_api.xml
database_api_smoketest_negative:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" database_api_negative/ api_smoketest_database_api_negative.xml
artifacts:
reports:
junit: api_smoketest_database_api_negative.xml
follow_api_smoketest:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" follow_api_patterns/ api_smoketest_follow_api.xml
artifacts:
reports:
junit: api_smoketest_follow_api.xml
follow_api_smoketest_negative:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" follow_api_negative/ api_smoketest_follow_api_negative.xml
artifacts:
reports:
junit: api_smoketest_follow_api_negative.xml
tags_api_smoketest:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" tags_api_patterns/ api_smoketest_tags_api.xml
artifacts:
reports:
junit: api_smoketest_tags_api.xml
tags_api_smoketest_negative:
<<: *common_api_smoketest_job
script:
- scripts/ci_start_api_smoketest.sh localhost "$HIVEMIND_HTTP_PORT" tags_api_negative/ api_smoketest_tags_api_negative.xml
artifacts:
reports:
junit: api_smoketest_tags_api_negative.xml
stages:
  - build
  - data-supply
  - e2e-test

variables:
  PGPASSWORD: $HIVEMIND_POSTGRES_PASSWORD
  # GIT_DEPTH: 10
  GIT_DEPTH: 1
  # GIT_STRATEGY: fetch # Noticed errors with that.
  GIT_STRATEGY: clone
  # GIT_STRATEGY: none
  GIT_SUBMODULE_STRATEGY: recursive
  PIPENV_VENV_IN_PROJECT: 1
  PIPENV_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pipenv"
  PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
  POSTGRES_CLIENT_TOOLS_PATH: /usr/lib/postgresql
  # POSTGRES_HOST: 172.17.0.1 # Host
  # POSTGRES_HOST: postgres-10 # Docker service
  POSTGRES_PORT: 5432
  # Set on project level in Gitlab CI.
  # We need create role and create db privileges.
  # ADMIN_POSTGRES_USER: postgres
  # ADMIN_POSTGRES_USER_PASSWORD: postgres
  # Needed by old runner ssh-executor, probably.
  POSTGRES_USER: $HIVEMIND_POSTGRES_USER
  POSTGRES_PASSWORD: $HIVEMIND_POSTGRES_PASSWORD
  POSTGRES_HOST_AUTH_METHOD: trust
  HIVEMIND_DB_NAME: "hive_${CI_COMMIT_REF_SLUG}_pipeline_id_${CI_PIPELINE_ID}"
  HIVEMIND_EXEC_NAME: $DB_NAME
  # Set on project level in Gitlab CI.
  # HIVEMIND_POSTGRES_USER: hivemind_ci
  # Set on project level in Gitlab CI.
  HIVEMIND_POSTGRES_USER_PASSWORD: $HIVEMIND_POSTGRES_PASSWORD
  # Set on project level in Gitlab CI.
  # HIVEMIND_HTTP_PORT: 18080
  # Set on project level in Gitlab CI.
  # HIVEMIND_MAX_BLOCK: 10001
  # HIVEMIND_MAX_BLOCK: 5000001
  # Set on project level in Gitlab CI.
  # HIVEMIND_SOURCE_HIVED_URL: {"default":"http://hive-4.pl.syncad.com:8091"}
  # HIVEMIND_SOURCE_HIVED_URL: {"default":"192.168.6.136:8091"}
  # HIVEMIND_SOURCE_HIVED_URL: {"default":"http://172.17.0.1:8091"}

.postgres-10: &postgres-10
  name: hivemind/postgres:10
  alias: db
  command: [
    "postgres",
    "-c", "shared_preload_libraries=pg_stat_statements,pg_qualstats",
    "-c", "track_functions=pl",
    "-c", "track_io_timing=on",
    "-c", "track_activity_query_size=2048",
    "-c", "pg_stat_statements.max=10000",
    "-c", "pg_stat_statements.track=all",
    "-c", "max_connections=100",
    "-c", "shared_buffers=2GB",
    "-c", "effective_cache_size=6GB",
    "-c", "maintenance_work_mem=512MB",
    "-c", "checkpoint_completion_target=0.9",
    "-c", "wal_buffers=16MB",
    "-c", "default_statistics_target=100",
    "-c", "random_page_cost=1.1",
    "-c", "effective_io_concurrency=200",
    "-c", "work_mem=5242kB",
    "-c", "min_wal_size=2GB",
    "-c", "max_wal_size=8GB",
    "-c", "max_worker_processes=4",
    "-c", "max_parallel_workers_per_gather=2",
    "-c", "max_parallel_workers=4",
  ]

.postgres-12: &postgres-12
  name: hivemind/postgres:12
  alias: db
  command: [
    "postgres",
    "-c", "shared_preload_libraries=pg_stat_statements,pg_qualstats",
    "-c", "track_functions=pl",
    "-c", "track_io_timing=on",
    "-c", "track_activity_query_size=2048",
    "-c", "pg_stat_statements.max=10000",
    "-c", "pg_stat_statements.track=all",
    "-c", "max_connections=100",
    "-c", "shared_buffers=2GB",
    "-c", "effective_cache_size=6GB",
    "-c", "maintenance_work_mem=512MB",
    "-c", "checkpoint_completion_target=0.9",
    "-c", "wal_buffers=16MB",
    "-c", "default_statistics_target=100",
    "-c", "random_page_cost=1.1",
    "-c", "effective_io_concurrency=200",
    "-c", "work_mem=5242kB",
    "-c", "min_wal_size=2GB",
    "-c", "max_wal_size=8GB",
    "-c", "max_worker_processes=4",
    "-c", "max_parallel_workers_per_gather=2",
    "-c", "max_parallel_workers=4",
  ]

.setup-pip: &setup-pip
  - python -m venv .venv
  - source .venv/bin/activate
  - time pip install --upgrade pip setuptools wheel
  - pip --version
  - easy_install --version
  - wheel version
  - pipenv --version
  - poetry --version
  - time pip install --editable .

.setup-setuptools: &setup-setuptools
  - python -m venv .venv
  - source .venv/bin/activate
  - time pip install --upgrade pip setuptools wheel
  - pip --version
  - easy_install --version
  - wheel version
  - pipenv --version
  - poetry --version
  - time python setup.py develop

# no virtual environment
.setuptools: &setup-setuptools-no-venv
  # setuptools will install all dependencies to this directory.
  - export PYTHONUSERBASE=./local-site
  - time pip install --upgrade pip setuptools wheel
  - pip --version
  - easy_install --version
  - wheel version
  - pipenv --version
  - poetry --version
  - mkdir -p `python -m site --user-site`
  - python setup.py install --user --force
  # we can probably also run via: ./hive/cli.py
  - ln -sf ./local-site/bin/hive "$HIVEMIND_EXEC_NAME"

.setup-pipenv: &setup-pipenv
  ## Note, that Pipfile must exist.
  ## `--sequential` is slower, but doesn't emit messages about errors
  ## and need to repeat install.
  ## - pipenv sync --dev --bare --sequential
  ## It's faster than `--sequential`, but emits messages about errors
  ## and a need to repeat install, sometimes. However seems these
  ## errors are negligible.
  - time pipenv sync --dev --bare
  - source .venv/bin/activate
  - pip --version
  - easy_install --version
  - wheel version
  - pipenv --version
  - poetry --version

.set-variables: &set-variables
  - whoami
  # list all variables predefined by Gitlab CI
  # - export
  - echo "CI_PIPELINE_URL is $CI_PIPELINE_URL"
  - echo "CI_PIPELINE_ID is $CI_PIPELINE_ID"
  - echo "CI_COMMIT_SHORT_SHA is $CI_COMMIT_SHORT_SHA"
  - echo "CI_COMMIT_REF_SLUG is $CI_COMMIT_REF_SLUG"
  - export HIVEMIND_DB_NAME=${HIVEMIND_DB_NAME//[^a-zA-Z0-9_]/_}
  - echo "HIVEMIND_DB_NAME is $HIVEMIND_DB_NAME"
  - export HIVEMIND_POSTGRESQL_CONNECTION_STRING=postgresql://${HIVEMIND_POSTGRES_USER}:${HIVEMIND_POSTGRES_USER_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${HIVEMIND_DB_NAME}

.fetch-git-tags: &fetch-git-tags
  # - git fetch --tags
  - git tag -f ci_implicit_tag # Needed to build python package

.start_timer: &start-timer
  - ./scripts/ci/timer.sh start

.stop-timer: &stop-timer
  - ./scripts/ci/timer.sh check

.hive-sync-script-common: &hive-sync-script-common
  - ./scripts/ci/wait-for-postgres.sh ${POSTGRES_HOST} ${POSTGRES_PORT}
  - export POSTGRES_MAJOR_VERSION=$(./scripts/ci/get-postgres-version.sh)
  - ./scripts/ci/create-db.sh
  - ./scripts/ci/hive-sync.sh
  - ./scripts/ci/collect-db-stats.sh

.default-rules: &default-rules
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
      when: always
    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "develop"'
      when: always
    - if: '$CI_PIPELINE_SOURCE == "push"'
      when: manual
    - when: on_success

default:
  image: hivemind/python:3.6
  # image: hivemind/python:3.8
  interruptible: false
  timeout: 2h
  cache: &global-cache
    # Per-branch caching. CI_COMMIT_REF_SLUG is the same thing.
    # key: "$CI_COMMIT_REF_NAME"
    # Per project caching – use any key. Change this key, if you need
    # to clear cache
    key: common-1
    paths:
      - .cache/
      - .venv/
      - .tox/
  before_script:
    - *start-timer
    - *fetch-git-tags
    - *set-variables
    - *setup-pip
  after_script:
    - *stop-timer

##### Jobs #####

.build-egg:
  stage: build
  needs: []
  script:
    - python setup.py bdist_egg
    - ls -l dist/*
  artifacts:
    paths:
      - dist/
    expire_in: 7 days
  tags:
    - hivemind-light-job

.build-wheel:
  stage: build
  needs: []
  script:
    - python setup.py bdist_wheel
    - ls -l dist/*
  artifacts:
    paths:
      - dist/
    expire_in: 7 days
  tags:
    - hivemind-light-job

# Postgres shared
hivemind-sync:
  <<: *default-rules
  stage: data-supply
  needs: []
  script:
    - *hive-sync-script-common
  artifacts:
    paths:
      - hivemind-sync.log
      - pg-stats
    expire_in: 7 days
  tags:
    - hivemind-heavy-job

# Postgres as service
.hivemind-sync:
  <<: *default-rules
  stage: data-supply
  services:
    - *postgres-10
    # - *postgres-12
  needs: []
  script:
    - *hive-sync-script-common
    # - ./scripts/ci/dump-db.sh
  artifacts:
    paths:
      - hivemind-sync.log
      - pg-stats
      - pg-dump-${HIVEMIND_DB_NAME}
    expire_in: 7 hours
  tags:
    - hivemind-heavy-job

.e2e-test-common:
  rules:
    - when: on_success
  needs:
    - job: hivemind-sync
      artifacts: false
  before_script:
    - *start-timer
    - *fetch-git-tags
    - *set-variables
    - *setup-pip
    - ./scripts/ci/wait-for-postgres.sh ${POSTGRES_HOST} ${POSTGRES_PORT}
    - ./scripts/ci/hive-server.sh start
  after_script:
    - ./scripts/ci/hive-server.sh stop
    - *stop-timer
  tags:
    - hivemind-light-job

bridge_api_smoketest:
  stage: e2e-test
  extends: .e2e-test-common
  script:
    - >
      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
      bridge_api_patterns/ api_smoketest_bridge.xml
  artifacts:
    reports:
      junit: api_smoketest_bridge.xml

bridge_api_smoketest_negative:
  stage: e2e-test
  extends: .e2e-test-common
  script:
    - >
      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
      bridge_api_negative/ api_smoketest_bridge_negative.xml
  artifacts:
    reports:
      junit: api_smoketest_bridge_negative.xml

condenser_api_smoketest:
  stage: e2e-test
  extends: .e2e-test-common
  script:
    - >
      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
      condenser_api_patterns/ api_smoketest_condenser_api.xml
  artifacts:
    reports:
      junit: api_smoketest_condenser_api.xml

condenser_api_smoketest_negative:
  stage: e2e-test
  extends: .e2e-test-common
  script:
    - >
      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
      condenser_api_negative/ api_smoketest_condenser_api_negative.xml
  artifacts:
    reports:
      junit: api_smoketest_condenser_api_negative.xml

database_api_smoketest:
  stage: e2e-test
  extends: .e2e-test-common
  script:
    - >
      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
      database_api_patterns/ api_smoketest_database_api.xml
  artifacts:
    reports:
      junit: api_smoketest_database_api.xml

database_api_smoketest_negative:
  stage: e2e-test
  extends: .e2e-test-common
  script:
    - >
      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
      database_api_negative/ api_smoketest_database_api_negative.xml
  artifacts:
    reports:
      junit: api_smoketest_database_api_negative.xml

follow_api_smoketest:
  stage: e2e-test
  extends: .e2e-test-common
  script:
    - >
      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
      follow_api_patterns/ api_smoketest_follow_api.xml
  artifacts:
    reports:
      junit: api_smoketest_follow_api.xml

follow_api_smoketest_negative:
  stage: e2e-test
  extends: .e2e-test-common
  script:
    - >
      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
      follow_api_negative/ api_smoketest_follow_api_negative.xml
  artifacts:
    reports:
      junit: api_smoketest_follow_api_negative.xml

tags_api_smoketest:
  stage: e2e-test
  extends: .e2e-test-common
  script:
    - >
      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
      tags_api_patterns/ api_smoketest_tags_api.xml
  artifacts:
    reports:
      junit: api_smoketest_tags_api.xml

tags_api_smoketest_negative:
  stage: e2e-test
  extends: .e2e-test-common
  script:
    - >
      scripts/ci/start-api-smoketest.sh localhost "$HIVEMIND_HTTP_PORT"
      tags_api_negative/ api_smoketest_tags_api_negative.xml
  artifacts:
    reports:
      junit: api_smoketest_tags_api_negative.xml
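One detail worth calling out in the `.set-variables` anchor above: the bash expansion `${HIVEMIND_DB_NAME//[^a-zA-Z0-9_]/_}` sanitizes the branch-derived database name before it is spliced into the connection string. A minimal Python sketch of the same substitution (the function name is illustrative only):

import re

def sanitize_db_name(name: str) -> str:
    # Same effect as the bash expansion ${HIVEMIND_DB_NAME//[^a-zA-Z0-9_]/_}:
    # every character outside [a-zA-Z0-9_] becomes an underscore.
    return re.sub(r'[^a-zA-Z0-9_]', '_', name)

print(sanitize_db_name('hive_feature/mute-reason-test_pipeline_id_123'))
# -> hive_feature_mute_reason_test_pipeline_id_123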
version: "3"
services:
python-3.6:
image: hivemind/python:3.6
build:
context: .
dockerfile: ./scripts/ci/python/3.6/Dockerfile
args:
- user=${USER}
- workdir=/home/${USER}
user: ${USER}
shm_size: 0
# The command below keeps the container running forever.
# command: ["tail", "-f", "/dev/null"]
python-3.8:
image: hivemind/python:3.8
shm_size: 0
build:
context: .
dockerfile: ./scripts/ci/python/3.8/Dockerfile
args:
- user=${USER}
- workdir=/home/${USER}
user: ${USER}
# The command below keeps the container running forever.
# command: ["tail", "-f", "/dev/null"]
postgres-10:
image: hivemind/postgres:10
restart: unless-stopped
build:
context: .
dockerfile: ./scripts/ci/postgres/10/Dockerfile
environment:
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
volumes:
- postgres-10-pgdata:/var/lib/postgresql/data
ports:
- "${POSTGRES_10_PUBLISHED_PORT}:5432"
shm_size: 0
command: [
"postgres",
"-c", "shared_preload_libraries=pg_stat_statements,pg_qualstats",
"-c", "track_functions=pl",
"-c", "track_io_timing=on",
"-c", "track_activity_query_size=2048",
"-c", "pg_stat_statements.max=10000",
"-c", "pg_stat_statements.track=all",
"-c", "max_connections=100",
"-c", "shared_buffers=12GB",
"-c", "effective_cache_size=36GB",
"-c", "maintenance_work_mem=2GB",
"-c", "checkpoint_completion_target=0.9",
"-c", "wal_buffers=16MB",
"-c", "default_statistics_target=100",
"-c", "random_page_cost=1.1",
"-c", "effective_io_concurrency=200",
"-c", "work_mem=31457kB",
"-c", "min_wal_size=2GB",
"-c", "max_wal_size=8GB",
"-c", "max_worker_processes=12",
"-c", "max_parallel_workers_per_gather=4",
"-c", "max_parallel_workers=12",
]
postgres-12:
image: hivemind/postgres:12
restart: unless-stopped
build:
context: .
dockerfile: ./scripts/ci/postgres/12/Dockerfile
environment:
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
volumes:
- postgres-12-pgdata:/var/lib/postgresql/data
ports:
- "${POSTGRES_12_PUBLISHED_PORT}:5432"
shm_size: 0
# https://pgtune.leopard.in.ua/#/ oltp 48G ram, 12 cpus, ssd
command: [
"postgres",
"-c", "shared_preload_libraries=pg_stat_statements,pg_qualstats",
"-c", "track_functions=pl",
"-c", "track_io_timing=on",
"-c", "track_activity_query_size=2048",
"-c", "pg_stat_statements.max=10000",
"-c", "pg_stat_statements.track=all",
"-c", "max_connections=100",
"-c", "shared_buffers=12GB",
"-c", "effective_cache_size=36GB",
"-c", "maintenance_work_mem=2GB",
"-c", "checkpoint_completion_target=0.9",
"-c", "wal_buffers=16MB",
"-c", "default_statistics_target=100",
"-c", "random_page_cost=1.1",
"-c", "effective_io_concurrency=200",
"-c", "work_mem=31457kB",
"-c", "min_wal_size=2GB",
"-c", "max_wal_size=8GB",
"-c", "max_worker_processes=12",
"-c", "max_parallel_workers_per_gather=4",
"-c", "max_parallel_workers=12",
"-c", "max_parallel_maintenance_workers=4",
]
hived-node:
image: registry.gitlab.syncad.com/hive/hive/consensus_node:00b5ff55
restart: unless-stopped
# ports:
# - "2001:2001"
# - "8090:8090"
# - "8091:8091"
shm_size: 0
entrypoint: /usr/local/hive/consensus/entrypoint.sh
command: >-
--replay-blockchain
--stop-replay-at-block 5000000
volumes:
- $PWD/scripts/ci/hived-node/entrypoint.sh:/usr/local/hive/consensus/entrypoint.sh
- $PWD/scripts/ci/hived-node/config.ini:/usr/local/hive/consensus/datadir/config.ini
- ${HIVED_BLOCK_LOG_FILE}:/usr/local/hive/consensus/datadir/blockchain/block_log
- hived-node-datadir:/usr/local/hive/consensus/datadir
volumes:
postgres-10-pgdata:
postgres-12-pgdata:
hived-node-datadir:
-#!/usr/local/bin/python3
+#!/usr/bin/env python

 """CLI service router"""

 import os
 import logging
+import time

 from hive.conf import Conf
 from hive.db.adapter import Db
 from hive.utils.stats import PrometheusClient

-logging.basicConfig()
+
+def setup_logging(conf):
+    """Setup logging with timestamps"""
+    timestamp = conf.get('log_timestamp')
+    epoch = conf.get('log_epoch')
+    if timestamp and epoch:
+        datefmt = '%Y-%m-%d %H:%M:%S'
+        timezone = time.strftime('%z')
+        fmt = '%(asctime)s.%(msecs)03d{} %(created).6f ' \
+            '%(levelname)s - %(name)s - %(message)s'.format(timezone)
+        logging.basicConfig(format=fmt, datefmt=datefmt)
+    elif timestamp:
+        datefmt = '%Y-%m-%d %H:%M:%S'
+        timezone = time.strftime('%z')
+        fmt = '%(asctime)s.%(msecs)03d{} ' \
+            '%(levelname)s - %(name)s - %(message)s'.format(timezone)
+        logging.basicConfig(format=fmt, datefmt=datefmt)
+    elif epoch:
+        fmt = '%(created).6f %(levelname)s - %(name)s - %(message)s'
+        logging.basicConfig(format=fmt)
+    else:
+        fmt = '%(levelname)s - %(name)s - %(message)s'
+        logging.basicConfig(format=fmt)
+

 def run():
     """Run the service specified in the `--mode` argument."""

@@ -17,6 +42,8 @@ def run():
     mode = conf.mode()
     PrometheusClient( conf.get('prometheus_port') )

+    setup_logging(conf)
+
     if mode == 'completion':
         conf.generate_completion()
         return

@@ -27,11 +54,11 @@ def run():
     if pid_file_name is not None:
         fh = open(pid_file_name, 'w')
         if fh is None:
-            print("Cannot write into specified pid_file: %s", pidpid_file_name)
+            print("Cannot write into specified pid_file: %s", pid_file_name)
         else:
             pid = os.getpid()
             fh.write(str(pid))
             fh.close()

     if conf.get('test_profile'):
...
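For reference, a standalone sketch of the format that the `--log-timestamp` branch of setup_logging builds; the logger name and message are invented for illustration:

import logging
import time

# Mirror the format built by setup_logging() when only --log-timestamp is set.
timezone = time.strftime('%z')  # e.g. '+0200'
fmt = '%(asctime)s.%(msecs)03d{} ' \
    '%(levelname)s - %(name)s - %(message)s'.format(timezone)
logging.basicConfig(format=fmt, datefmt='%Y-%m-%d %H:%M:%S')

logging.getLogger('hive').warning('sync started')
# roughly: 2020-10-05 12:34:56.789+0200 WARNING - hive - sync started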
@@ -54,6 +54,11 @@ class Conf():
         add('--test-profile', type=strtobool, env_var='TEST_PROFILE', help='(debug) profile execution', default=False)
         add('--log-virtual-op-calls', type=strtobool, env_var='LOG_VIRTUAL_OP_CALLS', help='(debug) log virtual op calls and responses', default=False)

+        # logging
+        add('--log-timestamp', help='Output timestamp in log', action='store_true')
+        add('--log-epoch', help='Output unix epoch in log', action='store_true')
+        add('--log-mask-sensitive-data', help='Mask sensitive data, e.g. passwords', action='store_true')
+
         add('--pid-file', type=str, env_var='PID_FILE', help='Allows to dump current process pid into specified file', default=None)
         add('--auto-http-server-port', nargs='+', type=int, help='Hivemind will listen on first available port from this range')

@@ -80,8 +85,23 @@ class Conf():
             root.error("Value error: {}".format(ex))
             exit(1)

+        # Print command line args, but on continuous integration server
+        # hide db connection string.
         from sys import argv
-        root.info("Used command line args: %s", " ".join(argv[1:]))
+        if conf.get('log_mask_sensitive_data'):
+            my_args = []
+            upcoming_connection_string = False
+            for elem in argv[1:]:
+                if upcoming_connection_string:
+                    upcoming_connection_string = False
+                    my_args.append('MASKED')
+                    continue
+                if elem == '--database-url':
+                    upcoming_connection_string = True
+                my_args.append(elem)
+            root.info("Used command line args: %s", " ".join(my_args))
+        else:
+            root.info("Used command line args: %s", " ".join(argv[1:]))

         # uncomment for full list of program args
         #args_list = ["--" + k + " " + str(v) for k,v in vars(args).items()]
...
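The masking branch hides only the value that immediately follows a literal `--database-url` token in argv. A self-contained rerun of the same loop on a hypothetical command line (the credentials are fake):

# Hypothetical command line for illustration.
argv = ['--database-url', 'postgresql://hive:secret@db:5432/hive', '--mode', 'server']

my_args = []
upcoming_connection_string = False
for elem in argv:
    if upcoming_connection_string:
        # The element right after --database-url is replaced, not logged.
        upcoming_connection_string = False
        my_args.append('MASKED')
        continue
    if elem == '--database-url':
        upcoming_connection_string = True
    my_args.append(elem)

print(' '.join(my_args))  # --database-url MASKED --mode server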
@@ -8,7 +8,6 @@ from time import perf_counter
 import logging
 import sqlalchemy

-from concurrent.futures import ThreadPoolExecutor
-
 from hive.db.schema import (setup, reset_autovac, set_logged_table_attribute, build_metadata,
                             build_metadata_community, teardown, DB_VERSION)

@@ -95,11 +94,12 @@ class DbState:
         to_locate = [
             'hive_follows_ix5a', # (following, state, created_at, follower)
             'hive_follows_ix5b', # (follower, state, created_at, following)
+            'hive_follows_block_num_idx',
             'hive_follows_created_at_idx',
             'hive_posts_parent_id_idx',
             'hive_posts_depth_idx',
-            'hive_posts_created_at_idx',
+            'hive_posts_created_at_author_id_idx',
             'hive_posts_root_id_id_idx',
             'hive_posts_community_id_idx',
@@ -115,6 +115,8 @@ class DbState:
             'hive_posts_updated_at_idx',
             'hive_posts_payout_plus_pending_payout_id_is_paidout_idx',
+            'hive_posts_api_helper_author_s_permlink_idx',
+
             'hive_votes_voter_id_last_update_idx',
             'hive_votes_block_num_idx',
@@ -279,16 +281,6 @@ class DbState:
         time_start = perf_counter()

-        sql = """
-            select update_hot_and_trending_for_blocks({}, {})
-            """.format(last_imported_block, current_imported_block)
-        row = DbState.db().query_row(sql)
-        time_end = perf_counter()
-        log.info("[INIT] update_all_hot_and_tranding executed in %.4fs", time_end - time_start)
-
-        time_start = perf_counter()
         update_active_starting_from_posts_on_block(last_imported_block, current_imported_block)
         time_end = perf_counter()

@@ -330,6 +322,14 @@ class DbState:
         time_end = perf_counter()
         log.info("[INIT] update_communities_posts_and_rank executed in %.4fs", time_end - time_start)

+        time_start = perf_counter()
+        sql = """
+            SELECT update_posts_rshares({}, {});
+            """.format(last_imported_block, current_imported_block)
+        DbState.db().query_no_return(sql)
+        time_end = perf_counter()
+        log.info("[INIT] update_posts_rshares executed in %.4fs", time_end - time_start)
+
         # Update a block num immediately
         DbState.db().query_no_return("UPDATE hive_state SET block_num = :block_num", block_num = current_imported_block)
...
DROP FUNCTION IF EXISTS get_discussion
;
CREATE OR REPLACE FUNCTION get_discussion(
in _author hive_accounts.name%TYPE,
in _permlink hive_permlink_data.permlink%TYPE
)
RETURNS TABLE
(
id hive_posts.id%TYPE, parent_id hive_posts.parent_id%TYPE, author hive_accounts.name%TYPE, permlink hive_permlink_data.permlink%TYPE,
title hive_post_data.title%TYPE, body hive_post_data.body%TYPE, category hive_category_data.category%TYPE, depth hive_posts.depth%TYPE,
promoted hive_posts.promoted%TYPE, payout hive_posts.payout%TYPE, pending_payout hive_posts.pending_payout%TYPE, payout_at hive_posts.payout_at%TYPE,
is_paidout hive_posts.is_paidout%TYPE, children hive_posts.children%TYPE, created_at hive_posts.created_at%TYPE, updated_at hive_posts.updated_at%TYPE,
rshares hive_posts_view.rshares%TYPE, abs_rshares hive_posts_view.abs_rshares%TYPE, json hive_post_data.json%TYPE, author_rep hive_accounts.reputation%TYPE,
is_hidden hive_posts.is_hidden%TYPE, is_grayed BOOLEAN, total_votes BIGINT, sc_trend hive_posts.sc_trend%TYPE,
acct_author_id hive_posts.author_id%TYPE, root_author hive_accounts.name%TYPE, root_permlink hive_permlink_data.permlink%TYPE,
parent_author hive_accounts.name%TYPE, parent_permlink_or_category hive_permlink_data.permlink%TYPE, allow_replies BOOLEAN,
allow_votes hive_posts.allow_votes%TYPE, allow_curation_rewards hive_posts.allow_curation_rewards%TYPE, url TEXT, root_title hive_post_data.title%TYPE,
beneficiaries hive_posts.beneficiaries%TYPE, max_accepted_payout hive_posts.max_accepted_payout%TYPE, percent_hbd hive_posts.percent_hbd%TYPE,
curator_payout_value hive_posts.curator_payout_value%TYPE
)
LANGUAGE plpgsql
AS
$function$
DECLARE
__post_id INT;
BEGIN
__post_id = find_comment_id( _author, _permlink, True );
RETURN QUERY
SELECT
hpv.id,
hpv.parent_id,
hpv.author,
hpv.permlink,
hpv.title,
hpv.body,
hpv.category,
hpv.depth,
hpv.promoted,
hpv.payout,
hpv.pending_payout,
hpv.payout_at,
hpv.is_paidout,
hpv.children,
hpv.created_at,
hpv.updated_at,
hpv.rshares,
hpv.abs_rshares,
hpv.json,
hpv.author_rep,
hpv.is_hidden,
hpv.is_grayed,
hpv.total_votes,
hpv.sc_trend,
hpv.author_id AS acct_author_id,
hpv.root_author,
hpv.root_permlink,
hpv.parent_author,
hpv.parent_permlink_or_category,
hpv.allow_replies,
hpv.allow_votes,
hpv.allow_curation_rewards,
hpv.url,
hpv.root_title,
hpv.beneficiaries,
hpv.max_accepted_payout,
hpv.percent_hbd,
hpv.curator_payout_value
FROM
(
WITH RECURSIVE child_posts (id, parent_id) AS
(
SELECT hp.id, hp.parent_id
FROM hive_posts hp
WHERE hp.id = __post_id
AND NOT hp.is_muted
UNION ALL
SELECT children.id, children.parent_id
FROM hive_posts children
JOIN child_posts ON children.parent_id = child_posts.id
WHERE children.counter_deleted = 0 AND NOT children.is_muted
)
SELECT hp2.id
FROM hive_posts hp2
JOIN child_posts cp ON cp.id = hp2.id
ORDER BY hp2.id
) ds
JOIN hive_posts_view hpv ON ds.id = hpv.id
ORDER BY ds.id
LIMIT 2000
;
END
$function$
;
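A hypothetical call site for get_discussion, using SQLAlchemy (which this changeset already imports elsewhere); the engine URL, author, and permlink are placeholders:

from sqlalchemy import create_engine, text

engine = create_engine('postgresql:///hive')  # placeholder database URL

with engine.connect() as conn:
    rows = conn.execute(
        text("SELECT id, parent_id, author, permlink, depth "
             "FROM get_discussion(:author, :permlink)"),
        {'author': 'some-author', 'permlink': 'some-permlink'}).fetchall()
    # Rows come back ordered by post id, capped at 2000 rows by the
    # LIMIT inside the function itself.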
DROP TYPE IF EXISTS database_api_post CASCADE;
CREATE TYPE database_api_post AS (
id INT,
community_id INT,
author VARCHAR(16),
permlink VARCHAR(255),
title VARCHAR(512),
body TEXT,
category VARCHAR(255),
depth SMALLINT,
promoted DECIMAL(10,3),
payout DECIMAL(10,3),
last_payout_at TIMESTAMP,
cashout_time TIMESTAMP,
is_paidout BOOLEAN,
children INT,
votes INT,
created_at TIMESTAMP,
updated_at TIMESTAMP,
rshares NUMERIC,
json TEXT,
is_hidden BOOLEAN,
is_grayed BOOLEAN,
total_votes BIGINT,
net_votes BIGINT,
total_vote_weight NUMERIC,
parent_author VARCHAR(16),
parent_permlink_or_category VARCHAR(255),
curator_payout_value VARCHAR(30),
root_author VARCHAR(16),
root_permlink VARCHAR(255),
max_accepted_payout VARCHAR(30),
percent_hbd INT,
allow_replies BOOLEAN,
allow_votes BOOLEAN,
allow_curation_rewards BOOLEAN,
beneficiaries JSON,
url TEXT,
root_title VARCHAR(512),
abs_rshares NUMERIC,
active TIMESTAMP,
author_rewards BIGINT
)
;
DROP FUNCTION IF EXISTS list_comments_by_permlink(character varying, character varying, int)
;
CREATE OR REPLACE FUNCTION list_comments_by_permlink(
in _author hive_accounts.name%TYPE,
in _permlink hive_permlink_data.permlink%TYPE,
in _limit INT)
RETURNS SETOF database_api_post
LANGUAGE sql
STABLE
AS
$function$
SELECT
hp.id, hp.community_id, hp.author, hp.permlink, hp.title, hp.body,
hp.category, hp.depth, hp.promoted, hp.payout, hp.last_payout_at, hp.cashout_time, hp.is_paidout,
hp.children, hp.votes, hp.created_at, hp.updated_at, hp.rshares, hp.json,
hp.is_hidden, hp.is_grayed, hp.total_votes, hp.net_votes, hp.total_vote_weight,
hp.parent_author, hp.parent_permlink_or_category, hp.curator_payout_value, hp.root_author, hp.root_permlink,
hp.max_accepted_payout, hp.percent_hbd, hp.allow_replies, hp.allow_votes,
hp.allow_curation_rewards, hp.beneficiaries, hp.url, hp.root_title, hp.abs_rshares,
hp.active, hp.author_rewards
FROM
hive_posts_view hp
INNER JOIN
(
SELECT hp1.id
FROM
hive_posts_api_helper hp1
INNER JOIN hive_posts hp2 ON hp2.id = hp1.id
WHERE
hp2.counter_deleted = 0 AND NOT hp2.is_muted AND hp1.id != 0
AND hp1.author_s_permlink >= _author || '/' || _permlink
ORDER BY
hp1.author_s_permlink
LIMIT
_limit
) ds ON ds.id = hp.id
ORDER BY
hp.author, hp.permlink
$function$
;
DROP FUNCTION IF EXISTS list_comments_by_cashout_time(timestamp, character varying, character varying, int)
;
CREATE OR REPLACE FUNCTION list_comments_by_cashout_time(
in _cashout_time timestamp,
in _author hive_accounts.name%TYPE,
in _permlink hive_permlink_data.permlink%TYPE,
in _limit INT)
RETURNS SETOF database_api_post
AS
$function$
DECLARE
__post_id INT;
BEGIN
__post_id = find_comment_id(_author,_permlink, True);
RETURN QUERY
SELECT
hp.id, hp.community_id, hp.author, hp.permlink, hp.title, hp.body,
hp.category, hp.depth, hp.promoted, hp.payout, hp.last_payout_at, hp.cashout_time, hp.is_paidout,
hp.children, hp.votes, hp.created_at, hp.updated_at, hp.rshares, hp.json,
hp.is_hidden, hp.is_grayed, hp.total_votes, hp.net_votes, hp.total_vote_weight,
hp.parent_author, hp.parent_permlink_or_category, hp.curator_payout_value, hp.root_author, hp.root_permlink,
hp.max_accepted_payout, hp.percent_hbd, hp.allow_replies, hp.allow_votes,
hp.allow_curation_rewards, hp.beneficiaries, hp.url, hp.root_title, hp.abs_rshares,
hp.active, hp.author_rewards
FROM
hive_posts_view hp
INNER JOIN
(
SELECT
hp1.id
FROM
hive_posts hp1
WHERE
    hp1.counter_deleted = 0
    AND NOT hp1.is_muted
    AND hp1.id != 0
    AND (
        hp1.cashout_time > _cashout_time
        OR ( hp1.cashout_time = _cashout_time AND hp1.id >= __post_id )
    )
ORDER BY
hp1.cashout_time ASC,
hp1.id ASC
LIMIT
_limit
) ds ON ds.id = hp.id
ORDER BY
hp.cashout_time ASC,
hp.id ASC
;
END
$function$
LANGUAGE plpgsql
;
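list_comments_by_cashout_time pages on the compound key (cashout_time, id): the last row of one page is passed back as the start of the next, and the `>=` comparison means the boundary row repeats. A paging sketch under the same assumptions as the SQLAlchemy example above (`conn` is an open connection, and the empty start author/permlink is assumed to resolve to post id 0):

from sqlalchemy import text

PAGE_SIZE = 100
cashout, author, permlink = '1970-01-01 00:00:00', '', ''

while True:
    rows = conn.execute(
        text("SELECT author, permlink, cashout_time "
             "FROM list_comments_by_cashout_time(:cashout, :author, :permlink, :lim)"),
        {'cashout': cashout, 'author': author, 'permlink': permlink,
         'lim': PAGE_SIZE}).fetchall()
    if len(rows) < PAGE_SIZE:
        break  # short page: no further rows
    # The last row becomes the start key of the next page; because the
    # function uses id >= __post_id, this boundary row appears again.
    last = rows[-1]
    cashout, author, permlink = str(last.cashout_time), last.author, last.permlink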
DROP FUNCTION IF EXISTS list_comments_by_root(character varying, character varying, character varying, character varying, int)
;
CREATE OR REPLACE FUNCTION list_comments_by_root(
in _root_author hive_accounts.name%TYPE,
in _root_permlink hive_permlink_data.permlink%TYPE,
in _start_post_author hive_accounts.name%TYPE,
in _start_post_permlink hive_permlink_data.permlink%TYPE,
in _limit INT)
RETURNS SETOF database_api_post
AS
$function$
DECLARE
__root_id INT;
__post_id INT;
BEGIN
__root_id = find_comment_id(_root_author, _root_permlink, True);
__post_id = find_comment_id(_start_post_author, _start_post_permlink, True);
RETURN QUERY
SELECT
hp.id, hp.community_id, hp.author, hp.permlink, hp.title, hp.body,
hp.category, hp.depth, hp.promoted, hp.payout, hp.last_payout_at, hp.cashout_time, hp.is_paidout,
hp.children, hp.votes, hp.created_at, hp.updated_at, hp.rshares, hp.json,
hp.is_hidden, hp.is_grayed, hp.total_votes, hp.net_votes, hp.total_vote_weight,
hp.parent_author, hp.parent_permlink_or_category, hp.curator_payout_value, hp.root_author, hp.root_permlink,
hp.max_accepted_payout, hp.percent_hbd, hp.allow_replies, hp.allow_votes,
hp.allow_curation_rewards, hp.beneficiaries, hp.url, hp.root_title, hp.abs_rshares,
hp.active, hp.author_rewards
FROM
hive_posts_view hp
INNER JOIN
(
SELECT
hp2.id
FROM
hive_posts hp2
WHERE
hp2.counter_deleted = 0
AND NOT hp2.is_muted
AND hp2.root_id = __root_id
AND hp2.id >= __post_id
ORDER BY
hp2.id ASC
LIMIT _limit
) ds on hp.id = ds.id
ORDER BY
hp.id
;
END
$function$
LANGUAGE plpgsql
;
DROP FUNCTION IF EXISTS list_comments_by_parent(character varying, character varying, character varying, character varying, int)
;
CREATE OR REPLACE FUNCTION list_comments_by_parent(
in _parent_author hive_accounts.name%TYPE,
in _parent_permlink hive_permlink_data.permlink%TYPE,
in _start_post_author hive_accounts.name%TYPE,
in _start_post_permlink hive_permlink_data.permlink%TYPE,
in _limit INT)
RETURNS SETOF database_api_post
AS $function$
DECLARE
__post_id INT;
__parent_id INT;
BEGIN
__parent_id = find_comment_id(_parent_author, _parent_permlink, True);
__post_id = find_comment_id(_start_post_author, _start_post_permlink, True);
RETURN QUERY
SELECT
hp.id, hp.community_id, hp.author, hp.permlink, hp.title, hp.body,
hp.category, hp.depth, hp.promoted, hp.payout, hp.last_payout_at, hp.cashout_time, hp.is_paidout,
hp.children, hp.votes, hp.created_at, hp.updated_at, hp.rshares, hp.json,
hp.is_hidden, hp.is_grayed, hp.total_votes, hp.net_votes, hp.total_vote_weight,
hp.parent_author, hp.parent_permlink_or_category, hp.curator_payout_value, hp.root_author, hp.root_permlink,
hp.max_accepted_payout, hp.percent_hbd, hp.allow_replies, hp.allow_votes,
hp.allow_curation_rewards, hp.beneficiaries, hp.url, hp.root_title, hp.abs_rshares,
hp.active, hp.author_rewards
FROM
hive_posts_view hp
INNER JOIN
(
SELECT hp1.id FROM
hive_posts hp1
WHERE
hp1.counter_deleted = 0
AND NOT hp1.is_muted
AND hp1.parent_id = __parent_id
AND hp1.id >= __post_id
ORDER BY
hp1.id ASC
LIMIT
_limit
) ds ON ds.id = hp.id
ORDER BY
hp.id
;
END
$function$
LANGUAGE plpgsql
;
DROP FUNCTION IF EXISTS list_comments_by_last_update(character varying, timestamp, character varying, character varying, int)
;
CREATE OR REPLACE FUNCTION list_comments_by_last_update(
in _parent_author hive_accounts.name%TYPE,
in _updated_at hive_posts.updated_at%TYPE,
in _start_post_author hive_accounts.name%TYPE,
in _start_post_permlink hive_permlink_data.permlink%TYPE,
in _limit INT)
RETURNS SETOF database_api_post
AS
$function$
DECLARE
__post_id INT;
__parent_author_id INT;
BEGIN
__parent_author_id = find_account_id(_parent_author, True);
__post_id = find_comment_id(_start_post_author, _start_post_permlink, True);
RETURN QUERY
SELECT
hp.id, hp.community_id, hp.author, hp.permlink, hp.title, hp.body,
hp.category, hp.depth, hp.promoted, hp.payout, hp.last_payout_at, hp.cashout_time, hp.is_paidout,
hp.children, hp.votes, hp.created_at, hp.updated_at, hp.rshares, hp.json,
hp.is_hidden, hp.is_grayed, hp.total_votes, hp.net_votes, hp.total_vote_weight,
hp.parent_author, hp.parent_permlink_or_category, hp.curator_payout_value, hp.root_author, hp.root_permlink,
hp.max_accepted_payout, hp.percent_hbd, hp.allow_replies, hp.allow_votes,
hp.allow_curation_rewards, hp.beneficiaries, hp.url, hp.root_title, hp.abs_rshares,
hp.active, hp.author_rewards
FROM
hive_posts_view hp
INNER JOIN
(
SELECT
hp1.id
FROM
hive_posts hp1
JOIN
hive_posts hp2 ON hp1.parent_id = hp2.id
WHERE
hp1.counter_deleted = 0
AND NOT hp1.is_muted
AND hp2.author_id = __parent_author_id
AND (
hp1.updated_at < _updated_at
OR hp1.updated_at = _updated_at
AND hp1.id >= __post_id
)
ORDER BY
hp1.updated_at DESC,
hp1.id ASC
LIMIT
_limit
) ds ON ds.id = hp.id
ORDER BY
hp.updated_at DESC,
hp.id ASC
;
END
$function$
LANGUAGE plpgsql
;
DROP FUNCTION IF EXISTS list_comments_by_author_last_update(character varying, timestamp, character varying, character varying, int)
;
CREATE OR REPLACE FUNCTION list_comments_by_author_last_update(
in _author hive_accounts.name%TYPE,
in _updated_at hive_posts.updated_at%TYPE,
in _start_post_author hive_accounts.name%TYPE,
in _start_post_permlink hive_permlink_data.permlink%TYPE,
in _limit INT)
RETURNS SETOF database_api_post
AS
$function$
DECLARE
__author_id INT;
__post_id INT;
BEGIN
__author_id = find_account_id(_author, True);
__post_id = find_comment_id(_start_post_author, _start_post_permlink, True);
RETURN QUERY
SELECT
hp.id, hp.community_id, hp.author, hp.permlink, hp.title, hp.body,
hp.category, hp.depth, hp.promoted, hp.payout, hp.last_payout_at, hp.cashout_time, hp.is_paidout,
hp.children, hp.votes, hp.created_at, hp.updated_at, hp.rshares, hp.json,
hp.is_hidden, hp.is_grayed, hp.total_votes, hp.net_votes, hp.total_vote_weight,
hp.parent_author, hp.parent_permlink_or_category, hp.curator_payout_value, hp.root_author, hp.root_permlink,
hp.max_accepted_payout, hp.percent_hbd, hp.allow_replies, hp.allow_votes,
hp.allow_curation_rewards, hp.beneficiaries, hp.url, hp.root_title, hp.abs_rshares,
hp.active, hp.author_rewards
FROM
hive_posts_view hp
INNER JOIN
(
SELECT
hp1.id
FROM
hive_posts hp1
WHERE
hp1.counter_deleted = 0
AND NOT hp1.is_muted
AND hp1.author_id = __author_id
AND (
hp1.updated_at < _updated_at
OR hp1.updated_at = _updated_at
AND hp1.id >= __post_id
)
ORDER BY
hp1.updated_at DESC,
hp1.id ASC
LIMIT
_limit
) ds ON ds.id = hp.id
ORDER BY
hp.updated_at DESC,
hp.id ASC
;
END
$function$
LANGUAGE plpgsql
;
DROP TYPE IF EXISTS database_api_vote CASCADE;
CREATE TYPE database_api_vote AS (
id BIGINT,
voter VARCHAR(16),
author VARCHAR(16),
permlink VARCHAR(255),
weight NUMERIC,
rshares BIGINT,
percent INT,
last_update TIMESTAMP,
num_changes INT,
reputation BIGINT
);
DROP FUNCTION IF EXISTS find_votes( character varying, character varying, int )
;
CREATE OR REPLACE FUNCTION public.find_votes
(
in _AUTHOR hive_accounts.name%TYPE,
in _PERMLINK hive_permlink_data.permlink%TYPE,
in _LIMIT INT
)
RETURNS SETOF database_api_vote
LANGUAGE 'plpgsql'
AS
$function$
DECLARE _POST_ID INT;
BEGIN
_POST_ID = find_comment_id( _AUTHOR, _PERMLINK, True);
RETURN QUERY
(
SELECT
v.id,
v.voter,
v.author,
v.permlink,
v.weight,
v.rshares,
v.percent,
v.last_update,
v.num_changes,
v.reputation
FROM
hive_votes_view v
WHERE
v.post_id = _POST_ID
ORDER BY
voter_id
LIMIT _LIMIT
);
END
$function$;
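A hypothetical vote lookup through find_votes, again assuming an open SQLAlchemy connection `conn`; author and permlink are placeholders:

from sqlalchemy import text

votes = conn.execute(
    text("SELECT voter, rshares, percent "
         "FROM find_votes(:author, :permlink, :lim)"),
    {'author': 'some-author', 'permlink': 'some-permlink', 'lim': 1000}).fetchall()
# Votes come back in voter_id order, which list_votes_by_comment_voter
# below uses as its continuation key.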
DROP FUNCTION IF EXISTS list_votes_by_voter_comment( character varying, character varying, character varying, int )
;
CREATE OR REPLACE FUNCTION public.list_votes_by_voter_comment
(
in _VOTER hive_accounts.name%TYPE,
in _AUTHOR hive_accounts.name%TYPE,
in _PERMLINK hive_permlink_data.permlink%TYPE,
in _LIMIT INT
)
RETURNS SETOF database_api_vote
LANGUAGE 'plpgsql'
AS
$function$
DECLARE __voter_id INT;
DECLARE __post_id INT;
BEGIN
__voter_id = find_account_id( _VOTER, True );
__post_id = find_comment_id( _AUTHOR, _PERMLINK, True );
RETURN QUERY
(
SELECT
v.id,
v.voter,
v.author,
v.permlink,
v.weight,
v.rshares,
v.percent,
v.last_update,
v.num_changes,
v.reputation
FROM
hive_votes_view v
WHERE
v.voter_id = __voter_id
AND v.post_id >= __post_id
ORDER BY
v.post_id
LIMIT _LIMIT
);
END
$function$;
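-- Example usage (hypothetical values): page through bob's votes, resuming from the post
-- identified by the author/permlink cursor (v.post_id >= __post_id continues an earlier scan).
-- SELECT author, permlink, rshares FROM list_votes_by_voter_comment('bob', 'alice', 'my-first-post', 50);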
DROP FUNCTION IF EXISTS list_votes_by_comment_voter( character varying, character varying, character varying, int )
;
CREATE OR REPLACE FUNCTION public.list_votes_by_comment_voter
(
in _VOTER hive_accounts.name%TYPE,
in _AUTHOR hive_accounts.name%TYPE,
in _PERMLINK hive_permlink_data.permlink%TYPE,
in _LIMIT INT
)
RETURNS SETOF database_api_vote
LANGUAGE 'plpgsql'
AS
$function$
DECLARE __voter_id INT;
DECLARE __post_id INT;
BEGIN
  __voter_id = find_account_id( _VOTER, _VOTER != '' ); -- voter is optional; an empty name skips the existence check
__post_id = find_comment_id( _AUTHOR, _PERMLINK, True );
RETURN QUERY
(
SELECT
v.id,
v.voter,
v.author,
v.permlink,
v.weight,
v.rshares,
v.percent,
v.last_update,
v.num_changes,
v.reputation
FROM
hive_votes_view v
WHERE
v.post_id = __post_id
AND v.voter_id >= __voter_id
ORDER BY
v.voter_id
LIMIT _LIMIT
);
END
$function$;
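-- Example usage (hypothetical values). An empty _VOTER skips the account existence check, and
-- find_account_id then presumably yields 0, so v.voter_id >= 0 starts the scan at the post's
-- first voter:
-- SELECT voter, rshares FROM list_votes_by_comment_voter('', 'alice', 'my-first-post', 50);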
DROP VIEW IF EXISTS hive_accounts_info_view;
CREATE OR REPLACE VIEW public.hive_accounts_info_view
AS
SELECT ha.id,
  ha.name,
  COALESCE(posts.post_count, 0::bigint) AS post_count,
  ha.created_at,
  ( SELECT GREATEST(ha.created_at,
      COALESCE(latest_post.latest_post, '1970-01-01 00:00:00'::timestamp without time zone),
      COALESCE(limited_votes.latest_vote, whole_votes.latest_vote, '1970-01-01 00:00:00'::timestamp without time zone))
      AS "greatest"
  ) AS active_at,
  ha.reputation,
  ha.rank,
  ha.following,
  ha.followers,
  ha.lastread_at,
  ha.posting_json_metadata,
  ha.json_metadata
FROM
  (
    SELECT max(hb.num) - 1200 * 24 * 7 AS block_limit FROM hive_blocks hb
  ) bl,
  hive_accounts ha
LEFT JOIN LATERAL
(
  SELECT COUNT(1) AS post_count
  FROM hive_posts hp
  WHERE hp.counter_deleted = 0 and hp.author_id = ha.id
) posts ON true
LEFT JOIN LATERAL
(
  SELECT hp1.created_at AS latest_post
  FROM hive_posts hp1
  WHERE hp1.counter_deleted = 0 and hp1.author_id = ha.id
  ORDER BY hp1.created_at DESC, hp1.author_id DESC LIMIT 1
) latest_post on true
LEFT JOIN LATERAL --- first try to find the latest vote within the last 7 days
(
  SELECT hv.last_update AS latest_vote
  FROM hive_votes hv
  WHERE ha.id = hv.voter_id AND hv.block_num >= bl.block_limit --AND hv.block_num >= COALESCE(post_data.latest_post_block, 0)
  ORDER BY hv.block_num DESC
  LIMIT 1
) limited_votes ON true
LEFT JOIN LATERAL -- fallback for the case when there was no vote in the last 7 days
(
  SELECT hvf.last_update AS latest_vote
  FROM hive_votes hvf
  WHERE limited_votes.latest_vote IS NULL AND hvf.voter_id = ha.id
  ORDER BY hvf.voter_id DESC, hvf.last_update DESC
  LIMIT 1
) whole_votes ON true
;
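-- Example usage (hypothetical account name). Note that bl.block_limit approximates "7 days ago"
-- as the max block number minus 1200 blocks/hour * 24 * 7, relying on Hive's 3-second block
-- interval.
-- SELECT name, post_count, active_at, reputation FROM hive_accounts_info_view WHERE name = 'alice';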
DROP VIEW IF EXISTS public.hive_accounts_view;
CREATE OR REPLACE VIEW public.hive_accounts_view
AS
SELECT id,
name,
created_at,
reputation,
is_implicit,
followers,
following,
rank,
lastread_at,
posting_json_metadata,
json_metadata,
  ( reputation <= -464800000000 ) is_grayed -- -464800000000 is the largest reputation for which rep_log10 still gives < 1.0
FROM hive_accounts
;
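-- Sanity check for the is_grayed threshold (a sketch, assuming the conventional Hive display
-- formula rep_log10(r) = 25 + sign(r) * (log10(abs(r)) - 9) * 9):
-- for r = -464800000000, log10(4.648e11) is about 11.667, giving 25 - (11.667 - 9) * 9, which is
-- about 0.995 < 1.0, while any reputation closer to zero already displays as 1.0 or more.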
DROP FUNCTION if exists process_hive_post_operation(character varying,character varying,character varying,character varying,timestamp without time zone,timestamp without time zone)
;
CREATE OR REPLACE FUNCTION process_hive_post_operation(
in _author hive_accounts.name%TYPE,
in _permlink hive_permlink_data.permlink%TYPE,
in _parent_author hive_accounts.name%TYPE,
in _parent_permlink hive_permlink_data.permlink%TYPE,
in _date hive_posts.created_at%TYPE,
in _community_support_start_date hive_posts.created_at%TYPE,
in _block_num hive_posts.block_num%TYPE)
RETURNS TABLE (is_new_post boolean, id hive_posts.id%TYPE, author_id hive_posts.author_id%TYPE, permlink_id hive_posts.permlink_id%TYPE,
post_category hive_category_data.category%TYPE, parent_id hive_posts.parent_id%TYPE, community_id hive_posts.community_id%TYPE,
is_valid hive_posts.is_valid%TYPE, is_muted hive_posts.is_muted%TYPE, depth hive_posts.depth%TYPE)
LANGUAGE plpgsql
AS
$function$
BEGIN
INSERT INTO hive_permlink_data
(permlink)
values
(
_permlink
)
ON CONFLICT DO NOTHING
;
if _parent_author != '' THEN
RETURN QUERY INSERT INTO hive_posts as hp
(parent_id, depth, community_id, category_id,
root_id, is_muted, is_valid,
author_id, permlink_id, created_at, updated_at, sc_hot, sc_trend, active, payout_at, cashout_time, counter_deleted, block_num, block_num_created)
SELECT php.id AS parent_id, php.depth + 1 AS depth,
(CASE
WHEN _date > _community_support_start_date THEN
COALESCE(php.community_id, (select hc.id from hive_communities hc where hc.name = _parent_permlink))
ELSE NULL
END) AS community_id,
COALESCE(php.category_id, (select hcg.id from hive_category_data hcg where hcg.category = _parent_permlink)) AS category_id,
(CASE(php.root_id)
WHEN 0 THEN php.id
ELSE php.root_id
END) AS root_id,
php.is_muted AS is_muted, php.is_valid AS is_valid,
ha.id AS author_id, hpd.id AS permlink_id, _date AS created_at,
_date AS updated_at,
calculate_time_part_of_hot(_date) AS sc_hot,
calculate_time_part_of_trending(_date) AS sc_trend,
_date AS active, (_date + INTERVAL '7 days') AS payout_at, (_date + INTERVAL '7 days') AS cashout_time, 0,
_block_num as block_num, _block_num as block_num_created
FROM hive_accounts ha,
hive_permlink_data hpd,
hive_posts php
INNER JOIN hive_accounts pha ON pha.id = php.author_id
INNER JOIN hive_permlink_data phpd ON phpd.id = php.permlink_id
WHERE pha.name = _parent_author AND phpd.permlink = _parent_permlink AND
ha.name = _author AND hpd.permlink = _permlink AND php.counter_deleted = 0
ON CONFLICT ON CONSTRAINT hive_posts_ux1 DO UPDATE SET
    --- During a post update it is disallowed to change the parent post, category or community id;
    --- consequently depth, is_valid and is_muted cannot change either.
    --- post edit part
updated_at = _date,
active = _date,
block_num = _block_num
RETURNING (xmax = 0) as is_new_post, hp.id, hp.author_id, hp.permlink_id, (SELECT hcd.category FROM hive_category_data hcd WHERE hcd.id = hp.category_id) as post_category, hp.parent_id, hp.community_id, hp.is_valid, hp.is_muted, hp.depth
;
ELSE
INSERT INTO hive_category_data
(category)
VALUES (_parent_permlink)
ON CONFLICT (category) DO NOTHING
;
RETURN QUERY INSERT INTO hive_posts as hp
(parent_id, depth, community_id, category_id,
root_id, is_muted, is_valid,
author_id, permlink_id, created_at, updated_at, sc_hot, sc_trend, active, payout_at, cashout_time, counter_deleted, block_num, block_num_created)
SELECT 0 AS parent_id, 0 AS depth,
(CASE
WHEN _date > _community_support_start_date THEN
(select hc.id FROM hive_communities hc WHERE hc.name = _parent_permlink)
ELSE NULL
END) AS community_id,
(SELECT hcg.id FROM hive_category_data hcg WHERE hcg.category = _parent_permlink) AS category_id,
      0 as root_id, -- the post's own id serves as root when there is no parent
false AS is_muted, true AS is_valid,
ha.id AS author_id, hpd.id AS permlink_id, _date AS created_at,
_date AS updated_at,
calculate_time_part_of_hot(_date) AS sc_hot,
calculate_time_part_of_trending(_date) AS sc_trend,
_date AS active, (_date + INTERVAL '7 days') AS payout_at, (_date + INTERVAL '7 days') AS cashout_time, 0
, _block_num as block_num, _block_num as block_num_created
FROM hive_accounts ha,
hive_permlink_data hpd
WHERE ha.name = _author and hpd.permlink = _permlink
ON CONFLICT ON CONSTRAINT hive_posts_ux1 DO UPDATE SET
    --- During a post update it is disallowed to change the parent post, category or community id;
    --- consequently depth, is_valid and is_muted cannot change either.
    --- post edit part
updated_at = _date,
active = _date,
block_num = _block_num
RETURNING (xmax = 0) as is_new_post, hp.id, hp.author_id, hp.permlink_id, _parent_permlink as post_category, hp.parent_id, hp.community_id, hp.is_valid, hp.is_muted, hp.depth
;
END IF;
END
$function$
;
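-- Note on the RETURNING clauses above: (xmax = 0) is a standard PostgreSQL idiom to distinguish
-- a fresh INSERT from an ON CONFLICT update, since xmax stays zero for newly inserted rows.
-- Example call (hypothetical values):
-- SELECT * FROM process_hive_post_operation('alice', 'my-reply', 'bob', 'parent-post',
--   '2020-09-30 12:00:00'::timestamp, '2016-01-01 00:00:00'::timestamp, 4000000);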
DROP FUNCTION if exists delete_hive_post(character varying,character varying,character varying, integer)
;
CREATE OR REPLACE FUNCTION delete_hive_post(
in _author hive_accounts.name%TYPE,
in _permlink hive_permlink_data.permlink%TYPE,
in _block_num hive_blocks.num%TYPE)
RETURNS TABLE (id hive_posts.id%TYPE, depth hive_posts.depth%TYPE)
LANGUAGE plpgsql
AS
$function$
BEGIN
RETURN QUERY UPDATE hive_posts AS hp
SET counter_deleted =
(
SELECT max( hps.counter_deleted ) + 1
FROM hive_posts hps
INNER JOIN hive_accounts ha ON hps.author_id = ha.id
INNER JOIN hive_permlink_data hpd ON hps.permlink_id = hpd.id
WHERE ha.name = _author AND hpd.permlink = _permlink
)
, block_num = _block_num
FROM hive_posts hp1
INNER JOIN hive_accounts ha ON hp1.author_id = ha.id
INNER JOIN hive_permlink_data hpd ON hp1.permlink_id = hpd.id
WHERE hp.id = hp1.id AND ha.name = _author AND hpd.permlink = _permlink AND hp1.counter_deleted = 0
RETURNING hp.id, hp.depth;
END
$function$
;
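-- Example usage (hypothetical values). Deletion is soft: bumping counter_deleted hides the row
-- from views that filter on counter_deleted = 0 while allowing the same author/permlink to be
-- re-created later.
-- SELECT * FROM delete_hive_post('alice', 'my-reply', 4000001);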
DROP VIEW IF EXISTS public.hive_posts_base_view cascade;
CREATE OR REPLACE VIEW public.hive_posts_base_view
AS
SELECT
    hp.block_num
  , hp.id
  , hp.author_id
  , hp.permlink_id
  , hp.payout
  , hp.pending_payout
  , hp.abs_rshares
  , hp.vote_rshares AS rshares
FROM hive_posts hp
;
DROP VIEW IF EXISTS public.hive_posts_pp_view CASCADE;
...
DROP VIEW IF EXISTS public.hive_posts_view;
CREATE OR REPLACE VIEW public.hive_posts_view
AS
SELECT hp.id,
hp.community_id,
hp.root_id,
hp.parent_id,
ha_a.name AS author,
hp.active,
hp.author_rewards,
hp.author_id,
hpd_p.permlink,
hpd.title,
hpd.body,
hpd.img_url,
hpd.preview,
hcd.category,
hp.category_id,
hp.depth,
hp.promoted,
hp.payout,
hp.pending_payout,
hp.payout_at,
hp.last_payout_at,
hp.cashout_time,
hp.is_paidout,
hp.children,
0 AS votes,
0 AS active_votes,
hp.created_at,
hp.updated_at,
hp.vote_rshares AS rshares,
hp.abs_rshares AS abs_rshares,
COALESCE(
(
SELECT COUNT( 1 )
FROM hive_votes v
WHERE v.post_id = hp.id AND v.is_effective
GROUP BY v.post_id
), 0
) AS total_votes,
COALESCE(
(
SELECT SUM( CASE v.rshares > 0 WHEN True THEN 1 ELSE -1 END )
FROM hive_votes v
WHERE v.post_id = hp.id AND NOT v.rshares = 0
GROUP BY v.post_id
), 0
) AS net_votes,
hpd.json,
ha_a.reputation AS author_rep,
hp.is_hidden,
ha_a.is_grayed,
hp.total_vote_weight,
ha_pp.name AS parent_author,
ha_pp.id AS parent_author_id,
( CASE hp.depth > 0
WHEN True THEN hpd_pp.permlink
ELSE hcd.category
END ) AS parent_permlink_or_category,
hp.curator_payout_value,
ha_rp.name AS root_author,
hpd_rp.permlink AS root_permlink,
rcd.category as root_category,
hp.max_accepted_payout,
hp.percent_hbd,
True AS allow_replies,
hp.allow_votes,
hp.allow_curation_rewards,
hp.beneficiaries,
CONCAT('/', rcd.category, '/@', ha_rp.name, '/', hpd_rp.permlink,
CASE (rp.id)
WHEN hp.id THEN ''
ELSE CONCAT('#@', ha_a.name, '/', hpd_p.permlink)
END
) AS url,
rpd.title AS root_title,
hp.sc_trend,
hp.sc_hot,
hp.is_pinned,
hp.is_muted,
hp.is_nsfw,
hp.is_valid,
hr.title AS role_title,
hr.role_id AS role_id,
hc.title AS community_title,
hc.name AS community_name,
hp.block_num
FROM hive_posts hp
JOIN hive_posts pp ON pp.id = hp.parent_id
JOIN hive_posts rp ON rp.id = hp.root_id
JOIN hive_accounts_view ha_a ON ha_a.id = hp.author_id
JOIN hive_permlink_data hpd_p ON hpd_p.id = hp.permlink_id
JOIN hive_post_data hpd ON hpd.id = hp.id
JOIN hive_accounts ha_pp ON ha_pp.id = pp.author_id
JOIN hive_permlink_data hpd_pp ON hpd_pp.id = pp.permlink_id
JOIN hive_accounts ha_rp ON ha_rp.id = rp.author_id
JOIN hive_permlink_data hpd_rp ON hpd_rp.id = rp.permlink_id
JOIN hive_post_data rpd ON rpd.id = rp.id
JOIN hive_category_data hcd ON hcd.id = hp.category_id
JOIN hive_category_data rcd ON rcd.id = rp.category_id
LEFT JOIN hive_communities hc ON hp.community_id = hc.id
LEFT JOIN hive_roles hr ON hp.author_id = hr.account_id AND hp.community_id = hr.community_id
WHERE hp.counter_deleted = 0
;
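-- The url column above yields canonical paths (hypothetical data): a root post becomes
-- '/category/@alice/my-post', while a reply becomes '/category/@alice/my-post#@bob/re-my-post';
-- the '#@author/permlink' suffix is appended only when rp.id differs from hp.id, i.e. for replies.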
DROP VIEW IF EXISTS hive_votes_view
;
CREATE OR REPLACE VIEW hive_votes_view
AS
SELECT
hv.id,
hv.voter_id as voter_id,
ha_a.name as author,
hpd.permlink as permlink,
vote_percent as percent,
ha_v.reputation as reputation,
rshares,
last_update,
ha_v.name as voter,
weight,
num_changes,
hv.permlink_id as permlink_id,
post_id,
is_effective
FROM
hive_votes hv
INNER JOIN hive_accounts ha_v ON ha_v.id = hv.voter_id
INNER JOIN hive_accounts ha_a ON ha_a.id = hv.author_id
INNER JOIN hive_permlink_data hpd ON hpd.id = hv.permlink_id
;
@@ -78,8 +78,8 @@ END;
$BODY$
;
DROP FUNCTION IF EXISTS public.calculate_rhsares_part_of_hot_and_trend(_rshares hive_posts.vote_rshares%TYPE) CASCADE;
CREATE OR REPLACE FUNCTION public.calculate_rhsares_part_of_hot_and_trend(_rshares hive_posts.vote_rshares%TYPE)
RETURNS double precision
LANGUAGE 'plpgsql'
IMMUTABLE
@@ -97,9 +97,9 @@ END;
$BODY$
;
DROP FUNCTION IF EXISTS public.calculate_hot(hive_posts.vote_rshares%TYPE, hive_posts.created_at%TYPE);
CREATE OR REPLACE FUNCTION public.calculate_hot(
    _rshares hive_posts.vote_rshares%TYPE,
    _post_created_at hive_posts.created_at%TYPE)
RETURNS hive_posts.sc_hot%TYPE
LANGUAGE 'plpgsql'
@@ -111,9 +111,9 @@ END;
$BODY$
;
DROP FUNCTION IF EXISTS public.calculate_tranding(hive_posts.vote_rshares%TYPE, hive_posts.created_at%TYPE);
CREATE OR REPLACE FUNCTION public.calculate_tranding(
    _rshares hive_posts.vote_rshares%TYPE,
    _post_created_at hive_posts.created_at%TYPE)
RETURNS hive_posts.sc_trend%TYPE
LANGUAGE 'plpgsql'
@@ -124,43 +124,3 @@ BEGIN
END;
$BODY$
;
DROP FUNCTION IF EXISTS public.update_hot_and_trending_for_blocks;
CREATE OR REPLACE FUNCTION public.update_hot_and_trending_for_blocks(
_first_block integer,
_last_block integer)
RETURNS void
LANGUAGE 'plpgsql'
COST 100
VOLATILE
AS $BODY$
BEGIN
UPDATE hive_posts hp
SET
sc_hot = calculate_hot( votes.rshares, hp.created_at)
, sc_trend = calculate_tranding( votes.rshares, hp.created_at)
FROM
(
SELECT
hv1.post_id
,CAST( SUM(hv1.rshares) as BIGINT) as rshares
FROM
hive_votes hv1
JOIN
(
SELECT
hv.post_id
FROM
hive_votes hv
WHERE hv.block_num >= _first_block AND hv.block_num <= _last_block
GROUP BY hv.post_id
) as filtered_votes ON hv1.post_id = filtered_votes.post_id
GROUP BY hv1.post_id
) as votes
WHERE
hp.is_paidout = False
AND votes.post_id = hp.id;
END
$BODY$
;
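-- Example usage (hypothetical block range): recompute hot and trend scores for every
-- not-yet-paid-out post that received votes in the given block window.
-- SELECT update_hot_and_trending_for_blocks(4000000, 4001200);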
@@ -24,6 +24,7 @@ $BODY$
DECLARE
  __account_id INT := 0;
  __last_read_at TIMESTAMP;
  __limit_block hive_blocks.num%TYPE = block_before_head( '90 days' );
BEGIN
  __account_id = find_account_id( _account, True );
@@ -34,8 +35,8 @@ BEGIN
  RETURN QUERY SELECT
    __last_read_at as lastread_at,
    count(1) as unread
  FROM hive_raw_notifications_view hnv
  WHERE hnv.dst = __account_id AND hnv.block_num > __limit_block AND hnv.created_at > __last_read_at AND hnv.score >= _minimum_score
  ;
END
$BODY$
@@ -48,12 +49,13 @@ CREATE OR REPLACE FUNCTION public.account_notifications(
  _min_score smallint,
  _last_id bigint,
  _limit smallint)
RETURNS SETOF notification
LANGUAGE 'plpgsql'
STABLE
AS $BODY$
DECLARE
  __account_id INT;
  __limit_block hive_blocks.num%TYPE = block_before_head( '90 days' );
BEGIN
  __account_id = find_account_id( _account, True );
  RETURN QUERY SELECT
@@ -72,7 +74,7 @@ BEGIN
  (
    select nv.id, nv.type_id, nv.created_at, nv.src, nv.dst, nv.dst_post_id, nv.score, nv.community, nv.community_title, nv.payload
    from hive_raw_notifications_view nv
    WHERE nv.dst = __account_id AND nv.block_num > __limit_block AND nv.score >= _min_score AND ( _last_id = 0 OR nv.id < _last_id )
    ORDER BY nv.id DESC
    LIMIT _limit
  ) hnv
@@ -82,9 +84,7 @@ BEGIN
  join hive_accounts hd on hd.id = hnv.dst
  join hive_permlink_data hpd on hp.permlink_id = hpd.id
  ORDER BY hnv.id DESC
  LIMIT _limit;
END
$BODY$;
@@ -96,6 +96,7 @@ AS
$function$
DECLARE
  __post_id INT;
  __limit_block hive_blocks.num%TYPE = block_before_head( '90 days' );
BEGIN
  __post_id = find_comment_id(_author, _permlink, True);
  RETURN QUERY SELECT
@@ -114,7 +115,7 @@ BEGIN
  (
    SELECT nv.id, nv.type_id, nv.created_at, nv.src, nv.dst, nv.dst_post_id, nv.score, nv.community, nv.community_title, nv.payload
    FROM hive_raw_notifications_view nv
    WHERE nv.post_id = __post_id AND nv.block_num > __limit_block AND nv.score >= _min_score AND ( _last_id = 0 OR nv.id < _last_id )
    ORDER BY nv.id DESC
    LIMIT _limit
  ) hnv
...
DROP VIEW IF EXISTS public.hive_accounts_rank_view CASCADE;
CREATE OR REPLACE VIEW public.hive_accounts_rank_view
AS
SELECT rank.id,
  CASE
    WHEN rank."position" < 200 THEN 70
    WHEN rank."position" < 1000 THEN 60
    WHEN rank."position" < 6500 THEN 50
    WHEN rank."position" < 25000 THEN 40
    WHEN rank."position" < 100000 THEN 30
    ELSE 20
  END AS score
FROM
(
  SELECT
      ha.id as id
    , CASE WHEN ha2.rank ISNULL THEN 10e6 ELSE ha2.rank END AS "position"
  FROM
    hive_accounts ha
  LEFT JOIN
  (
    SELECT
        ha3.id
      , rank() OVER(order by ha3.reputation DESC) as rank
    FROM hive_accounts ha3
    ORDER BY ha3.reputation DESC LIMIT 150000
    -- only ~2% of accounts share a reputation (about 2000 per 100000), but we take 150000 rows so that even 50% duplicates still cover the top 100000 positions
  ) as ha2 ON ha2.id = ha.id
) rank
;
@@ -31,9 +43,6 @@ AS $BODY$
END;
$BODY$;
DROP FUNCTION IF EXISTS notification_id CASCADE;
;
CREATE OR REPLACE FUNCTION notification_id(in _block_number INTEGER, in _notifyType INTEGER, in _id INTEGER)
@@ -49,10 +58,22 @@ $function$
LANGUAGE plpgsql IMMUTABLE
;
DROP FUNCTION IF EXISTS public.calculate_value_of_vote_on_post;
CREATE OR REPLACE FUNCTION public.calculate_value_of_vote_on_post(
_post_payout hive_posts.payout%TYPE
, _post_rshares hive_posts_view.rshares%TYPE
, _vote_rshares hive_votes.rshares%TYPE)
RETURNS FLOAT
LANGUAGE 'sql'
IMMUTABLE
AS $BODY$
SELECT CAST( ( _post_payout/_post_rshares ) * _vote_rshares as FLOAT);
$BODY$;
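-- A quick worked example (hypothetical numbers): a post paying out 10.5 with 1e6 total rshares
-- values a 50000-rshares vote at (10.5 / 1000000) * 50000 = 0.525. The expression divides by
-- _post_rshares, so callers are assumed to filter out posts whose rshares are zero.
-- SELECT calculate_value_of_vote_on_post(10.5, 1000000, 50000);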
-- View: public.hive_raw_notifications_as_view
DROP VIEW IF EXISTS public.hive_raw_notifications_as_view CASCADE;
CREATE OR REPLACE VIEW public.hive_raw_notifications_as_view
AS
SELECT notifs.block_num,
@@ -86,78 +107,122 @@ CREATE OR REPLACE VIEW public.hive_raw_notifications_as_view
      ''::character varying AS community_title,
      ''::character varying AS payload
    FROM hive_posts_pp_view hpv
    WHERE hpv.depth > 0 AND
      NOT EXISTS (SELECT NULL::text
      FROM hive_follows hf
      WHERE hf.follower = hpv.parent_author_id AND hf.following = hpv.author_id AND hf.state = 2)
    UNION ALL
    SELECT hf.block_num,
      notification_id(hf.block_num, 15, hf.id) AS id,
      0 AS post_id,
      15 AS type_id,
      hf.created_at,
      hf.follower AS src,
      hf.following AS dst,
      0 as dst_post_id,
      ''::character varying(16) AS community,
      ''::character varying AS community_title,
      ''::character varying AS payload
    FROM hive_follows hf
    UNION ALL
    SELECT hr.block_num,
      notification_id(hr.block_num, 14, hr.id) AS id,
      hp.id AS post_id,
      14 AS type_id,
      hr.created_at,
      hr.blogger_id AS src,
      hp.author_id AS dst,
      hr.post_id as dst_post_id,
      ''::character varying(16) AS community,
      ''::character varying AS community_title,
      ''::character varying AS payload
    FROM hive_reblogs hr
    JOIN hive_posts hp ON hr.post_id = hp.id
    UNION ALL
    SELECT hs.block_num,
      notification_id(hs.block_num, 11, hs.id) AS id,
      0 AS post_id,
      11 AS type_id,
      hs.created_at,
      hs.account_id AS src,
      hs.community_id AS dst,
      0 as dst_post_id,
      hc.name AS community,
      hc.title AS community_title,
      ''::character varying AS payload
    FROM hive_subscriptions hs
    JOIN hive_communities hc ON hs.community_id = hc.id
    UNION ALL
    SELECT hm.block_num,
      notification_id(hm.block_num, 16, hm.id) AS id,
      hm.post_id,
      16 AS type_id,
      hp.created_at,
      hp.author_id AS src,
      hm.account_id AS dst,
      hm.post_id as dst_post_id,
      ''::character varying(16) AS community,
      ''::character varying AS community_title,
      ''::character varying AS payload
    FROM hive_mentions hm
    JOIN hive_posts hp ON hm.post_id = hp.id
  ) notifs
JOIN hive_accounts_rank_view harv ON harv.id = notifs.src
;
DROP VIEW IF EXISTS hive_raw_notifications_view_noas cascade;
CREATE OR REPLACE VIEW hive_raw_notifications_view_noas
AS
SELECT -- votes
      vn.block_num
    , vn.id
    , vn.post_id
    , vn.type_id
    , vn.created_at
    , vn.src
    , vn.dst
    , vn.dst_post_id
    , vn.community
    , vn.community_title
    , CASE
        WHEN vn.vote_value < 0.01 THEN ''::VARCHAR
        ELSE CAST( to_char(vn.vote_value, '($FM99990.00)') AS VARCHAR )
      END as payload
    , vn.score
FROM
(
    SELECT
        hv1.block_num
      , notification_id(hv1.block_num, 17, hv1.id::integer) AS id
      , hpv.id AS post_id
      , 17 AS type_id
      , hv1.last_update AS created_at
      , hv1.voter_id AS src
      , hpv.author_id AS dst
      , hpv.id AS dst_post_id
      , ''::VARCHAR(16) AS community
      , ''::VARCHAR AS community_title
      , calculate_value_of_vote_on_post(hpv.payout + hpv.pending_payout, hpv.rshares, hv1.rshares) AS vote_value
      , calculate_notify_vote_score(hpv.payout + hpv.pending_payout, hpv.abs_rshares, hv1.rshares) AS score
    FROM hive_votes hv1
    JOIN
    (
        SELECT
            hpvi.id
          , hpvi.author_id
          , hpvi.payout
          , hpvi.pending_payout
          , hpvi.abs_rshares
          , hpvi.vote_rshares as rshares
        FROM hive_posts hpvi
        WHERE hpvi.block_num > block_before_head('97 days'::interval)
    ) hpv ON hv1.post_id = hpv.id
    WHERE hv1.rshares >= 10e9
) as vn
WHERE vn.vote_value >= 0.02
UNION ALL
SELECT -- new community
      hc.block_num as block_num
    , notification_id(hc.block_num, 11, hc.id) as id
    , 0 as post_id
    , 1 as type_id
@@ -171,26 +236,6 @@ SELECT -- new community
    , 35 as score
FROM
    hive_communities hc
UNION ALL
SELECT --persistent notifs
    hn.block_num
@@ -207,11 +252,9 @@ UNION ALL
    , hn.score as score
FROM hive_notifs hn
JOIN hive_communities hc ON hn.community_id = hc.id
;
DROP VIEW IF EXISTS hive_raw_notifications_view CASCADE;
CREATE OR REPLACE VIEW hive_raw_notifications_view
AS
SELECT *
@@ -221,4 +264,4 @@ FROM
UNION ALL
SELECT * FROM hive_raw_notifications_view_noas
) as notifs
WHERE notifs.score >= 0;
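-- Example usage (a sketch; the dst account id is hypothetical). The combined view already caps
-- results to non-negative scores, so consumers only add their own filters:
-- SELECT id, type_id, created_at, src, dst FROM hive_raw_notifications_view
-- WHERE dst = 12345 ORDER BY id DESC LIMIT 10;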