
Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (1471)
Showing with 1284 additions and 451 deletions
build
.vs
Dockerfile
*.log
# Consistent behavior for all users, regardless of their Git settings and environment.
# Always convert line endings to LF on checkout.
* text=auto eol=lf
@@ -39,6 +39,17 @@ build-*/
build_*/
/.vs
CMakeSettings.json
#clion build directories
cmake-build-*/
.idea/
\ No newline at end of file
.idea/
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
.python-version
*.log
venv
.venv
This diff is collapsed.
[submodule "hive"]
path = hive
url = https://gitlab.syncad.com/hive/hive.git
url = ../hive.git
CMAKE_MINIMUM_REQUIRED( VERSION 3.14 )
SET( CMAKE_POSITION_INDEPENDENT_CODE ON )
SET( CMAKE_CXX_STANDARD 17 )
SET( CMAKE_CXX_STANDARD_REQUIRED ON )
list( APPEND CMAKE_MODULE_PATH
"${CMAKE_CURRENT_SOURCE_DIR}/cmake"
"${CMAKE_CURRENT_SOURCE_DIR}/hive/cmake"
"${CMAKE_CURRENT_SOURCE_DIR}/hive/libraries/fc/GitVersionGen" )
#This must be processed before first project or enable_language statement
INCLUDE(hive_build_types)
PROJECT( haf )
SET( Boost_NO_BOOST_CMAKE ON CACHE STRING "ON or OFF" FORCE )
SET( HAF_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} )
FILE( CREATE_LINK ${CMAKE_CURRENT_SOURCE_DIR}/src/sql_serializer ${CMAKE_CURRENT_SOURCE_DIR}/hive/libraries/plugins/sql_serializer SYMBOLIC )
list( APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake" )
INCLUDE( compiler )
if( "${CMAKE_GENERATOR}" STREQUAL "Ninja" )
ENABLE_NINJA_COLORFUL_OUTPUT()
endif()
ADD_SUBDIRECTORY( hive EXCLUDE_FROM_ALL )
# Read whole HAF revision (using already imported Git tools from Hive project)
include( GetGitRevisionDescription )
get_git_head_revision_dir("${CMAKE_CURRENT_SOURCE_DIR}" HAF_GIT_REFSPEC HAF_GIT_REVISION_SHA)
MESSAGE(STATUS "Detected HAF project git revision: ${HAF_GIT_REVISION_SHA}")
SET_TARGET_PROPERTIES( hived PROPERTIES EXCLUDE_FROM_ALL 0 )
SET_TARGET_PROPERTIES( cli_wallet PROPERTIES EXCLUDE_FROM_ALL 0 )
SET_TARGET_PROPERTIES( get_dev_key PROPERTIES EXCLUDE_FROM_ALL 0 )
SET_TARGET_PROPERTIES( truncate_block_log PROPERTIES EXCLUDE_FROM_ALL 0 )
SET_TARGET_PROPERTIES( compress_block_log PROPERTIES EXCLUDE_FROM_ALL 0 )
SET_TARGET_PROPERTIES( block_log_util PROPERTIES EXCLUDE_FROM_ALL 0 )
IF ( NOT DEFINED POSTGRES_INSTALLATION_DIR )
SET( POSTGRES_INSTALLATION_DIR "/usr/lib/postgresql/12/bin" )
FIND_PROGRAM( POSTGRES_PG_CONFIG NAMES pg_config )
IF ( POSTGRES_PG_CONFIG )
EXECUTE_PROCESS(COMMAND ${POSTGRES_PG_CONFIG} --bindir OUTPUT_VARIABLE POSTGRES_INSTALLATION_DIR OUTPUT_STRIP_TRAILING_WHITESPACE)
ELSE()
SET( POSTGRES_INSTALLATION_DIR "/usr/lib/postgresql/14/bin" )
ENDIF()
ENDIF()
SET( CMAKE_POSITION_INDEPENDENT_CODE ON )
SET( CMAKE_SKIP_INSTALL_ALL_DEPENDENCY TRUE )
SET( BUILD_SHARED_LIBS ON )
INCLUDE( ExternalProject )
SET( CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_CURRENT_SOURCE_DIR}/cmake )
INCLUDE( hive_options )
INCLUDE( hive_targets )
INCLUDE( clangtidy )
INCLUDE( compiler )
INCLUDE( git )
INCLUDE( libraries )
INCLUDE( postgres )
INCLUDE( sql_extension )
@@ -33,8 +62,6 @@ INCLUDE( targets )
INCLUDE( tests )
SETUP_OUTPUT_DIRECTORIES()
GENERATE_GIT_VERSION_FILE()
GET_RUNTIME_POSTGRES_VARIABLES()
set(CMAKE_INSTALL_RPATH ${POSTGRES_LIBDIR})
......
# docker build -f Dockerfile -t haf .
FROM phusion/baseimage:focal-1.0.0
# Base docker file having defined environment for build and run of HAF instance.
# docker buildx build --progress=plain --target=ci-base-image --tag registry.gitlab.syncad.com/hive/haf/ci-base-image$CI_IMAGE_TAG --file Dockerfile .
# To be started from cloned haf source directory.
ARG CI_REGISTRY_IMAGE=registry.gitlab.syncad.com/hive/haf/
ARG CI_IMAGE_TAG=ubuntu22.04-8
ENV LANG=en_US.UTF-8
ARG BUILD_IMAGE_TAG
RUN \
apt-get update \
&& apt-get install -y \
systemd \
autoconf \
postgresql \
postgresql-contrib \
build-essential \
cmake \
libboost-all-dev \
postgresql-server-dev-12 \
git \
python3-pip \
libssl-dev \
libreadline-dev \
libsnappy-dev \
libpqxx-dev \
clang \
clang-tidy \
&& \
apt-get clean
FROM registry.gitlab.syncad.com/hive/hive/minimal-runtime:ubuntu22.04-10 AS minimal-runtime
RUN \
python3 -mpip install \
pexpect \
psycopg2 \
sqlalchemy \
jinja2
ENV PATH="/home/haf_admin/.local/bin:$PATH"
SHELL ["/bin/bash", "-c"]
USER root
WORKDIR /usr/local/src
COPY ./hive/scripts/openssl.conf /usr/local/src/hive/scripts/openssl.conf
COPY ./hive/scripts/setup_ubuntu.sh /usr/local/src/hive/scripts/
COPY ./scripts/setup_ubuntu.sh /usr/local/src/scripts/
# create required accounts
RUN ./scripts/setup_ubuntu.sh --haf-admin-account="haf_admin" --hived-account="hived" && rm -rf /var/lib/apt/lists/*
# install postgres. Installation automatically does an initdb, so remove the 29+MB database that we don't need afterwards
RUN apt-get update && \
DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y curl postgresql libpq5 libboost-chrono1.74.0 libboost-context1.74.0 libboost-filesystem1.74.0 libboost-thread1.74.0 && \
rm -rf /var/lib/postgresql/14/main /var/lib/apt/lists/*
# change the UID and GID to match the ones postgres is assigned in our non-minimal runtime
RUN (chown -Rf --from=postgres 105 / || true) && (chown -Rf --from=:postgres :109 / || true) && usermod -u 105 postgres && groupmod -g 109 postgres
RUN usermod -a -G users -c "PostgreSQL daemon account" postgres
USER haf_admin
WORKDIR /home/haf_admin
ADD . /usr/local/src
FROM registry.gitlab.syncad.com/hive/hive/ci-base-image:ubuntu22.04-10 AS ci-base-image
ENV PATH="/home/haf_admin/.local/bin:$PATH"
SHELL ["/bin/bash", "-c"]
USER root
WORKDIR /usr/local/src
COPY ./hive/scripts/openssl.conf /usr/local/src/hive/scripts/openssl.conf
COPY ./hive/scripts/setup_ubuntu.sh /usr/local/src/hive/scripts/
COPY ./scripts/setup_ubuntu.sh /usr/local/src/scripts/
# Install development packages and create required accounts
RUN ./scripts/setup_ubuntu.sh --dev --haf-admin-account="haf_admin" --hived-account="hived" \
&& rm -rf /var/lib/apt/lists/*
USER haf_admin
WORKDIR /home/haf_admin
# Install additional packages located in the user directory
RUN /usr/local/src/scripts/setup_ubuntu.sh --user
FROM ${CI_REGISTRY_IMAGE}ci-base-image:$CI_IMAGE_TAG AS build
ARG BUILD_HIVE_TESTNET=OFF
ENV BUILD_HIVE_TESTNET=${BUILD_HIVE_TESTNET}
ARG ENABLE_SMT_SUPPORT=OFF
ENV ENABLE_SMT_SUPPORT=${ENABLE_SMT_SUPPORT}
ARG HIVE_CONVERTER_BUILD=OFF
ENV HIVE_CONVERTER_BUILD=${HIVE_CONVERTER_BUILD}
ARG HIVE_LINT=OFF
ENV HIVE_LINT=${HIVE_LINT}
ARG HIVE_SUBDIR=.
ENV HIVE_SUBDIR=${HIVE_SUBDIR}
ENV HAF_SOURCE_DIR="/home/haf_admin/source/${HIVE_SUBDIR}"
USER haf_admin
WORKDIR /home/haf_admin
SHELL ["/bin/bash", "-c"]
# Get everything from cwd as sources to be built.
COPY --chown=haf_admin:users . /home/haf_admin/source
RUN \
"${HAF_SOURCE_DIR}/scripts/build.sh" --haf-source-dir="${HAF_SOURCE_DIR}" --haf-binaries-dir="./build" \
--cmake-arg="-DBUILD_HIVE_TESTNET=${BUILD_HIVE_TESTNET}" \
--cmake-arg="-DENABLE_SMT_SUPPORT=${ENABLE_SMT_SUPPORT}" \
--cmake-arg="-DHIVE_CONVERTER_BUILD=${HIVE_CONVERTER_BUILD}" \
--cmake-arg="-DHIVE_LINT=${HIVE_LINT}" \
&& \
cd ./build && \
find . -name '*.o' -type f -delete && \
find . -name '*.a' -type f -delete
# Here we could use a smaller image without packages specific to build requirements
FROM ${CI_REGISTRY_IMAGE}ci-base-image:$CI_IMAGE_TAG as base_instance
ENV BUILD_IMAGE_TAG=${BUILD_IMAGE_TAG:-:ubuntu22.04-8}
ARG P2P_PORT=2001
ENV P2P_PORT=${P2P_PORT}
ARG WS_PORT=8090
ENV WS_PORT=${WS_PORT}
ARG HTTP_PORT=8091
ENV HTTP_PORT=${HTTP_PORT}
ARG HIVE_SUBDIR=.
ENV HIVE_SUBDIR=${HIVE_SUBDIR}
ENV HAF_SOURCE_DIR="/home/haf_admin/source/${HIVE_SUBDIR}"
# Environment variable which allows overriding the default postgres access specification in pg_hba.conf
ENV PG_ACCESS="host haf_block_log haf_app_admin 172.0.0.0/8 trust\nhost all pghero 172.0.0.0/8 trust"
# Always define a default value for the HIVED_UID variable to make direct spawning of the docker image possible (without the run_hived_img.sh wrapper)
ENV HIVED_UID=1000
SHELL ["/bin/bash", "-c"]
USER hived
WORKDIR /home/hived
RUN mkdir -p /home/hived/bin && \
mkdir /home/hived/shm_dir && \
mkdir /home/hived/datadir && \
chown -Rc hived:users /home/hived/
COPY --from=build --chown=hived:users \
/home/haf_admin/build/hive/programs/hived/hived \
/home/haf_admin/build/hive/programs/cli_wallet/cli_wallet \
/home/haf_admin/build/hive/programs/util/* \
/home/haf_admin/build/hive/programs/blockchain_converter/blockchain_converter* \
/home/haf_admin/build/tests/unit/* \
/home/hived/bin/
USER haf_admin
WORKDIR /home/haf_admin
COPY --from=build --chown=haf_admin:users /home/haf_admin/build /home/haf_admin/build/
COPY --from=build --chown=haf_admin:users "${HAF_SOURCE_DIR}" "${HAF_SOURCE_DIR}"
ENV POSTGRES_VERSION=14
COPY --from=build --chown=haf_admin:users "${HAF_SOURCE_DIR}/docker/docker_entrypoint.sh" .
COPY --from=build --chown=postgres:postgres "${HAF_SOURCE_DIR}/docker/postgresql.conf" /etc/postgresql/$POSTGRES_VERSION/main/postgresql.conf
COPY --from=build --chown=postgres:postgres "${HAF_SOURCE_DIR}/docker/pg_hba.conf" /etc/postgresql/$POSTGRES_VERSION/main/pg_hba.conf.default
ENV DATADIR=/home/hived/datadir
# Use default location (inside datadir) of shm file. If SHM should be placed on some different device, then set it to mapped volume `/home/hived/shm_dir` and map it in docker run
ENV SHM_DIR=${DATADIR}/blockchain
ENV WAL_DIR=${DATADIR}/blockchain/haf_wal
STOPSIGNAL SIGINT
# JSON rpc service
EXPOSE ${HTTP_PORT}
ENTRYPOINT [ "/home/haf_admin/docker_entrypoint.sh" ]
ARG BUILD_TIME
ARG GIT_COMMIT_SHA
ARG GIT_CURRENT_BRANCH
ARG GIT_LAST_LOG_MESSAGE
ARG GIT_LAST_COMMITTER
ARG GIT_LAST_COMMIT_DATE
LABEL org.opencontainers.image.created="$BUILD_TIME"
LABEL org.opencontainers.image.url="https://hive.io/"
LABEL org.opencontainers.image.documentation="https://gitlab.syncad.com/hive/haf"
LABEL org.opencontainers.image.source="https://gitlab.syncad.com/hive/haf"
#LABEL org.opencontainers.image.version="${VERSION}"
LABEL org.opencontainers.image.revision="$GIT_COMMIT_SHA"
LABEL org.opencontainers.image.licenses="MIT"
LABEL org.opencontainers.image.ref.name="HAF Core"
LABEL org.opencontainers.image.title="Hive Application Framework (HAF) Core Image"
LABEL org.opencontainers.image.description="Runs both the PostgreSQL database server and the hived instance that feeds it blockchain data"
LABEL io.hive.image.branch="$GIT_CURRENT_BRANCH"
LABEL io.hive.image.commit.log_message="$GIT_LAST_LOG_MESSAGE"
LABEL io.hive.image.commit.author="$GIT_LAST_COMMITTER"
LABEL io.hive.image.commit.date="$GIT_LAST_COMMIT_DATE"
FROM ${CI_REGISTRY_IMAGE}base_instance:${BUILD_IMAGE_TAG} as instance
# Embedded postgres service
EXPOSE 5432
EXPOSE ${P2P_PORT}
# websocket service
EXPOSE ${WS_PORT}
# JSON rpc service
EXPOSE ${HTTP_PORT}
FROM registry.gitlab.syncad.com/hive/haf/minimal-runtime:ubuntu22.04-10 AS minimal-instance
ENV BUILD_IMAGE_TAG=${BUILD_IMAGE_TAG:-:ubuntu22.04-8}
ARG P2P_PORT=2001
ENV P2P_PORT=${P2P_PORT}
ARG WS_PORT=8090
ENV WS_PORT=${WS_PORT}
ARG HTTP_PORT=8091
ENV HTTP_PORT=${HTTP_PORT}
ARG HIVE_SUBDIR=.
ENV HIVE_SUBDIR=${HIVE_SUBDIR}
ENV HAF_SOURCE_DIR="/home/haf_admin/source/${HIVE_SUBDIR}"
# Environment variable which allows overriding the default postgres access specification in pg_hba.conf
ENV PG_ACCESS="host haf_block_log haf_app_admin 172.0.0.0/8 trust\nhost all pghero 172.0.0.0/8 trust"
# Always define a default value for the HIVED_UID variable to make direct spawning of the docker image possible (without the run_hived_img.sh wrapper)
ENV HIVED_UID=1000
ENV POSTGRES_VERSION=14
SHELL ["/bin/bash", "-c"]
USER hived
WORKDIR /home/hived
RUN mkdir -p /home/hived/bin && \
mkdir /home/hived/shm_dir && \
mkdir /home/hived/wal_dir && \
mkdir /home/hived/datadir && \
chown -Rc hived:users /home/hived/
COPY --from=build --chown=hived:users \
/home/haf_admin/build/hive/programs/hived/hived \
/home/haf_admin/build/hive/programs/cli_wallet/cli_wallet \
/home/haf_admin/build/hive/programs/util/compress_block_log \
/home/hived/bin/
COPY --from=build \
/home/haf_admin/build/extensions/hive_fork_manager/* \
/usr/share/postgresql/${POSTGRES_VERSION}/extension
COPY --from=build \
/home/haf_admin/build/lib/libquery_supervisor.so \
/usr/lib/postgresql/${POSTGRES_VERSION}/lib
COPY --from=build \
/home/haf_admin/build/lib/libhfm-* \
/usr/lib/postgresql/${POSTGRES_VERSION}/lib
# set a variable telling the entrypoint not to try to install the extension from source, we just did it above
ENV HAF_INSTALL_EXTENSION=no
USER haf_admin
WORKDIR /home/haf_admin
COPY --from=build --chown=haf_admin:users "${HAF_SOURCE_DIR}/docker/docker_entrypoint.sh" .
RUN mkdir -p /home/haf_admin/source/scripts /home/haf_admin/source/hive/scripts && chown -R haf_admin:users /home/haf_admin/source
COPY --from=build --chown=haf_admin:users "${HAF_SOURCE_DIR}/scripts/" /home/haf_admin/source/scripts
COPY --from=build --chown=haf_admin:users "${HAF_SOURCE_DIR}/hive/scripts/" /home/haf_admin/source/hive/scripts
COPY --from=build --chown=postgres:postgres "${HAF_SOURCE_DIR}/docker/postgresql.conf" /etc/postgresql/$POSTGRES_VERSION/main/postgresql.conf
COPY --from=build --chown=postgres:postgres "${HAF_SOURCE_DIR}/docker/pg_hba.conf" /etc/postgresql/$POSTGRES_VERSION/main/pg_hba.conf.default
ENV DATADIR=/home/hived/datadir
# Use default location (inside datadir) of shm file. If SHM should be placed on some different device, then set it to mapped volume `/home/hived/shm_dir` and map it in docker run
ENV SHM_DIR=${DATADIR}/blockchain
ENV WAL_DIR=${DATADIR}/blockchain/haf_wal
STOPSIGNAL SIGINT
# JSON rpc service
EXPOSE ${HTTP_PORT}
USER postgres
RUN /etc/init.d/postgresql start \
&& psql --command "CREATE USER root WITH SUPERUSER CREATEDB;"
ENTRYPOINT [ "/home/haf_admin/docker_entrypoint.sh" ]
USER root
\ No newline at end of file
ARG BUILD_TIME
ARG GIT_COMMIT_SHA
ARG GIT_CURRENT_BRANCH
ARG GIT_LAST_LOG_MESSAGE
ARG GIT_LAST_COMMITTER
ARG GIT_LAST_COMMIT_DATE
LABEL org.opencontainers.image.created="$BUILD_TIME"
LABEL org.opencontainers.image.url="https://hive.io/"
LABEL org.opencontainers.image.documentation="https://gitlab.syncad.com/hive/haf"
LABEL org.opencontainers.image.source="https://gitlab.syncad.com/hive/haf"
#LABEL org.opencontainers.image.version="${VERSION}"
LABEL org.opencontainers.image.revision="$GIT_COMMIT_SHA"
LABEL org.opencontainers.image.licenses="MIT"
LABEL org.opencontainers.image.ref.name="HAF Core"
LABEL org.opencontainers.image.title="Hive Application Framework (HAF) Core Image"
LABEL org.opencontainers.image.description="Runs both the PostgreSQL database server and the hived instance that feeds it blockchain data"
LABEL io.hive.image.branch="$GIT_CURRENT_BRANCH"
LABEL io.hive.image.commit.log_message="$GIT_LAST_LOG_MESSAGE"
LABEL io.hive.image.commit.author="$GIT_LAST_COMMITTER"
LABEL io.hive.image.commit.date="$GIT_LAST_COMMIT_DATE"
# syntax=docker/dockerfile:1.4
# docker buildx build --tag registry.gitlab.syncad.com/hive/haf/ci-base-image:$CI_IMAGE_TAG-jmeter --progress=plain --file Dockerfile.jmeter .
ARG CI_IMAGE_TAG=ubuntu22.04-8
FROM phusion/baseimage:jammy-1.0.1 AS build
COPY <<-EOF /opt/patch.sed
s/jtl2junit/m2u/g
s/results file/results file (required)/g
23 i final Options helpOpt = new Options();
23 i helpOpt.addOption("?", "help", false, "");
23 i helpOpt.addOption(new Option("i", CMD_OPTION_INPUT, true, ""));
23 i helpOpt.addOption(new Option("o", CMD_OPTION_OUTPUT, true, ""));
23 i helpOpt.addOption(new Option("t", CMD_OPTION_TESTSUITE_NAME, true, ""));
23 i helpOpt.addOption(new Option("f", M2UConstants.JUNIT_FILTER_SWITCH_NAME, true, ""));
23 i final CommandLine helpCmd = parser.parse( helpOpt, argv );
23 i if (helpCmd.hasOption("help")) {
23 i new HelpFormatter().printHelp( APPLICATION_NAME, options );
23 i System.exit(0);
23 i }
72 i options.addOption("?", "help", false, "Show these usage instructions");
EOF
RUN <<EOF
set -e
# Install system dependencies
apt-get update
apt-get install -y git unzip wget ca-certificates maven openjdk-8-jdk
apt-get clean
rm -rf /var/lib/apt/lists/*
# Prepare tools directory
mkdir -p /opt/tools
cd /opt/tools
# Install Apache JMeter
wget --quiet https://archive.apache.org/dist/jmeter/binaries/apache-jmeter-5.4.3.zip -O jmeter.zip
unzip -qq jmeter.zip
rm jmeter.zip
mv apache-jmeter-5.4.3 jmeter
wget --quiet https://jdbc.postgresql.org/download/postgresql-42.3.1.jar -O /opt/tools/jmeter/lib/postgresql-42.3.1.jar
# Build m2u from source
mkdir -p m2u
git clone --single-branch --branch master https://github.com/tguzik/m2u.git m2u-source
cd m2u-source
find -name CommandLineParser.java -exec sed -i -f /opt/patch.sed {} \;
mvn
# Install m2u
mv target/m2u.jar ../m2u/m2u.jar
cd ../m2u
rm -R ../m2u-source
echo 'java -jar /opt/tools/m2u/m2u.jar "$@"' > m2u
chmod +x m2u
EOF
FROM registry.gitlab.syncad.com/hive/haf/ci-base-image:$CI_IMAGE_TAG
COPY --from=build /opt/tools /opt/tools
USER root
RUN <<EOF
set -e
# Install system dependencies
apt-get update
apt-get install -y openjdk-8-jre
apt-get clean
rm -rf /var/lib/apt/lists/*
# Create symlinks in the bin directory
ln -s /opt/tools/jmeter/bin/jmeter /usr/bin/jmeter
ln -s /opt/tools/m2u/m2u /usr/bin/m2u
EOF
USER haf_admin
RUN <<EOF
set -e
# Install user dependencies
pip3 install prettytable
EOF
\ No newline at end of file
*
\ No newline at end of file
# docker build -f Dockerfile.postgres12 -t psql-tools12 .
FROM phusion/baseimage:focal-1.0.0
ENV LANG=en_US.UTF-8
RUN \
apt-get update \
&& apt-get install -y \
systemd \
postgresql \
postgresql-contrib \
build-essential \
cmake \
libboost-all-dev \
postgresql-server-dev-12 \
git \
python3-pip \
libssl-dev \
libreadline-dev \
&& \
apt-get clean
RUN \
python3 -mpip install \
pexpect \
psycopg2 \
sqlalchemy
ADD . /usr/local/src
WORKDIR /usr/local/src
RUN git submodule update --init --recursive
RUN mkdir build \
&& cd build \
&& cmake .. \
&& make \
&& make install
USER postgres
RUN /etc/init.d/postgresql start \
&& psql --command "SELECT version();" \
&& psql --command "CREATE USER root WITH SUPERUSER CREATEDB;" \
&& cd build \
&& ctest --debug -R test.functional.hive_fork_manager.*
# docker build -f Dockerfile.postgres13 -t psql-tools13 .
FROM phusion/baseimage:0.11
ENV LANG=en_US.UTF-8
RUN \
apt-get update \
&& apt-get install -y wget \
&& wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - \
&& echo "deb http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main" | tee /etc/apt/sources.list.d/postgresql-pgdg.list > /dev/null
RUN \
apt-get update \
&& apt-get install -y \
systemd \
postgresql-13 \
postgresql-contrib-13 \
build-essential \
cmake \
libboost-all-dev \
postgresql-server-dev-all \
git \
python3-pip \
&& \
apt-get clean
RUN \
python3 -mpip install \
pexpect \
psycopg2 \
sqlalchemy
ADD . /usr/local/src
WORKDIR /usr/local/src
RUN git submodule update --init --recursive
RUN mkdir build \
&& cd build \
&& cmake .. \
&& make \
&& make install
USER postgres
RUN /etc/init.d/postgresql start \
&& psql --command "CREATE USER root WITH SUPERUSER CREATEDB;" \
&& cd build \
&& ctest --debug -R test.functional.hive_fork_manager.*
# docker build -f Dockerfile.postgres13 -t psql-tools13 .
FROM phusion/baseimage:0.11
ENV LANG=en_US.UTF-8
RUN \
apt-get update \
&& apt-get install -y wget \
&& wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - \
&& echo "deb http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main" | tee /etc/apt/sources.list.d/postgresql-pgdg.list > /dev/null \
&& wget -O - https://apt.kitware.com/keys/kitware-archive-latest.asc 2>/dev/null | gpg --dearmor - | tee /etc/apt/trusted.gpg.d/kitware.gpg >/dev/null \
&& apt-add-repository "deb https://apt.kitware.com/ubuntu/ $(lsb_release -cs) main"
RUN \
apt-get update \
&& apt-get install -y \
systemd \
autoconf \
postgresql-12 \
postgresql-contrib-12 \
build-essential \
cmake \
libboost-all-dev \
postgresql-server-dev-12 \
git \
python3-pip \
libssl-dev \
libreadline-dev \
libsnappy-dev \
&& \
apt-get clean
RUN \
python3 -mpip install \
pexpect \
psycopg2 \
sqlalchemy \
jinja2
ADD . /usr/local/src
WORKDIR /usr/local/src
USER postgres
RUN /etc/init.d/postgresql start \
&& psql --command "CREATE USER root WITH SUPERUSER CREATEDB;"
USER root
\ No newline at end of file
# Hive Application Framework
Contains the implementation of the Hive Application Framework, which encompasses a hive node plugin and Postgres-specific tools providing
functionalities required by other projects storing blockchain data in the Postgres database.
# Compilation
## Requirements
1. Tested on Ubuntu 20.04
2. postgresql server dev package: `sudo apt-get install postgresql-server-dev-12`
3. ssl dev package: `sudo apt-get install libssl-dev`
4. readline dev package: `sudo apt-get install libreadline-dev`
5. pqxx dev package: `sudo apt-get install libpqxx-dev`
## CMake and make
This will build all the targets from the HAF repository and the `hived` program from the `hive` submodule. You can pass
the same CMake parameters that are used to compile the hived project (for example: `-DCLEAR_VOTES=ON -DBUILD_HIVE_TESTNET=OFF -DHIVE_LINT=OFF`).
1. `git submodule update --init --recursive`
2. create a build directory, for example in the sources root: `mkdir build`
3. `cd build`
4. `cmake -DCMAKE_BUILD_TYPE=Release ..`
5. `make`
### Choosing the version of Postgres to compile with
The CMake variable `POSTGRES_INSTALLATION_DIR` points to the installation folder
with the PostgreSQL binaries. By default it is `/usr/lib/postgresql/12/bin` - the place where Postgres 12
is installed on Ubuntu. An example of choosing a different version of Postgres:
1. create a build directory, for example in the sources root: `mkdir build`
2. `cd build`
3. `cmake -DPOSTGRES_INSTALLATION_DIR=/usr/lib/postgresql/10/bin ..`
4. `make`
# Tests
The project uses ctest to run tests; just execute `make test` in the build directory.
Tests are grouped in a tree by names, with `.` as the branch separator and 'test' as the root.
For example, you can run all functional tests with the command `ctest -R test.functional.*`
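For instance, from the build directory (the name pattern below follows the test tree naming described above):
```
cd build
ctest -N                                        # list all registered tests without running them
ctest -R 'test.functional.hive_fork_manager.*'  # run one branch of the test tree
make test                                       # run the whole suite
```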
# Installation
Postgres plugins have to be copied into the postgres `$libdir/plugins` directory.
You can check the postgres `$libdir` directory with: `pg_config --pkglibdir`
The best option is to execute `make install` from the build directory (may require root privileges).
Note: whenever you build a new version of the hive_fork_manager extension, you have to create a new database.
There is currently no way to upgrade the schema installed in your old HAF database.
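A minimal sketch of the install sequence (use `sudo` only if your postgres directories are root-owned):
```
cd build
pg_config --pkglibdir    # shows the directory where postgres looks for plugin libraries
sudo make install        # copies the built extensions and libraries into place
```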
# Architecture
## Directory structure
```
cmake                        Contains common functions used by the cmake build
common_includes
  include                    Contains library interface header files, shared among the project items
doc                          Contains documentation
hive                         Submodule of the hive project: https://gitlab.syncad.com/hive/hive
src                          Contains sources
  sql_serializer             C++ hived plugin which is compiled together with hived
  transaction_controllers    Library with C++ utilities to control Postgres transactions
  hive_fork_manager          Contains the SQL extension which implements the solution for hive forks
tests                        Contains tests
  integration                Folder for non-unit tests like functional or system tests
    functional               Contains functional tests
  unit                       Contains unit tests and mocks
    mockups                  Contains mocks
```
There is also a `generated` directory inside the build directory. It contains automatically generated headers which can be included
in the code with ```#include "gen/header_file_name.hpp"```
## PSQL extension based on sql script
If there is a need to create a psql extension (one usable with the `CREATE EXTENSION` command), a cmake macro is provided:
`ADD_PSQL_EXTENSION` with parameters:
- NAME - the name of the extension; the current source directory must contain a file <name>.control (see https://www.postgresql.org/docs/10/extend-extensions.html#id-1.8.3.18.11)
- SOURCES - the list of sql scripts; the order of the files matters, since they are concatenated into one sql script

The macro creates a new target `extension.<name_of_extension>`. The command `make extension.<name_of_extension>` will create
a psql extension in `${CMAKE_BINARY_DIR}/extensions/<name>`.
To install the extension, execute `make install`.
Warning: `make install` will install all project items that have already been built. To install only one of them, build it
in a separate build directory, making only that one target, for example: `make extension.hive_fork_manager; make install;`
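A sketch of that single-extension workflow (the directory name is illustrative):
```
mkdir build-ext && cd build-ext
cmake -DCMAKE_BUILD_TYPE=Release ..
make extension.hive_fork_manager   # build only the extension target
sudo make install                  # install just what was built in this directory
```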
### Versioning
Postgres extensions are versioned - the extension control file contains a `default_version` configuration entry. The build system
fills the entry with the repository's git sha.
The corresponding sql script file is named with the same version, as required by postgres.
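A quick way to verify this against a build tree (the path follows the extension output location described above; the sha is whatever your checkout's revision is):
```
# The generated control file carries the git sha as the extension version:
grep default_version build/extensions/hive_fork_manager/hive_fork_manager.control
# expected output shape: default_version = '<git-sha>'
```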
# Known problems
# Overview of the Hive Application Framework (HAF)
The Hive Application Framework was developed to simplify the creation of highly scalable, blockchain-based applications. HAF-based apps are naturally resilient against blockchain forks because HAF contains a mechanism for automatically undoing data generated by forked out blocks.
HAF servers act as an intermediary between the Hive network and Hive applications. HAF-based applications do not normally read data directly from Hive nodes (aka hived process) via a pull model. Instead, HAF applications receive blockchain data via a push model: a hived node is configured with a *sql_serializer* plugin that processes each new block as it arrives at the hived node and writes the associated blockchain data (transactions, operations, virtual operations, etc) to a Postgres database. The server where this Postgres database is running is referred to as a HAF server.
Multiple HAF-based apps can run on a single HAF server, sharing the same HAF database, with each HAF app creating a separate schema where it stores app-specific data.
Since HAF servers receive their data via a push model, they impose a fixed amount of load on the hived node that supplies blockchain data, regardless of the number of HAF apps running on the server. In other words, while too many apps may load down the postgres database and affect the performance of other apps, the hived node supplying the data should continue to function without any problems.
HAF-app users publish transactions on the Hive network when they want to send data to a HAF-based app. Typically these transactions contain custom_json operations that contain information specifically customized for one or more HAF apps. These operations then get included into the blockchain and thereafter inserted into the HAF database for further processing by any HAF application that is watching for those particular operations. In other words, user actions aren't directly sent to app servers. Instead, they are published to the hived peer-to-peer network, included into the decentralized storage of the Hive blockchain, and then indirectly processed by HAF servers reading data from the blockchain.
An understanding of Hive's custom_json operations is critical to developing an interactive Hive app. A custom_json operation allows a user to embed one or more pieces of arbitrary json data into a Hive transaction. Interactive hive apps can utilize this feature to create a set of "commands" that their app recognizes and will process when a user publishes a transaction containing those commands.
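For illustration, the rough JSON shape of such an operation inside a transaction is shown below (the app id and payload are made up; HAF apps typically filter on the `id` field, and the exact field layout depends on the serialization format in use):
```
{
  "type": "custom_json_operation",
  "value": {
    "required_auths": [],
    "required_posting_auths": ["alice"],
    "id": "my-haf-app",
    "json": "{\"command\":\"follow\",\"account\":\"bob\"}"
  }
}
```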
![alt text](./doc/c2_haf.png)
The image above shows the main components of a HAF installation:
* **HIVED**
HAF requires a hived node which syncs blocks with other hived nodes in the Hive peer-to-peer network and pushes this data into the HAF database. This hived node doesn't need to be located on the HAF server itself, although in some cases this may allow for faster filling of a HAF database that needs to be massively synced (i.e. when you need to fill a database with a lot of already-produced blockchain blocks).
* **SQL_SERIALIZER**
sql_serializer is the hived plugin which is responsible for pushing the data from blockchain blocks into the HAF database. The plugin also informs the database about the occurrence of microforks (in which case HAF has to revert database changes that resulted from the forked out blocks). It also signals the database when a block has become irreversible (no longer revertable via a fork), so that the info from that block can be moved from the "reversible" tables inside the database to the "irreversible" tables.
Detailed documentation for the sql_serializer is here: [src/sql_serializer/README.md](./src/sql_serializer/README.md)
* **PostgreSQL database**
A HAF database contains data from blockchain blocks in the form of SQL tables (these tables are stored in the "hive" schema inside the database), and it also contains tables for the data generated by HAF apps running on the HAF server (each app has its own separate schema to encapsulate its data). The system utilizes Postgres authentication and authorization mechanisms to protect HAF-based apps from interfering with each other.
* **HIVE FORK MANAGER** is a PostgreSQL extension that implements HAF's API inside the "hive" schema. This extension must be included when creating a new HAF database. This extension defines the format of block data saved in the database. It also defines a set of SQL stored procedures that are used by HAF apps to get data about the blocks. The SQL_SERIALIZER dumps blocks to the tables defined by the hive_fork_manager. This extension defines the process by which HAF apps consume blocks, and ensures that apps cannot corrupt each other's data. The hive_fork_manager is also responsible for rewinding the state of the tables of all the HAF apps running on the server in the case of a micro-fork occurrence. Detailed documentation for hive_fork_manager is here: [src/hive_fork_manager/Readme.md](./src/hive_fork_manager/Readme.md)
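As a rough sketch of how an app first touches a HAF database (the `haf_block_log` database name matches the default used elsewhere in this repo; `hive.app_create_context` is the context-creation API mentioned above, but exact signatures and table layouts are documented in the hive_fork_manager README and may differ between HAF versions):
```
# Hypothetical first steps of a HAF app, run via psql; treat the names as assumptions:
psql -d haf_block_log -c "SELECT hive.app_create_context('my_app');"  # register the app's context
psql -d haf_block_log -c "SELECT COUNT(*) FROM hive.blocks;"          # block data lives in the hive schema
```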
# HAF server quickstart
**NOTE: The fastest and easiest way to install and maintain a HAF server is to use the docker compose scripts in this repo:
https://gitlab.syncad.com/hive/haf_api_node**
But if you prefer to build your own HAF docker image, you can follow the steps below:
```
git clone --recurse --branch develop https://gitlab.syncad.com/hive/haf.git
mkdir -p workdir/haf-datadir/blockchain
cd workdir
../haf/scripts/ci-helpers/build_instance.sh local-develop ../haf/ registry.gitlab.syncad.com/hive/haf/
```
Now you can either sync your hived node from scratch via the Hive P2P network, or you can download a copy of the blockchain's block_log file from a location you trust (e.g. https://gtg.openhive.network/get/blockchain/compressed/block_log) and replay the block_log. The latter method is typically faster, because a replay doesn't re-validate the blocks, but the first method (syncing from scratch) requires the least trust.
To start your HAF server, type:
```
../haf/scripts/run_hived_img.sh registry.gitlab.syncad.com/hive/haf/instance:local-develop --name=haf-instance --webserver-http-endpoint=8091 --webserver-ws-endpoint=8090 --data-dir=$(pwd)/haf-datadir --replay
```
If you don't have a local block_log file, just remove the `--replay` option from the command line above to get the blockchain blocks using the P2P network via the normal sync procedure.
It is advisable to have your own custom PostgreSQL config file in order to have PostgreSQL logs available locally and to specify custom database access permissions. To do that, before starting your HAF server, just copy the [doc/haf_postgresql_conf.d](./doc/haf_postgresql_conf.d) directory, which contains configuration files where you can override any PostgreSQL setting.
The steps above should create a `haf-datadir/haf_db_store` subdirectory containing a PostgreSQL database holding HAF data, and a `haf-datadir/hived.log` file containing the output of the underlying hived process.
Use `docker container stop haf-instance` to safely stop the service.
See [dockerized deployment details](./doc/HAF_Detailed_Deployment.md#building-and-deploying-haf-inside-a-docker-container) for further details.
# HAF manual build and deployment steps are described here: [doc/HAF_Detailed_Deployment.md](./doc/HAF_Detailed_Deployment.md)
@@ -8,13 +8,21 @@ MACRO( SETUP_OUTPUT_DIRECTORIES )
ENDMACRO()
MACRO( SETUP_COMPILER target_name )
TARGET_COMPILE_OPTIONS( ${target_name} PRIVATE -std=c++14 -Wall )
TARGET_COMPILE_OPTIONS( ${target_name} PRIVATE -Wall )
TARGET_INCLUDE_DIRECTORIES( ${target_name}
PRIVATE
${PROJECT_SOURCE_DIR}/common_includes
"."
${GENERATED_FILES_DIRECTORY_ROOT}
# from hive project
${HAF_DIRECTORY}/hive/libraries/fc/include
)
ENDMACRO()
\ No newline at end of file
ENDMACRO()
MACRO( ENABLE_NINJA_COLORFUL_OUTPUT )
if( "${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" )
add_compile_options(-fcolor-diagnostics)
elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
add_compile_options(-fdiagnostics-color=always)
else()
message( AUTHOR_WARNING "You are using the Ninja generator with an unsupported compiler. Colorful output may not be available." )
endif()
ENDMACRO()
MACRO( GENERATE_GIT_VERSION_FILE )
SET( GIT_REVISION "unknown" )
FIND_PACKAGE(Git)
IF ( GIT_FOUND )
EXECUTE_PROCESS(
COMMAND ${GIT_EXECUTABLE} rev-parse --short HEAD
WORKING_DIRECTORY "${PROJECT_SOURCE_DIR}"
OUTPUT_VARIABLE GIT_REVISION
RESULT_VARIABLE GIT_STATUS
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE
)
if ( ${GIT_STATUS} AND NOT ${GIT_STATUS} EQUAL 0 )
message( FATAL_ERROR "GIT command resulted with error code: ${GIT_STATUS}" )
endif()
IF ( "${GIT_REVISION}" STREQUAL "" )
MESSAGE( FATAL_ERROR "GIT hash could not be retrieved" )
endif()
MESSAGE( STATUS "GIT hash: ${GIT_REVISION}" )
else()
MESSAGE( STATUS "GIT not found" )
MESSAGE( FATAL_ERROR "GIT not found" )
endif()
CONFIGURE_FILE( ${CMAKE_MODULE_PATH}/git_version.hpp.in ${GENERATED_FILES_DIRECTORY}/git_version.hpp @ONLY )
......
MACRO( ADD_BOOST_LIBRARIES target_name static_library )
SET( BOOST_COMPONENTS )
LIST( APPEND BOOST_COMPONENTS
thread
date_time
system
filesystem
program_options
serialization
unit_test_framework
context locale iostreams
)
IF( ${static_library} )
SET( Boost_USE_STATIC_LIBS ON CACHE STRING "ON or OFF" )
else()
SET( Boost_USE_STATIC_LIBS OFF CACHE STRING "ON or OFF" )
endif()
FIND_PACKAGE( Boost 1.53 REQUIRED COMPONENTS ${BOOST_COMPONENTS} )
TARGET_LINK_LIBRARIES( ${target_name} PRIVATE ${Boost_LIBRARIES} )
ENDMACRO()
MACRO( ADD_POSTGRES_LIBRARIES target_name )
FIND_LIBRARY(PQ_LIB pq)
IF ( PQ_LIB )
@@ -32,4 +9,4 @@ ENDMACRO()
MACRO( ADD_POSTGRES_INCLUDES target_name )
TARGET_INCLUDE_DIRECTORIES( ${target_name} PRIVATE ${SERVER_INCLUDE_LIST_DIR} )
ENDMACRO()
\ No newline at end of file
ENDMACRO()
#!/bin/sh
for file in "$@"
do
cat "${file}"
printf '\n'
done
@@ -5,6 +5,8 @@ MACRO( GET_RUNTIME_POSTGRES_VARIABLES )
SET( POSTGRES_LIBDIR "NOTFOUND" )
SET( POSTGRES_SHAREDIR "NOTFOUND" )
SET( SERVER_INCLUDE_LIST_DIR "NOTFOUND" )
SET( POSTGRES_PORT "NOTFOUND" )
SET( POSTGRES_PKGLIBDIR "NOTFOUND" )
EXECUTE_PROCESS(
COMMAND ${POSTGRES_INSTALLATION_DIR}/pg_config --version
@@ -50,11 +52,28 @@ MACRO( GET_RUNTIME_POSTGRES_VARIABLES )
)
LIST( APPEND SERVER_INCLUDE_LIST_DIR ${SERVER_INCLUDE_DIR} )
EXECUTE_PROCESS(
COMMAND /bin/bash ${PROJECT_SOURCE_DIR}/scripts/get_postgres_port.sh ${POSTGRES_VERSION}
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
OUTPUT_VARIABLE POSTGRES_PORT
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE
)
EXECUTE_PROCESS(
COMMAND ${POSTGRES_INSTALLATION_DIR}/pg_config --pkglibdir
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
OUTPUT_VARIABLE POSTGRES_PKGLIBDIR
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE
)
MESSAGE( STATUS "Postgres version: ${POSTGRES_VERSION}" )
MESSAGE( STATUS "Postgres libdir: ${POSTGRES_LIBDIR}" )
MESSAGE( STATUS "Postgres sharedir: ${POSTGRES_SHAREDIR}" )
MESSAGE( STATUS "Postgres serverer include dirs: ${SERVER_INCLUDE_LIST_DIR}" )
MESSAGE( STATUS "Postgres server include dirs: ${SERVER_INCLUDE_LIST_DIR}" )
MESSAGE( STATUS "Postgres server port: ${POSTGRES_PORT}" )
MESSAGE( STATUS "Postgres pkglibdir: ${POSTGRES_PKGLIBDIR}" )
IF ( NOT POSTGRES_LIBDIR )
MESSAGE( FATAL_ERROR "Unknown postgres libdir" )
@@ -68,4 +87,8 @@ MACRO( GET_RUNTIME_POSTGRES_VARIABLES )
MESSAGE( FATAL_ERROR "Unknown postgres include dir" )
ENDIF()
IF ( NOT POSTGRES_PORT )
MESSAGE( FATAL_ERROR "Unknown postgres port" )
ENDIF()
ENDMACRO()
\ No newline at end of file
MACRO( ADD_PSQL_EXTENSION )
CMAKE_PARSE_ARGUMENTS( EXTENSION "" "NAME" SOURCES ${ARGN} )
set(multiValueArgs DEPLOY_SOURCES SCHEMA_SOURCES)
set(OPTIONS "")
set(oneValueArgs NAME )
FILE( MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/extensions )
FILE( MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/extensions/${EXTENSION_NAME} )
SET( extension_path ${CMAKE_BINARY_DIR}/extensions/${EXTENSION_NAME} )
SET( extension_control_file ${EXTENSION_NAME}.control )
CMAKE_PARSE_ARGUMENTS( EXTENSION "${OPTIONS}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN} )
SET( extension_control_script ${extension_path}/${EXTENSION_NAME}--${GIT_REVISION}.sql )
MESSAGE( STATUS "EXTENSION_NAME: ${EXTENSION_NAME}" )
SET( PROXY_GIT_VER "PROXY--${GIT_REVISION}" )
SET( extension_path "${CMAKE_BINARY_DIR}/extensions/${EXTENSION_NAME}" )
MESSAGE( STATUS "VERSION: ${GIT_REVISION}" )
FILE( MAKE_DIRECTORY "${extension_path}" )
ADD_CUSTOM_COMMAND(
OUTPUT ${extension_path}/${extension_control_file} ${extension_path}/${extension_control_script}
COMMAND rm -rf ${extension_path}/*
COMMAND sed 's/@GIT_REVISION@/${GIT_REVISION}/g' ${extension_control_file} > ${extension_path}/${extension_control_file}
COMMAND ${CMAKE_MODULE_PATH}/merge_sql.sh ${EXTENSION_SOURCES} > ${extension_path}/${EXTENSION_NAME}--${PROXY_GIT_VER}.sql
COMMAND sed 's/@GIT_REVISION@/${GIT_REVISION}/g' ${extension_path}/${EXTENSION_NAME}--${PROXY_GIT_VER}.sql > ${extension_path}/${EXTENSION_NAME}--${GIT_REVISION}.sql
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
DEPENDS ${EXTENSION_SOURCES} ${extension_control_file}
COMMENT "Generate ${EXTENSION_NAME} to ${extension_path}"
)
SET( UPDATE_NAME "${EXTENSION_NAME}_update--${HAF_GIT_REVISION_SHA}" )
ADD_CUSTOM_TARGET( extension.${EXTENSION_NAME} ALL DEPENDS ${extension_path}/${extension_control_file} ${extension_path}/${extension_control_script} )
SET( update_control_script ${UPDATE_NAME}.sql )
INSTALL( DIRECTORY ${extension_path}/ DESTINATION ${POSTGRES_SHAREDIR}/extension OPTIONAL )
SET( extension_control_file ${EXTENSION_NAME}.control.in )
SET( extension_control_script ${EXTENSION_NAME}--${HAF_GIT_REVISION_SHA}.sql )
FILE(WRITE ${extension_path}/${update_control_script} "")
FILE(WRITE ${extension_path}/${extension_control_script} "")
SET( temp_deploy_sources deploy_sources.sql )
SET( temp_schema_sources schema_sources.sql )
MESSAGE( STATUS "VERSION: ${HAF_GIT_REVISION_SHA}" )
#MESSAGE( STATUS "EXTENSION_SCHEMA_SOURCES: ${EXTENSION_SCHEMA_SOURCES}")
#MESSAGE( STATUS "EXTENSION_DEPLOY_SOURCES: ${EXTENSION_DEPLOY_SOURCES}")
#cat function
FUNCTION(cat IN_FILE OUT_FILE)
FILE(READ ${IN_FILE} CONTENTS)
FILE(APPEND ${OUT_FILE} "${CONTENTS}")
ENDFUNCTION()
#concatenation of deploy_sources.sql
FOREACH(EXTENSION_DEPLOY_SOURCES ${EXTENSION_DEPLOY_SOURCES})
cat(${EXTENSION_DEPLOY_SOURCES} ${extension_path}/${temp_deploy_sources})
ENDFOREACH()
CONFIGURE_FILE( "${extension_path}/deploy_sources.sql" "${extension_path}/${update_control_script}")
FILE (REMOVE ${extension_path}/${temp_deploy_sources})
#append table schema and function lists
LIST(APPEND EXTENSION_SCHEMA_SOURCES ${EXTENSION_DEPLOY_SOURCES})
#concatenation of schema_sources.sql
FOREACH(EXTENSION_SCHEMA_SOURCES ${EXTENSION_SCHEMA_SOURCES})
cat(${EXTENSION_SCHEMA_SOURCES} ${extension_path}/${temp_schema_sources})
ENDFOREACH()
CONFIGURE_FILE( "${extension_path}/schema_sources.sql" "${extension_path}/${extension_control_script}")
FILE (REMOVE ${extension_path}/${temp_schema_sources})
MESSAGE( STATUS "CONFIGURING the update script generator script: ${CMAKE_BINARY_DIR}/extensions/${EXTENSION_NAME}/hive_fork_manager_update_script_generator.sh" )
CONFIGURE_FILE( "${CMAKE_CURRENT_SOURCE_DIR}/hive_fork_manager_update_script_generator.sh.in"
"${extension_path}/hive_fork_manager_update_script_generator.sh" @ONLY)
# Only needed to be able to run update script from ${CMAKE_CURRENT_SOURCE_DIR} dir
CONFIGURE_FILE( "${CMAKE_CURRENT_SOURCE_DIR}/hive_fork_manager_save_restore_views.sql"
"${extension_path}/hive_fork_manager_save_restore_views.sql" @ONLY)
MESSAGE( STATUS "CONFIGURING the control file: ${CMAKE_BINARY_DIR}/extensions/${EXTENSION_NAME}/hive_fork_manager.control" )
CONFIGURE_FILE( "${CMAKE_CURRENT_SOURCE_DIR}/${extension_control_file}"
"${extension_path}/hive_fork_manager.control" @ONLY)
ADD_CUSTOM_COMMAND(
OUTPUT "${extension_path}/${extension_control_file}" "${extension_path}/${extension_control_script}"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
DEPENDS ${EXTENSION_DEPLOY_SOURCES} ${EXTENSION_SCHEMA_SOURCES} ${extension_control_file}
COMMENT "Generating ${EXTENSION_NAME} files to ${extension_path}"
)
ADD_CUSTOM_COMMAND(
OUTPUT "${extension_path}/${update_control_script}"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
DEPENDS ${EXTENSION_DEPLOY_SOURCES}
COMMENT "Generating ${EXTENSION_NAME} helper update scripts to ${update_path}, final upgrade script: ${extension_path}/${update_control_script}"
)
ADD_CUSTOM_TARGET( extension.${EXTENSION_NAME} ALL DEPENDS ${extension_path}/${extension_control_file} ${extension_path}/${extension_control_script} ${extension_path}/${update_control_script} )
INSTALL ( FILES "${extension_path}/hive_fork_manager_update_script_generator.sh"
DESTINATION ${POSTGRES_SHAREDIR}/extension
PERMISSIONS OWNER_EXECUTE OWNER_WRITE OWNER_READ
GROUP_EXECUTE GROUP_READ
WORLD_EXECUTE WORLD_READ
)
INSTALL ( FILES "${CMAKE_CURRENT_SOURCE_DIR}/hive_fork_manager_save_restore_views.sql"
DESTINATION ${POSTGRES_SHAREDIR}/extension
PERMISSIONS OWNER_WRITE OWNER_READ
GROUP_EXECUTE GROUP_READ
WORLD_EXECUTE WORLD_READ
)
INSTALL ( FILES "${extension_path}/${update_control_script}" "${extension_path}/${EXTENSION_NAME}.control" "${extension_path}/${extension_control_script}"
DESTINATION ${POSTGRES_SHAREDIR}/extension
PERMISSIONS OWNER_WRITE OWNER_READ
GROUP_READ
WORLD_READ
)
ENDMACRO()
MACRO( ADD_RUNTIME_LOADED_LIB target_name )
MACRO( LIBRARIES_PARAMETERS )
SET(options NO_OPTIONS)
SET(oneValueArgs TARGET_NAME )
SET(multiValueArgs LINK_WITH )
CMAKE_PARSE_ARGUMENTS( LIBRARY "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN} )
SET( target_name ${LIBRARY_TARGET_NAME} )
SET( test_lib test_${LIBRARY_TARGET_NAME} )
ENDMACRO()
MACRO( ADD_SUBDIRECTORY_WITH_INCLUDES subdirectory )
INCLUDE_DIRECTORIES( ${subdirectory}/include )
ADD_SUBDIRECTORY( ${subdirectory} )
ENDMACRO()
MACRO( ADD_RUNTIME_LOADED_LIB )
LIBRARIES_PARAMETERS( ${ARGV} )
FILE( GLOB_RECURSE sources ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp )
ADD_LIBRARY( ${target_name} SHARED ${sources} )
SETUP_COMPILER( ${target_name} )
SETUP_CLANG_TIDY( ${target_name} )
ADD_BOOST_LIBRARIES( ${target_name} FALSE )
ADD_POSTGRES_INCLUDES( ${target_name} )
ADD_POSTGRES_LIBRARIES( ${target_name} )
TARGET_LINK_LIBRARIES( ${target_name} PUBLIC ${LIBRARY_LINK_WITH} )
ENDMACRO()
MACRO( ADD_LOADTIME_LOADED_LIB target_name )
SET( test_lib test_${target_name} )
MACRO( ADD_LOADTIME_LOADED_LIB )
LIBRARIES_PARAMETERS( ${ARGV} )
FILE( GLOB_RECURSE sources ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp )
ADD_LIBRARY( ${target_name} MODULE ${sources} )
# test lib used by unit tests
@@ -21,17 +39,19 @@ MACRO( ADD_LOADTIME_LOADED_LIB target_name )
SETUP_COMPILER( ${target_name} )
SETUP_COMPILER( ${test_lib} )
SETUP_CLANG_TIDY( ${target_name} )
ADD_BOOST_LIBRARIES( ${target_name} FALSE )
ADD_BOOST_LIBRARIES( ${test_lib} TRUE )
TARGET_COMPILE_DEFINITIONS( ${test_lib} PRIVATE UNITTESTS )
ADD_POSTGRES_INCLUDES( ${target_name} )
ADD_POSTGRES_INCLUDES( ${test_lib} )
ADD_POSTGRES_LIBRARIES( ${target_name} )
TARGET_LINK_LIBRARIES( ${target_name} PUBLIC ${LIBRARY_LINK_WITH} )
TARGET_LINK_LIBRARIES( ${test_lib} PUBLIC ${LIBRARY_LINK_WITH} )
ENDMACRO()
MACRO( ADD_STATIC_LIB target_name )
SET( test_lib test_${target_name} )
MACRO( ADD_STATIC_LIB )
LIBRARIES_PARAMETERS( ${ARGV} )
FILE( GLOB_RECURSE sources ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp )
ADD_LIBRARY( ${target_name} STATIC ${sources} )
# test lib used by unit tests
@@ -40,11 +60,12 @@ MACRO( ADD_STATIC_LIB target_name )
SETUP_COMPILER( ${target_name} )
SETUP_COMPILER( ${test_lib} )
SETUP_CLANG_TIDY( ${target_name} )
ADD_BOOST_LIBRARIES( ${target_name} TRUE )
ADD_BOOST_LIBRARIES( ${test_lib} TRUE )
TARGET_COMPILE_DEFINITIONS( ${test_lib} PRIVATE UNITTESTS )
ADD_POSTGRES_INCLUDES( ${target_name} )
ADD_POSTGRES_INCLUDES( ${test_lib} )
ADD_POSTGRES_LIBRARIES( ${target_name} )
TARGET_LINK_LIBRARIES( ${target_name} PUBLIC ${LIBRARY_LINK_WITH} )
TARGET_LINK_LIBRARIES( ${test_lib} PUBLIC ${LIBRARY_LINK_WITH} )
ENDMACRO()