diff --git a/.dockerignore b/.dockerignore
index 261e54b4dcfc5990a0afa785628de2d9099c5626..c02c45c3dc141587f2eb3764b9b576676d2d751b 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -9,7 +9,6 @@ docker/.env*
 docker/docker-compose*
 docker/README.md
 gui/
-haf/
 misc/
 server/
 tests/
@@ -18,7 +17,6 @@
 .gitattributes
 .gitignore
 .gitlab-ci.yml
-.gitmodules
 .sqlfluff
 docker-bake.hcl
 Dockerfile
diff --git a/.gitignore b/.gitignore
index 0cd99382245c88f0f03580564dfb1236a6c02dde..844e1dcf590562bc34b3a3b20e1d20b51c8f2c60 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,6 @@
 tests/regression/results/*.out
 endpoints_openapi
 venv/
+
+# Downloaded from common-ci-configuration at runtime
+scripts/process_openapi.py
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 68459e1a41e11071af10b40cb0b1c539bb985766..02daf60082d1c44350588b163b14bbe682275e33 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -8,18 +8,17 @@ variables:
   DOCKER_BUILDER_TAG: "$CI_COMMON_JOB_VERSION"
   DOCKER_DIND_TAG: "$CI_COMMON_JOB_VERSION"
   IMAGE_REMOVER_TAG: "$CI_COMMON_JOB_VERSION"
+  # common-ci-configuration reference for fetching scripts
+  COMMON_CI_REF: "develop"
   # Git configuration
   # Fetch strategy reuses workspace between jobs, reducing GitLab server load.
   # Full clone (depth 0) enables efficient incremental fetches - shallow clones
   # don't reduce server CPU and make fetch less effective.
   GIT_STRATEGY: fetch
   GIT_DEPTH: 0
-  GIT_SUBMODULE_DEPTH: 0
-  GIT_SUBMODULE_STRATEGY: recursive
   # Temporary: separate clone path prevents clone-strategy jobs from erasing
   # fetch workspaces during transition. Remove once all projects use fetch.
   GIT_CLONE_PATH: $CI_BUILDS_DIR/fetch/$CI_RUNNER_SHORT_TOKEN/$CI_CONCURRENT_ID/$CI_PROJECT_PATH
-  GIT_SUBMODULE_UPDATE_FLAGS: --jobs 4
 
 include:
   - template: Workflows/Branch-Pipelines.gitlab-ci.yml
@@ -33,7 +32,6 @@ default:
   hooks:
     pre_get_sources_script:
       # Clean corrupt git state left by cancelled pipelines (see GitLab #296638, #4600)
-      # Wrapped in subshell to avoid changing working directory for subsequent git operations
      - |
        (
          cd "${CI_PROJECT_DIR:-/builds}" 2>/dev/null || exit 0
@@ -47,31 +45,6 @@ default:
            echo "pre_get_sources: main repository corrupt, forcing fresh clone"
            rm -rf .git
          else
-            # Main repo OK - check and clean corrupt submodules
-            # Check both the working dir and .git/modules/ since either can be corrupt
-            if [ -f ".gitmodules" ]; then
-              git config --file .gitmodules --get-regexp path 2>/dev/null | awk '{print $2}' | while read submod; do
-                needs_clean=false
-                [ -z "$submod" ] && continue
-                # Check if submodule working directory exists but is corrupt
-                if [ -d "$submod" ] && [ -f "$submod/.git" ]; then
-                  if ! git -C "$submod" rev-parse HEAD >/dev/null 2>&1; then
-                    needs_clean=true
-                  fi
-                fi
-                # Check if .git/modules exists but is corrupt (even if working dir is gone)
-                if [ -d ".git/modules/$submod" ]; then
-                  if ! git --git-dir=".git/modules/$submod" rev-parse HEAD >/dev/null 2>&1; then
-                    echo "pre_get_sources: $submod corrupt (rev-parse failed)"
-                    needs_clean=true
-                  fi
-                fi
-                if [ "$needs_clean" = true ]; then
-                  echo "pre_get_sources: cleaning corrupt submodule: $submod"
-                  rm -rf "$submod" ".git/modules/$submod"
-                fi
-              done
-            fi
            echo "pre_get_sources: existing repo OK"
          fi
        else
diff --git a/.gitmodules b/.gitmodules
deleted file mode 100644
index 3f9daf596e8d054eaa46ea447aecb7602e1d88da..0000000000000000000000000000000000000000
--- a/.gitmodules
+++ /dev/null
@@ -1,3 +0,0 @@
-[submodule "haf"]
-	path = haf
-	url = ../haf.git
diff --git a/CLAUDE.md b/CLAUDE.md
index 3ba44dbe7ae37b7ef61122acd21abef7c83cd2f2..71056964584244cd6854ba5e5cb01054a422e6ec 100644
--- a/CLAUDE.md
+++ b/CLAUDE.md
@@ -18,7 +18,7 @@ NFT Tracker is a blockchain-based NFT (Non-Fungible Token) management and tracki
 | Database | PostgreSQL 14+ (via HAF - Hive Application Framework) |
 | API | PostgREST (auto-generates REST from PostgreSQL) |
 | Gateway | Nginx/OpenResty (URL rewriting, proxying) |
-| Framework | HAF submodule for blockchain integration |
+| Framework | HAF database extension (no submodule, uses common-ci-configuration) |
 | Testing | pg_regress (SQL), Tavern (API tests) |
 | CI/CD | GitLab CI with Docker Buildx |
 
@@ -45,6 +45,7 @@ nft_tracker/
 │   ├── uninstall_app.sh   # Remove from PostgreSQL
 │   ├── process_blocks.sh  # Main block processor
 │   ├── start_postgrest.sh # Start API server
+│   ├── setup_db.sh        # HAF database setup (for tests)
 │   └── ci-helpers/        # CI build scripts
 ├── docker/                # Docker configs
 │   └── scripts/           # Entrypoint, healthcheck
@@ -56,7 +57,6 @@ nft_tracker/
 │   ├── api_tests/         # Tavern REST API tests
 │   ├── prelude.sql        # Test helper functions
 │   └── setup.sql          # Test setup
-├── haf/                   # HAF submodule
 ├── Dockerfile             # Main app container
 ├── Dockerfile.rewriter    # Nginx gateway container
 ├── docker-bake.hcl        # Docker Buildx config
@@ -190,7 +190,7 @@ SWAGGER_URL          # default: localhost
 
 ## Key Patterns
 
-- **OpenAPI from SQL**: Spec embedded in SQL comments, processed by HAF's `process_openapi.py`
+- **OpenAPI from SQL**: Spec embedded in SQL comments, processed by `process_openapi.py` (fetched from common-ci-configuration)
 - **Symbol Format**: `namespace/name` (e.g., `alice/CARD`)
 - **Custom Types**: `nft_type`, `nft_instance` composite types for API responses
 - **Authorization**: Symbol creator manages authorized issuers; instances have single holder
diff --git a/haf b/haf
deleted file mode 160000
index bf820442979eff6c7cb7e387f26cd4ccf9345f3c..0000000000000000000000000000000000000000
--- a/haf
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit bf820442979eff6c7cb7e387f26cd4ccf9345f3c
diff --git a/scripts/openapi_rewrite.sh b/scripts/openapi_rewrite.sh
index be7708e745137e6eda3d807681e1ac1cc11b6341..683346bf13c89d0ba814284a4ac0d348a41fdfd5 100755
--- a/scripts/openapi_rewrite.sh
+++ b/scripts/openapi_rewrite.sh
@@ -5,7 +5,16 @@
 set -o pipefail
 
 SCRIPTDIR="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 || exit 1; pwd -P )"
-haf_dir="$SCRIPTDIR/../haf"
+# Fetch process_openapi.py from common-ci-configuration if not available locally
+COMMON_CI_REF="${COMMON_CI_REF:-develop}"
+COMMON_CI_URL="${COMMON_CI_URL:-https://gitlab.syncad.com/hive/common-ci-configuration/-/raw/${COMMON_CI_REF}}"
+PROCESS_OPENAPI="${SCRIPTDIR}/process_openapi.py"
+
+if [[ ! -f "$PROCESS_OPENAPI" ]]; then
+  echo "Fetching process_openapi.py from common-ci-configuration (ref: ${COMMON_CI_REF})..."
+  curl -fsSL "${COMMON_CI_URL}/haf-app-tools/python/process_openapi.py" -o "$PROCESS_OPENAPI"
+fi
+
 endpoints="endpoints"
 rewrite_dir="${endpoints}_openapi"
 input_file="rewrite_rules.conf"
@@ -91,7 +100,7 @@ echo "$ENDPOINTS_IN_ORDER"
 
 # run openapi rewrite script
 # shellcheck disable=SC2086
-python3 $haf_dir/scripts/process_openapi.py $OUTPUT $ENDPOINTS_IN_ORDER
+python3 "$PROCESS_OPENAPI" $OUTPUT $ENDPOINTS_IN_ORDER
 
 # Create rewrite_rules.conf
 reverse_lines > "$temp_output_file"
diff --git a/scripts/setup_db.sh b/scripts/setup_db.sh
new file mode 100755
index 0000000000000000000000000000000000000000..efe93876a6758ac5b1b91fb67c862ee05a8abda2
--- /dev/null
+++ b/scripts/setup_db.sh
@@ -0,0 +1,123 @@
+#!/bin/bash
+#
+# Setup HAF database for testing
+# Based on haf/scripts/setup_db.sh but simplified for nft_tracker tests
+#
+# This script creates a fresh HAF database with the hive_fork_manager extension.
+# The unix user account executing this script must be associated to the DB_ADMIN role.
+#
+
+set -euo pipefail
+
+print_help () {
+    echo "Usage: $0 [OPTION[=VALUE]]..."
+    echo
+    echo "Create and setup a database to be filled with HAF data. Drops any already existing HAF database!!!"
+    echo "OPTIONS:"
+    echo "  --host=VALUE         Specify a PostgreSQL host location (defaults to /var/run/postgresql)."
+    echo "  --port=NUMBER        Specify a PostgreSQL operating port (defaults to 5432)."
+    echo "  --haf-db-name=NAME   Specify the HAF database name to use."
+    echo "  --haf-app-user=NAME  Specify name of a database role to act as an APP user of the HAF database."
+    echo "                       Specify multiple times to add multiple roles."
+    echo "                       The role MUST already exist on the Postgres cluster!!!"
+    echo "  --haf-db-admin=NAME  Specify name of a database admin role with permission to create the database and install the HAF extension."
+    echo "                       The role MUST already exist on the Postgres cluster!!!"
+    echo "                       If omitted, defaults to haf_admin role."
+    echo "  --no-create-schema   Skips the final steps of creating the schema, extension and database roles."
+    echo "  --version            Specify the hive fork manager version to use."
+    echo "  --help               Display this help screen and exit."
+    echo
+}
+
+DB_NAME="haf_block_log"
+DB_ADMIN="haf_admin"
+HAF_TABLESPACE_NAME="haf_tablespace"
+
+DEFAULT_DB_USERS=()
+DB_USERS=()
+POSTGRES_HOST="/var/run/postgresql"
+POSTGRES_PORT=5432
+NO_CREATE_SCHEMA=false
+VERSION=
+
+while [ $# -gt 0 ]; do
+  case "$1" in
+    --host=*)
+        POSTGRES_HOST="${1#*=}"
+        ;;
+    --port=*)
+        POSTGRES_PORT="${1#*=}"
+        ;;
+    --haf-db-name=*)
+        DB_NAME="${1#*=}"
+        ;;
+    --haf-app-user=*)
+        APP_USER="${1#*=}"
+        DB_USERS+=("$APP_USER")
+        DEFAULT_DB_USERS=() # clear all default users.
+        ;;
+    --haf-db-admin=*)
+        DB_ADMIN="${1#*=}"
+        ;;
+    --help)
+        print_help
+        exit 0
+        ;;
+    --no-create-schema)
+        NO_CREATE_SCHEMA=true
+        ;;
+    --version=*)
+        VERSION="${1#*=}"
+        if [ "$VERSION" != "${VERSION//[^a-zA-Z0-9]/}" ]; then
+            echo "Invalid version $VERSION"
+            exit 3
+        fi
+        VERSION="VERSION '${VERSION}'"
+        ;;
+    -*)
+        echo "ERROR: '$1' is not a valid option."
+        echo
+        print_help
+        exit 1
+        ;;
+    *)
+        echo "ERROR: '$1' is not a valid argument."
+        echo
+        print_help
+        exit 2
+        ;;
+  esac
+  shift
+done
+
+POSTGRES_ACCESS="--host $POSTGRES_HOST --port $POSTGRES_PORT"
+
+# ${arr[@]+...} guards the empty-array case: bash < 4.4 treats "${arr[@]}" on an empty array as unbound under set -u.
+DB_USERS+=(${DEFAULT_DB_USERS[@]+"${DEFAULT_DB_USERS[@]}"})
+
+# Create database
+sudo -Enu "$DB_ADMIN" psql -aw $POSTGRES_ACCESS -d postgres -v ON_ERROR_STOP=on -U "$DB_ADMIN" -f - << EOF
+    DROP DATABASE IF EXISTS "$DB_NAME";
+    CREATE DATABASE "$DB_NAME" WITH OWNER $DB_ADMIN TABLESPACE ${HAF_TABLESPACE_NAME} encoding UTF8 LC_COLLATE 'C' LC_CTYPE 'C' TEMPLATE template0;
+EOF
+
+
+if [ "${NO_CREATE_SCHEMA}" = true ]; then
+    exit 0
+fi
+
+
+# Install HAF extension
+sudo -Enu "$DB_ADMIN" psql -aw $POSTGRES_ACCESS -d "$DB_NAME" -v ON_ERROR_STOP=on -U "$DB_ADMIN" -c "CREATE EXTENSION hive_fork_manager $VERSION CASCADE;"
+
+sudo -Enu "$DB_ADMIN" psql -aw $POSTGRES_ACCESS -d postgres -v ON_ERROR_STOP=on -U "$DB_ADMIN" -f - << EOF
+    GRANT CREATE ON DATABASE "$DB_NAME" to hive_applications_owner_group;
+EOF
+
+# Grant CREATE to each requested app role (empty-array-safe expansion for set -u).
+for u in ${DB_USERS[@]+"${DB_USERS[@]}"}; do
+    sudo -Enu "$DB_ADMIN" psql -aw $POSTGRES_ACCESS -d postgres -v ON_ERROR_STOP=on -U "$DB_ADMIN" -f - << EOF
+    GRANT CREATE ON DATABASE "$DB_NAME" TO $u;
+EOF
+
+done
diff --git a/tests/api_tests/run_api_tests.sh b/tests/api_tests/run_api_tests.sh
index 998e8a840958467d1611bfd25104b27c62eea17d..7475bd789732b93b6e57453a3851666b00ecfebe 100755
--- a/tests/api_tests/run_api_tests.sh
+++ b/tests/api_tests/run_api_tests.sh
@@ -4,7 +4,7 @@ set -euo pipefail
 
 SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
 PROJECT_ROOT="$(git -C "$SCRIPT_DIR" rev-parse --show-toplevel)"
-SETUP_DB_SCRIPT="$PROJECT_ROOT/haf/scripts/setup_db.sh"
+SETUP_DB_SCRIPT="$PROJECT_ROOT/scripts/setup_db.sh"
 INSTALL_APP_SCRIPT="$PROJECT_ROOT/scripts/install_app.sh"
 START_POSTGREST_SCRIPT="$PROJECT_ROOT/scripts/start_postgrest.sh"
 POSTGREST_CONF="$PROJECT_ROOT/postgrest.conf"
diff --git a/tests/regression/launcher.sh b/tests/regression/launcher.sh
index aac4df625e5d8f38f250440a585b1989e09d5938..c150dda2e1d4363e2df51a2f5c7b5010837a6554 100755
--- a/tests/regression/launcher.sh
+++ b/tests/regression/launcher.sh
@@ -3,7 +3,7 @@ set -e
 
 SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
 PROJECT_ROOT="$(git -C "$SCRIPT_DIR" rev-parse --show-toplevel)"
-SETUP_SCRIPT="$PROJECT_ROOT/haf/scripts/setup_db.sh"
+SETUP_SCRIPT="$PROJECT_ROOT/scripts/setup_db.sh"
 
 : "${PGPORT:?Error: PGPORT is not defined}"
 : "${PGHOST:?Error: PGHOST is not defined}"