Filtered calls don't work with HAFAH
Call:
{
id: 0,
jsonrpc: '2.0',
method: 'condenser_api.get_account_history',
params: [ 'deathwing', -1, 1000, null, '1' ]
}
Response:
{
jsonrpc: '2.0',
error: { code: -32003, message: 'unknown SQL exception' },
id: 0
}
HAFAH:
2022-10-04 19:21:02,359 - hafah.adapter - ERROR - [pid=106] Got exception `OperationalError` in query `SELECT * FROM hafah_python.ah_get_account_history_json( NULL, 1, 'deathwing', 9223372036854775807 ::BIGINT, 1000, false, true )`
2022-10-04 19:21:02,359 - SQL - ERROR - got unknown SQL exception: OperationalError (psycopg2.errors.UndefinedFile) could not access file "$libdir/libhfm-b9d2537.so": No such file or directory
CONTEXT: SQL statement "SELECT -- hafah_python.ah_get_account_history
(
CASE
WHEN ho.trx_in_block < 0 THEN '0000000000000000000000000000000000000000'
ELSE encode( (SELECT htv.trx_hash FROM hive.transactions_view htv WHERE ho.trx_in_block >= 0 AND ds.block_num = htv.block_num AND ho.trx_in_block = htv.trx_in_block), 'hex')
END
) AS _trx_id,
ds.block_num AS _block,
(
CASE
WHEN ho.trx_in_block < 0 THEN 4294967295
ELSE ho.trx_in_block
END
) AS _trx_in_block,
ho.op_pos::BIGINT AS _op_in_trx,
hot.is_virtual AS virtual_op,
btrim(to_json(ho."timestamp")::TEXT, '"'::TEXT) AS formated_timestamp,
(
CASE
WHEN _is_legacy_style THEN hive.get_legacy_style_operation(ho.body)::TEXT
ELSE ho.body
END
) AS _value,
ds.account_op_seq_no AS _operation_id
FROM
(
SELECT hao.operation_id, hao.op_type_id,hao.block_num, hao.account_op_seq_no
FROM hive.account_operations_view hao
WHERE hao.account_id = __account_id AND hao.account_op_seq_no <= _start AND hao.block_num <= __upper_block_limit AND (__use_filter IS NULL OR hao.op_type_id=ANY(__resolved_filter))
ORDER BY hao.account_op_seq_no DESC
LIMIT _limit
) ds
JOIN LATERAL (SELECT hov.body, hov.op_pos, hov.timestamp, hov.trx_in_block FROM hive.operations_view hov WHERE ds.operation_id = hov.id) ho ON TRUE
JOIN LATERAL (select ot.is_virtual FROM hive.operation_types ot WHERE ds.op_type_id = ot.id) hot on true
ORDER BY ds.account_op_seq_no ASC"
PL/pgSQL function hafah_python.ah_get_account_history(bigint,bigint,character varying,bigint,bigint,boolean,boolean) line 38 at RETURN QUERY
SQL statement "SELECT (
WITH result AS (SELECT ARRAY(
SELECT json_build_array(
ops.operation_id,
(
CASE
WHEN _is_legacy_style THEN to_jsonb(ops) - 'operation_id'
ELSE jsonb_set(to_jsonb(ops), ARRAY['operation_id']::TEXT[], '0'::JSONB, FALSE)
END
)
) FROM (
SELECT
_block AS "block",
_value ::json AS "op",
_op_in_trx AS "op_in_trx",
_timestamp AS "timestamp",
_trx_id AS "trx_id",
_trx_in_block AS "trx_in_block",
_virtual_op AS "virtual_op",
_operation_id AS "operation_id"
FROM
hafah_python.ah_get_account_history(
hafah_python.numeric_to_bigint(_filter_low),
hafah_python.numeric_to_bigint(_filter_high),
_account,
_start,
_limit,
_include_reversible,
_is_legacy_style
)
) ops
) AS a)
SELECT
(
CASE
WHEN _is_legacy_style THEN to_json(result.a)
ELSE json_build_object('history', to_json(result.a))
END
)
FROM result
)"
PL/pgSQL function hafah_python.ah_get_account_history_json(numeric,numeric,character varying,bigint,bigint,boolean,boolean) line 3 at RETURN
[SQL: SELECT * FROM hafah_python.ah_get_account_history_json( NULL, 1, 'deathwing', 9223372036854775807 ::BIGINT, 1000, false, true )]
(Background on this error at: http://sqlalche.me/e/14/e3q8)
Notes:
HAFAH was built directly from the latest `develop` branch (using the Dockerfile).
account_history_api.get_account_history
called with the same parameters returns "Request Timeout".