Commit 0f810e9c authored by Anthony Martin

creates backfill #155

parent f6a86c2f
@@ -97,3 +97,37 @@ class PrefixsubTest(unittest.TestCase):
}
}
self.assertEqual(object, expected_result)
def test_transform_prefix_transaction_ignore(self):
object = ["submit_transaction", {
"tx": {
"wif_sigs": ["zprivatekey:posting-tnmanz"],
"operations": [{
"type": "custom_json_operation",
"value": {
"json": "[\"follow\",{\"follower\":\"alice\",\"following\":\"bob\",\"what\":[\"blog\"]}]",
"required_auths": [],
"id": "follow",
"required_posting_auths": ["alice"]
}
}]
},
"esc": "z"
}]
prefixsub.transform_prefix(object)
expected_result = ["submit_transaction", {
"tx": {
"wif_sigs": ["zprivatekey:posting-tnmanz"],
"operations": [{
"type": "custom_json_operation",
"value": {
"json": "[\"follow\",{\"follower\":\"alice\",\"following\":\"bob\",\"what\":[\"blog\"]}]",
"required_auths": [],
"id": "follow",
"required_posting_auths": ["alice"]
}
}]
},
"esc": "z"
}]
self.assertEqual(object, expected_result)
@@ -25,7 +25,7 @@ def str2bool(str_arg):
"""
return True if str_arg.lower() == 'true' else (False if str_arg.lower() == 'false' else None)
def repack_operations(conf, keydb, min_block, max_block):
def repack_operations(conf, keydb, min_block, max_block, from_blocks_ago, to_blocks_ago):
"""
Uses configuration file data to acquire operations from source node
blocks/transactions and repack them in new transactions one to one.
@@ -39,6 +39,12 @@ def repack_operations(conf, keydb, min_block, max_block):
if min_block == 0:
min_block = dgpo["head_block_number"]
if from_blocks_ago != -1:
min_block = dgpo["head_block_number"] - from_blocks_ago
if to_blocks_ago != -1:
max_block = dgpo["head_block_number"] - to_blocks_ago
ported_operations = conf["ported_operations"]
ported_types = set([op["type"] for op in ported_operations])
@@ -91,7 +97,7 @@ def op_for_role(op, conf, keydb, ported_operations):
# Assume it's "active" as a fallback.
return {"operations" : [op], "wif_sigs" : [keydb.get_privkey(tx_signer, "active")]}
def build_actions(conf, min_block, max_block):
def build_actions(conf, min_block, max_block, from_blocks_ago, to_blocks_ago):
"""
Packs transactions rebuilt with operations acquired from source node into blocks of configured size.
"""
@@ -102,7 +108,7 @@
retry_count += 1
try:
for b in util.batch(repack_operations(conf, keydb, min_block, max_block), conf["transactions_per_block"]):
for b in util.batch(repack_operations(conf, keydb, min_block, max_block, from_blocks_ago, to_blocks_ago), conf["transactions_per_block"]):
for tx in b:
yield ["submit_transaction", {"tx" : tx}]
retry_count = 0
@@ -130,6 +136,8 @@ def main(argv):
parser.add_argument("-c", "--conffile", default="gatling.conf", dest="conffile", metavar="FILE", help="Specify configuration file")
parser.add_argument("-f", "--from_block", default=-1, dest="min_block_num", metavar="INT", help="Stream from block_num")
parser.add_argument("-t", "--to_block", default=-1, dest="max_block_num", metavar="INT", help="Stream to block_num")
parser.add_argument("-fb", "--from_blocks_ago", default=-1, dest="from_blocks_ago", metavar="INT", help="Stream from relative block_num")
parser.add_argument("-tb", "--to_blocks_ago", default=-1, dest="to_blocks_ago", metavar="INT", help="Stream to relative block_num")
parser.add_argument("-o", "--outfile", default="-", dest="outfile", metavar="FILE", help="Specify output file, - means stdout")
args = parser.parse_args(argv[1:])
@@ -143,6 +151,8 @@ def main(argv):
min_block_num = int(args.min_block_num)
max_block_num = int(args.max_block_num)
from_blocks_ago = int(args.from_blocks_ago)
to_blocks_ago = int(args.to_blocks_ago)
if min_block_num == -1:
min_block_num = int(conf["min_block_number"])
@@ -150,7 +160,7 @@
if max_block_num == -1:
max_block_num = int(conf["max_block_number"])
for action in build_actions(conf, min_block_num, max_block_num):
for action in build_actions(conf, min_block_num, max_block_num, from_blocks_ago, to_blocks_ago):
outfile.write(util.action_to_str(action))
outfile.write("\n")
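For reference, the two new flags resolve to absolute block numbers against the node's current head block. Below is a minimal sketch of that arithmetic, assuming a hypothetical helper name resolve_block_range and an illustrative head block number; the real logic lives inline in repack_operations() above.

# Sketch only: mirrors the --from_blocks_ago / --to_blocks_ago handling.
# resolve_block_range() and the sample head block number are hypothetical.
def resolve_block_range(head_block_number, min_block, max_block,
                        from_blocks_ago=-1, to_blocks_ago=-1):
    if min_block == 0:
        min_block = head_block_number
    if from_blocks_ago != -1:
        min_block = head_block_number - from_blocks_ago
    if to_blocks_ago != -1:
        max_block = head_block_number - to_blocks_ago
    return min_block, max_block

# Stream roughly the most recent 100 blocks, e.g. "-fb 100 -tb 0" on the
# command line (invocation shown for illustration only):
print(resolve_block_range(5000, 0, -1, from_blocks_ago=100, to_blocks_ago=0))
# -> (4900, 5000)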
@@ -165,12 +165,6 @@ def main(argv):
line = line.strip()
cmd, args = json.loads(line)
if metadata and transactions_count > 0 and transactions_count % transactions_per_block == 0:
generate_blocks(steemd, {"count": 1}, cached_dgpo=cached_dgpo, produce_realtime=produce_realtime)
cached_dgpo.reset()
if cmd == "wait_blocks" and args.get("count") == 1 and not args.get("miss_blocks"):
continue
try:
if cmd == "metadata":
metadata = args
@@ -225,6 +219,13 @@
fail_file.flush()
if die_on_fail:
raise
if metadata and transactions_count > 0 and transactions_count % transactions_per_block == 0:
generate_blocks(steemd, {"count": 1}, cached_dgpo=cached_dgpo, produce_realtime=produce_realtime)
cached_dgpo.reset()
if cmd == "wait_blocks" and args.get("count") == 1 and not args.get("miss_blocks"):
continue
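The block-production check and the single-block wait_blocks skip move from before the command dispatch to after the error handling, so a block is only generated once the current action has actually been processed. The toy model below illustrates the effect of that ordering; replay(), its simplified command handling, and the sample actions are hypothetical and only approximate the real loop.

import json

def replay(lines, transactions_per_block=2):
    # Toy model of the reordered loop; not the real script.
    transactions_count = 0
    blocks_generated = 0
    metadata = None
    for line in lines:
        cmd, args = json.loads(line)
        # Command handling happens first (heavily simplified here).
        if cmd == "metadata":
            metadata = args
        elif cmd == "submit_transaction":
            transactions_count += 1
        # After this change the block check runs only once the command has
        # been handled, so the transaction just submitted counts toward the
        # block that gets produced.
        if metadata and transactions_count > 0 and transactions_count % transactions_per_block == 0:
            blocks_generated += 1
        if cmd == "wait_blocks" and args.get("count") == 1 and not args.get("miss_blocks"):
            continue  # explicit single-block waits are still swallowed
    return blocks_generated

actions = [
    '["metadata", {"actions:count": 3}]',
    '["submit_transaction", {"tx": {}}]',
    '["wait_blocks", {"count": 1}]',
    '["submit_transaction", {"tx": {}}]',
]
print(replay(actions))  # -> 1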
if __name__ == "__main__":
main(sys.argv)
@@ -6,6 +6,7 @@ import hashlib
import itertools
import json
import os
import os.path
import random
import sys
@@ -383,6 +384,10 @@ def build_actions(conf, silent=True):
start_time = now - datetime.timedelta(seconds=predicted_block_count * STEEM_BLOCK_INTERVAL)
miss_blocks = int((start_time - genesis_time).total_seconds()) // STEEM_BLOCK_INTERVAL
miss_blocks = max(miss_blocks-1, 0)
origin_api = None
snapshot_head_block_num = None
snapshot_semver = None
has_backfill = False
metadata = {
"txgen:semver": __version__,
@@ -408,6 +413,7 @@ def build_actions(conf, silent=True):
major_version, minor_version = semver.split('.')
major_version = int(major_version)
minor_version = int(minor_version)
backfill_file = conf.get("backfill_file", None)
if major_version == SNAPSHOT_MAJOR_VERSION_SUPPORTED:
if not silent:
@@ -418,14 +424,25 @@
if minor_version < SNAPSHOT_MINOR_VERSION_SUPPORTED:
print("WARNING: Older snapshot encountered.", file=sys.stderr)
if backfill_file and os.path.exists(backfill_file) and os.path.isfile(backfill_file):
num_lines = sum(1 for line in open(backfill_file))
if num_lines > 0:
metadata["backfill_actions:count"] = num_lines
metadata["actions:count"] += num_lines
has_backfill = True
yield ["metadata", metadata]
yield ["wait_blocks", {"count" : 1, "miss_blocks" : miss_blocks}]
yield ["submit_transaction", {"tx" : build_initminer_tx(conf, keydb)}]
for b in util.batch(build_setup_transactions(account_stats, conf, keydb, silent), transactions_per_block):
yield ["wait_blocks", {"count" : 1}]
for tx in b:
yield ["submit_transaction", {"tx" : tx}]
if has_backfill:
with open(conf["backfill_file"], "rb") as f:
for line in f:
yield line
for tx in update_witnesses(conf, keydb, "init"):
yield ["submit_transaction", {"tx" : tx}]
for tx in vote_accounts(conf, keydb, "elector", "init"):
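The backfill file appears to hold one action per line, matching what the gatling output above writes (util.action_to_str(action) followed by a newline); its line count is added to the metadata up front, and the lines themselves are replayed right after the setup transactions. A minimal sketch of that splice, assuming each non-empty line is simply re-emitted:

# Hypothetical helper mirroring the backfill handling above; backfill_lines()
# is illustrative only and not part of the patch.
import os.path

def backfill_lines(backfill_file):
    if not (backfill_file and os.path.isfile(backfill_file)):
        return
    with open(backfill_file, "r") as f:
        for line in f:
            line = line.strip()
            if line:
                yield line

# Usage sketch: count the actions for the metadata block, then replay them.
# actions = list(backfill_lines("backfill.actions"))
# metadata["backfill_actions:count"] = len(actions)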
@@ -2,6 +2,7 @@
"transactions_per_block" : 40,
"snapshot_file" : "snapshot.json",
"backfill_file" : "backfill.actions",
"min_vesting_per_account" : {"amount" : "1", "precision" : 3, "nai" : "@@000000021"},
"total_port_balance" : {"amount" : "200000000000", "precision" : 3, "nai" : "@@000000021"},
@@ -62,4 +63,4 @@
"name" : "temp"
}
}
}
\ No newline at end of file
}