diff --git a/hivemind/functional/__init__.py b/hivemind/functional/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/hivemind/functional/hive_utils/__init__.py b/hivemind/functional/hive_utils/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..ded767169a87abc379fd52160f7006e4af95296f
--- /dev/null
+++ b/hivemind/functional/hive_utils/__init__.py
@@ -0,0 +1,4 @@
+from .common import *
+from .hive_node import *
+from .hivemind import *
+from .test_runner import *
diff --git a/hivemind/functional/hive_utils/common.py b/hivemind/functional/hive_utils/common.py
new file mode 100644
index 0000000000000000000000000000000000000000..fb816bd44246985d617b2a4299a8c67dd2d194dc
--- /dev/null
+++ b/hivemind/functional/hive_utils/common.py
@@ -0,0 +1,336 @@
+# -*- coding: utf-8 -*-
+
+import logging
+from junit_xml import TestCase
+import time
+import traceback
+import sys
+
+DEFAULT_LOG_FORMAT = '%(asctime)-15s - %(name)s - %(levelname)s - %(message)s'
+DEFAULT_LOG_LEVEL = logging.INFO
+
+MODULE_NAME = "common"
+logger = logging.getLogger(MODULE_NAME)
+logger.setLevel(DEFAULT_LOG_LEVEL)
+
+ch = logging.StreamHandler(sys.stdout)
+ch.setLevel(DEFAULT_LOG_LEVEL)
+ch.setFormatter(logging.Formatter(DEFAULT_LOG_FORMAT))
+
+logger.addHandler(ch)
+
+def send_rpc_query(target_node : str, payload : dict) -> dict:
+  from requests import post
+  from json import dumps
+  resp = post(target_node, data = dumps(payload))
+  if resp.status_code != 200:
+    logger.error(resp.json())
+    raise Exception("{} returned non-200 status code".format(payload["method"]))
+  return resp.json()
+
+def get_random_id() -> str:
+  from uuid import uuid4
+  return str(uuid4())
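+
+# Usage sketch for the two helpers above (the node URL is illustrative):
+#
+#   payload = {
+#     "jsonrpc": "2.0",
+#     "id": get_random_id(),
+#     "method": "database_api.get_dynamic_global_properties",
+#     "params": {}
+#   }
+#   props = send_rpc_query("http://127.0.0.1:8090", payload)["result"]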
+
+def get_current_block_number(source_node : str) -> int:
+  payload = {
+    "jsonrpc" : "2.0",
+    "id" : get_random_id(),
+    "method" : "database_api.get_dynamic_global_properties", 
+    "params" : {}
+  }
+
+  from requests import post
+  from json import dumps
+
+  try:
+    resp = post(source_node, data = dumps(payload))
+    if resp.status_code != 200:
+      return -1
+    data = resp.json()["result"]
+    return int(data["head_block_number"])
+  except Exception:
+    return -1
+
+def wait_n_blocks(source_node : str, blocks : int, timeout : int = 60):
+  from time import sleep
+  starting_block = get_current_block_number(source_node)
+  cntr = 0
+  while starting_block == -1 and cntr < timeout:
+    starting_block = get_current_block_number(source_node)
+    sleep(1)
+    cntr += 1
+  if starting_block == -1:
+    raise TimeoutError("Timeout in waiting for blocks. Is hived running?")
+  current_block = starting_block
+  cntr = 0
+  while current_block - starting_block < blocks and cntr < timeout:
+    current_block = get_current_block_number(source_node)
+    sleep(1)
+    cntr += 1
+  if current_block - starting_block < blocks:
+    raise TimeoutError("Timeout in waiting for blocks. Is hived running?")
+
+def debug_generate_blocks(target_node : str, debug_key : str, count : int) -> dict:
+  if count <= 0:
+    raise ValueError("Count must be a positive non-zero number")
+  payload = {
+    "jsonrpc": "2.0",
+    "id" : get_random_id(),
+    "method": "debug_node_api.debug_generate_blocks",
+    "params": {
+      "debug_key": debug_key,
+      "count": count,
+      "skip": 0,
+      "miss_blocks": 0,
+      "edit_if_needed": True
+    }
+  }
+  return send_rpc_query(target_node, payload)
+
+def debug_generate_blocks_until(target_node : str, debug_key : str, timestamp : str, generate_sparsely : bool = True) -> dict:
+  payload = {
+    "jsonrpc": "2.0",
+    "id" : get_random_id(),
+    "method": "debug_node_api.debug_generate_blocks_until",
+    "params": {
+      "debug_key": debug_key,
+      "head_block_time": timestamp,
+      "generate_sparsely": generate_sparsely
+    }
+  }
+  return send_rpc_query(target_node, payload)
+
+def debug_set_hardfork(target_node : str, hardfork_id : int) -> dict:
+  if hardfork_id < 0:
+    raise ValueError( "hardfork_id cannot be negative" )
+  payload = {
+    "jsonrpc": "2.0",
+    "id" : get_random_id(),
+    "method": "debug_node_api.debug_set_hardfork",
+    "params": {
+      "hardfork_id": hardfork_id
+    }
+  }
+  return send_rpc_query(target_node, payload)
+
+def debug_has_hardfork(target_node : str, hardfork_id : int) -> dict:
+  payload = {
+    "jsonrpc": "2.0",
+    "id" : get_random_id(),
+    "method": "debug_node_api.debug_has_hardfork",
+    "params": {
+      "hardfork_id": hardfork_id
+    }
+  }
+  return send_rpc_query(target_node, payload)
+
+def debug_get_witness_schedule(target_node : str) -> dict:
+  payload = {
+    "jsonrpc": "2.0",
+    "id" : get_random_id(),
+    "method": "debug_node_api.debug_get_witness_schedule",
+    "params": {}
+  }
+  return send_rpc_query(target_node, payload)
+
+def debug_get_hardfork_property_object(target_node : str) -> dict:
+  payload = {
+    "jsonrpc": "2.0",
+    "id" : get_random_id(),
+    "method": "debug_node_api.debug_get_hardfork_property_object",
+    "params": {}
+  }
+  return send_rpc_query(target_node, payload)
+
+def get_date_as_isostr(date):
+  return date.replace(microsecond=0).isoformat()
+
+
+def get_isostr_start_end_date(now, start_date_delta, end_date_delta):
+  from datetime import timedelta
+
+  start_date = now + timedelta(days = start_date_delta)
+  end_date = start_date + timedelta(days = end_date_delta)
+
+  start_date = start_date.replace(microsecond=0).isoformat()
+  end_date = end_date.replace(microsecond=0).isoformat()
+
+  return start_date, end_date
+
+def save_screen_cfg(cfg_file_name, log_file_path):
+  """Creates a config file for screen command. In config file we configure logging path and interval.
+
+  Args:
+      cfg_file_name -- file name for screen config file,
+      log_file_path -- path to log file.
+  """
+  with open(cfg_file_name, "w") as cfg:
+    cfg.write("logfile {0}\n".format(log_file_path))
+    cfg.write("deflog on\n")
+    cfg.write("logfile flush 1\n")
+
+def save_pid_file(pid_file_name, exec_name, port, start_time):
+  """Creates PID file which indicate running keosd or nodeos process.
+
+  Args:
+      pid_file_name -- file name for pid file,
+      exec_name -- name of the executable bound to this pid file,
+      port -- port number for running executable,
+      start_time -- execution start time.
+  """
+  with open(pid_file_name, "w") as pid_file:
+    pid_file.write("{0}-{1}-{2}\n".format(exec_name, port, start_time))
+
+def wait_for_string_in_file(log_file_name, string, timeout):
+  """Blocks program execution until a given string is found in given file.
+
+  Args:
+      log_file_name -- path to scanned file,
+      string -- string to be found,
+      timeout -- blocking timeout in seconds; after this time a TimeoutError is raised.
+  """
+  logger.info("Waiting for string \"{}\" in file {}".format(string, log_file_name))
+  step = 1
+  to_timeout = 0.
+  from time import sleep
+  from os.path import exists
+  while True:
+    sleep(step)
+    to_timeout = to_timeout + step
+    if timeout is not None and to_timeout >= timeout:
+      msg = "Timeout during wait for string {0}".format(string)
+      logger.error(msg)
+      raise TimeoutError(msg)
+    if exists(log_file_name):
+      with open(log_file_name, "r") as log_file:
+        leave = False
+        for line in log_file.readlines():
+          if string in line:
+            leave = True
+            break
+        if leave:
+          break
+
+def get_last_line_of_file(file_name):
+  """Reads and returns last line of given file.
+
+  Args:
+      file_name -- path to a file.
+  """
+  last_line = ""
+  from os import SEEK_CUR, SEEK_END
+  with open(file_name, "r") as f:
+    f.seek(-2, SEEK_END)
+    while f.read(1) != b'\n':
+      f.seek(-2, SEEK_CUR) 
+    last_line = f.readline().decode()
+  return last_line
+
+def kill_process(pid_file_name, proc_name, ip_address, port):
+  """Attempts to stop a process with given PID listening on port at ip_address. Process data is read from pid_file_name.
+
+  Args:
+      pid_file_name -- path to pid file,
+      proc_name -- executable name,
+      ip_address -- executable ip address,
+      port -- executable port number.
+  """
+  logger.info("Terminating {0} process running on port {1}".format(proc_name, port))
+  pids = []
+  pid_name = None
+  try:
+    from os import popen, kill, remove
+    from os.path import exists
+    with open(pid_file_name, "r") as pid_file:
+      pid_name = pid_file.readline()
+      pid_name = pid_name.strip()
+    if pid_name:
+      for line in popen("ps ax | grep " + proc_name + " | grep -v grep"):
+        if pid_name in line:
+          line = line.strip().split()
+          pids.append(line[0])
+      for pid in pids:
+        for line in popen("ps --no-header --ppid {0}".format(pid)):
+          line = line.strip().split()
+          kill(int(line[0]), 2)
+        kill(int(pid), 2)
+      if exists(pid_file_name):
+        remove(pid_file_name)
+      logger.info("Done...")
+    else:
+      logger.warning("No such process: {0}".format(pid_name))
+  except Exception as ex:
+    logger.error("Process {0} cannot be killed. Reason: {1}".format(proc_name, ex))
+
+def detect_process_by_name(proc_name, exec_path, port):
+  """Checks if  process of given name runs on given ip_address and port.
+
+  Args:
+      proc_name -- process name,
+      exec_path -- path to executable,
+      port -- process port.
+  """
+  pids = []
+  from os import popen
+  for line in popen("ps ax | grep " + proc_name + " | grep -v grep"):
+    if exec_path in line and str(port) in line:
+      line = line.strip().split()
+      pids.append(line[0])
+  return bool(pids)
+
+
+BLOCK_TYPE_HEADBLOCK = "within_reversible_block"
+BLOCK_TYPE_IRREVERSIBLE = "within_irreversible_block"
+def block_until_transaction_in_block(node_url, transaction_id, block_type = BLOCK_TYPE_HEADBLOCK, timeout = 60.):
+  logger.info("Block until transaction_id: {0} is {1}".format(transaction_id, block_type))
+  from time import sleep
+  from requests import post
+  step = 1.
+  timeout_cnt = 0.
+  while True:
+    query = {
+      "id" : "{}".format(get_random_id()),
+      "jsonrpc":"2.0", 
+      "method":"transaction_status_api.find_transaction", 
+      "params": {
+        "transaction_id": transaction_id
+      }
+    }
+
+    response = post(node_url, json=query).json()
+    transaction_status = response.get("result", {}).get("status", None)
+    if transaction_status is not None:
+      if transaction_status == block_type:
+        logger.info("Transaction id: {0} is {1}".format(transaction_id, block_type))
+        return
+      logger.info("Transaction id: {0} not {1}".format(transaction_id, block_type))
+    sleep(step)
+    timeout_cnt = timeout_cnt + step
+    if timeout_cnt > timeout:
+      msg = "Timeout reached during block_until_transaction_in_block"
+      logger.error(msg)
+      raise TimeoutError(msg)
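+
+# Usage sketch (the transaction id is illustrative): wait until a broadcast
+# transaction becomes irreversible before asserting on its side effects.
+#
+#   block_until_transaction_in_block("http://127.0.0.1:8090",
+#                                    "c21f1a5d2f84083bd6a62a29e79fb8755b2a5c8e",
+#                                    BLOCK_TYPE_IRREVERSIBLE)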
+
+
+junit_test_cases = []
+def junit_test_case(method):
+    def log_test_case(*args, **kw):
+        start_time = time.time()
+        error = None
+        try:
+            return method(*args, **kw)
+        except:
+            e = sys.exc_info()
+            error = traceback.format_exception(e[0], e[1], e[2])
+            raise
+        finally:
+            end_time = time.time()
+            test_case = TestCase(method.__name__, method.__name__, end_time - start_time, '', '')
+            if error is not None:
+                test_case.add_failure_info(output = error)
+            junit_test_cases.append(test_case)
+    return log_test_case
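+
+# Usage sketch: decorate test functions, run them, then emit a JUnit XML
+# report from the collected cases. Note the report-writing API differs
+# between junit_xml 1.x and 2.x; the 1.x form is shown here.
+#
+#   @junit_test_case
+#   def test_addition():
+#     assert 1 + 1 == 2
+#
+#   test_addition()
+#   from junit_xml import TestSuite
+#   suite = TestSuite("functional tests", junit_test_cases)
+#   with open("report.xml", "w") as report:
+#     report.write(TestSuite.to_xml_string([suite]))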
+
diff --git a/hivemind/functional/hive_utils/hive_node.py b/hivemind/functional/hive_utils/hive_node.py
new file mode 100644
index 0000000000000000000000000000000000000000..f10cb2279938f0068bdd35c95bba41815434ba1d
--- /dev/null
+++ b/hivemind/functional/hive_utils/hive_node.py
@@ -0,0 +1,282 @@
+#!/usr/bin/python3
+
+import json
+import logging
+import sys
+import os
+import subprocess
+import datetime
+
+from .common import DEFAULT_LOG_FORMAT, DEFAULT_LOG_LEVEL
+
+from threading import Lock
+
+MODULE_NAME = "hive_node"
+
+logger = logging.getLogger(MODULE_NAME)
+logger.setLevel(DEFAULT_LOG_LEVEL)
+
+ch = logging.StreamHandler(sys.stdout)
+ch.setLevel(DEFAULT_LOG_LEVEL)
+ch.setFormatter(logging.Formatter(DEFAULT_LOG_FORMAT))
+
+logger.addHandler(ch)
+
+
+class HiveNode(object):
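+  """Context manager wrapping a single hived process.
+
+  __enter__ starts hived with the configured data dir and arguments (a
+  class-level lock serializes instances); __exit__ stops it with SIGINT,
+  escalating to SIGTERM if the process does not exit.
+  """
+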
+  hived_binary = None
+  hived_process = None
+  hived_lock = Lock()
+  hived_data_dir = None
+  hived_args = list()
+
+  logger = logging.getLogger(MODULE_NAME + ".HiveNode")
+
+  def __init__(self, binary_path : str, working_dir : str, binary_args : list):
+    self.logger.info("New hive node")
+    if not os.path.exists(binary_path):
+      raise ValueError("Path to hived binary is not valid.")
+    if not os.path.isfile(binary_path):
+      raise ValueError("Path to hived binary must point to file")
+    self.hived_binary = binary_path
+
+    if not os.path.exists(working_dir):
+      raise ValueError("Path to data directory is not valid")
+    if not os.path.isdir(working_dir):
+      raise ValueError("Data directory is not valid directory")
+    self.hived_data_dir = working_dir
+
+    self.hived_args = list(binary_args) if binary_args else []  # per-instance copy, not the shared class attribute
+
+  def __enter__(self):
+    self.hived_lock.acquire()
+
+    from subprocess import Popen, PIPE
+    from time import sleep
+
+    hived_command = [
+      self.hived_binary,
+      "--data-dir={}".format(self.hived_data_dir)
+    ]
+    hived_command.extend(self.hived_args)
+
+    self.hived_process = Popen(hived_command, stdout=PIPE, stderr=None)
+    sleep(5)
+    self.hived_process.poll()  # poll after the grace period so an early exit shows up in returncode
+
+    if self.hived_process.returncode:
+      raise Exception("Error during starting node")
+
+  def get_output(self):
+    out, err = self.hived_process.communicate()
+    return out
+
+  def __exit__(self, exc, value, tb):
+    self.logger.info("Closing node")
+    from signal import SIGINT, SIGTERM
+    from time import sleep
+
+    if self.hived_process is not None:
+      self.hived_process.poll()
+      if self.hived_process.returncode is None:
+        self.hived_process.send_signal(SIGINT)
+        sleep(7)
+        self.hived_process.poll()
+        if self.hived_process.returncode is None:
+          self.hived_process.send_signal(SIGTERM)
+          sleep(7)
+          self.hived_process.poll()
+          if self.hived_process.returncode is None:
+            raise Exception("Error during stopping node. Manual intervention required.")
+    self.hived_process = None
+    self.hived_lock.release()
+
+class HiveNodeInScreen(object):
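+  """Runs hived inside a detached `screen` session that logs to a file, so
+  startup progress can be polled with wait_for_string_in_file and the node
+  can be stopped later via its PID file."""
+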
+  def __init__(self, hive_executable, working_dir, config_src_path, run_using_existing_data = False, node_is_steem = False):
+    self.logger = logging.getLogger(MODULE_NAME + ".HiveNodeInScreen")
+    self.logger.info("New hive node")
+    self.hive_executable = hive_executable
+    self.working_dir = working_dir
+    self.config_src_path = config_src_path
+    self.node_is_steem = node_is_steem
+
+    # useful when we want to do a replay
+    if not run_using_existing_data:
+      from shutil import rmtree, copy
+      # remove old data from node
+      if os.path.exists(self.working_dir):
+        rmtree(self.working_dir)
+      os.makedirs(self.working_dir+"/blockchain")
+      # copy config file to working dir
+      copy(self.config_src_path, self.working_dir + "/config.ini")
+
+    self.hive_config = self.parse_node_config_file(self.working_dir + "/config.ini")
+    self.ip_address, self.port = self.hive_config["webserver-http-endpoint"][0].split(":")
+    self.ip_address = "http://{}".format(self.ip_address)
+    self.node_running = False
+
+  def get_from_config(self, key):
+    return self.hive_config.get(key, None)
+
+  def get_node_url(self):
+    return "{}:{}/".format(self.ip_address, self.port)
+
+  def is_running(self):
+    return self.node_running
+
+  def parse_node_config_file(self, config_file_name):
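+    """Parses a hived config.ini into a dict mapping each key to a list of
+    values (keys such as `plugin` may repeat).
+
+    For example, a file containing:
+        plugin = witness
+        plugin = debug_node
+        webserver-http-endpoint = 127.0.0.1:8090
+    parses to:
+        {"plugin": ["witness", "debug_node"],
+         "webserver-http-endpoint": ["127.0.0.1:8090"]}
+    """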
+    ret = dict()
+    lines = None
+    with open(config_file_name, "r") as f:
+      lines = f.readlines()
+
+    for line in lines:
+      proc_line = line.strip()
+      if proc_line:
+        if proc_line.startswith("#"):
+            continue
+        k, v = proc_line.split("=", 1)
+        k = k.strip()
+        v = v.strip()
+        if k in ret:
+          ret[k].append(v)
+        else:
+          ret[k] = [v]
+    return ret
+
+  def run_hive_node(self, additional_params = [], wait_for_blocks = True):
+    from .common import detect_process_by_name, save_screen_cfg, save_pid_file, wait_n_blocks, wait_for_string_in_file, kill_process
+
+    if detect_process_by_name("hived" if not self.node_is_steem else "steemd", self.hive_executable, self.port):
+      msg = "{0} process is running on {1}:{2}. Please terminate that process and try again.".format("hive", self.ip_address, self.port)
+      raise ProcessLookupError(msg)
+
+    self.logger.info("*** START NODE at {0}:{1} in {2}".format(self.ip_address, self.port, self.working_dir))
+
+    parameters = [
+      self.hive_executable,
+      "-d",
+      self.working_dir,
+      "--advanced-benchmark",
+      "--sps-remove-threshold",
+      "-1"
+    ]
+
+    parameters = parameters + additional_params
+    
+    self.pid_file_name = "{0}/run_hive-{1}.pid".format(self.working_dir, self.port)
+    current_time_str = datetime.datetime.now().strftime("%Y-%m-%d")
+    log_file_name = "{0}/{1}-{2}-{3}.log".format(self.working_dir, "hived", self.port, current_time_str)
+    screen_cfg_name = "{0}/hive_screen-{1}.cfg".format(self.working_dir, self.port)
+
+    save_screen_cfg(screen_cfg_name, log_file_name)
+    screen_params = [
+      "screen",
+      "-m",
+      "-d",
+      "-L",
+      "-c",
+      screen_cfg_name,
+      "-S",
+      "{0}-{1}-{2}".format("hived", self.port, current_time_str)
+    ]
+
+    parameters = screen_params + parameters
+    self.logger.info("Running hived with command: {0}".format(" ".join(parameters)))
+      
+    try:
+      subprocess.Popen(parameters)
+      save_pid_file(self.pid_file_name, "hived", self.port, current_time_str)
+      # give screen a moment to start; if hived exits immediately we detect it below
+      from time import sleep
+      sleep(5)
+
+      if not detect_process_by_name("hived" if not self.node_is_steem else "steemd", self.hive_executable, self.port):
+        msg = "{0} process is not running on {1}:{2}. Please check logs.".format("hive", self.ip_address, self.port)
+        raise ProcessLookupError(msg)
+
+      if "--replay-blockchain" in parameters:
+        wait_for_string_in_file(log_file_name, "start listening for ws requests", None)
+      else:
+        if wait_for_blocks:
+          wait_n_blocks("{}:{}".format(self.ip_address, self.port), 5)
+        else:
+          wait_for_string_in_file(log_file_name, "start listening for ws requests", 20.)
+      self.node_running = True
+      self.logger.info("Node at {0}:{1} in {2} is up and running...".format(self.ip_address, self.port, self.working_dir))
+    except Exception as ex:
+      self.logger.error("Exception during hived run: {0}".format(ex))
+      kill_process(self.pid_file_name, "hived" if not self.node_is_steem else "steemd", self.ip_address, self.port)
+      self.node_running = False
+
+
+  def stop_hive_node(self):
+    from .common import kill_process
+    self.logger.info("Stopping node at {0}:{1}".format(self.ip_address, self.port))
+    kill_process(self.pid_file_name, "hived" if not self.node_is_steem else "steemd", self.ip_address, self.port)
+    self.node_running = False
+
+  def __enter__(self):
+    self.run_hive_node()
+
+  def __exit__(self, exc, value, tb):
+    self.stop_hive_node()
+
+if __name__ == "__main__":
+  KEEP_GOING = True
+
+  def sigint_handler(signum, frame):
+    logger.info("Shutting down...")
+    global KEEP_GOING
+    from time import sleep
+    KEEP_GOING = False
+    sleep(3)
+    sys.exit(0)
+
+  def main():
+    try:
+      import signal
+      signal.signal(signal.SIGINT, sigint_handler)
+
+      plugins = ["chain","p2p","webserver","json_rpc","debug_node"]
+      config = "# Simple config file\n" \
+        + "shared-file-size = 1G\n" \
+        + "enable-stale-production = true\n" \
+        + "p2p-endpoint = 127.0.0.1:2001\n" \
+        + "webserver-http-endpoint = 127.0.0.1:8095\n" \
+        + "webserver-ws-endpoint = 127.0.0.1:8096\n" \
+        + "plugin = witness debug_node {}\n".format(" ".join(plugins)) \
+        + "plugin = database_api debug_node_api block_api\n" \
+        + "witness = \"initminer\"\n" \
+        + "private-key = 5JNHfZYKGaomSFvd4NUdQ9qMcEAC43kujbfjueTHpVapX1Kzq2n\n" \
+        + "required-participation = 0"
+
+      binary_path = "/home/dariusz-work/Builds/hive/programs/hived/hived"
+      work_dir = "/home/dariusz-work/hive-data"
+
+      print(config)
+
+      with open(work_dir + "/config.ini", "w") as conf_file:
+        conf_file.write(config)
+
+      node = HiveNode(binary_path, work_dir, [])
+      from time import sleep
+      from .common import wait_n_blocks, debug_generate_blocks
+      with node:
+        print("Waiting 10 blocks")
+        wait_n_blocks("http://127.0.0.1:8095", 10)
+        print("Done...")
+        print(debug_generate_blocks("http://127.0.0.1:8095", "5JHNbFNDg834SFj8CMArV6YW7td4zrPzXveqTfaShmYVuYNeK69", 100))
+        while(KEEP_GOING):
+          sleep(1)
+    except Exception as ex:
+      logger.exception("Exception: {}".format(ex))
+      sys.exit(1)
+  
+  main()
+
diff --git a/hivemind/functional/hive_utils/hivemind.py b/hivemind/functional/hive_utils/hivemind.py
new file mode 100644
index 0000000000000000000000000000000000000000..96a627028ee74d7fb13155a98f30960c61e9a81b
--- /dev/null
+++ b/hivemind/functional/hive_utils/hivemind.py
@@ -0,0 +1,126 @@
+#!/usr/bin/python3
+
+import json
+import logging
+import sys
+import os
+import subprocess
+import datetime
+
+from .common import DEFAULT_LOG_FORMAT, DEFAULT_LOG_LEVEL
+
+MODULE_NAME = "hivemind"
+
+logger = logging.getLogger(MODULE_NAME)
+logger.setLevel(DEFAULT_LOG_LEVEL)
+
+ch = logging.StreamHandler(sys.stdout)
+ch.setLevel(DEFAULT_LOG_LEVEL)
+ch.setFormatter(logging.Formatter(DEFAULT_LOG_FORMAT))
+
+logger.addHandler(ch)
+
+class HivemindInScreen(object):
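+  """Runs a hivemind `sync` or `server` instance inside a detached `screen`
+  session, pointed at a hived node and a PostgreSQL database URL."""
+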
+  def __init__(self, hivemind_executable, mode, hivemind_port, database_url, working_dir = ".", hived_address="http://127.0.0.1", hived_port=8090):
+    self.logger = logging.getLogger(MODULE_NAME + ".HivemindInScreen")
+    self.logger.info("New hivemind instance")
+    self.hivemind_executable = hivemind_executable
+    assert mode in ['sync', 'server'], "Allowed modes are: `sync` and `server`"
+    self.hivemind_mode = mode
+    self.hivemind_port = hivemind_port
+    self.hived_address = hived_address
+    self.hived_port = hived_port
+    self.hivemind_database_url = database_url
+    self.working_dir = working_dir
+    self.hivemind_running = False
+
+  def get_address(self):
+    return "http://{}:{}/".format("127.0.0.1", self.hivemind_port)
+
+  def is_running(self):
+    return self.hivemind_running
+
+  def run_hivemind(self, params = []):
+    from .common import detect_process_by_name, save_screen_cfg, save_pid_file, wait_for_string_in_file, kill_process
+    from json import dumps
+    start_params = [
+      self.hivemind_executable,
+      self.hivemind_mode,
+      '--steemd-url',
+      dumps({"default":"{}:{}".format(self.hived_address, self.hived_port)}),
+      '--database-url',
+      self.hivemind_database_url,
+      '--http-server-port',
+      str(self.hivemind_port)
+    ]
+
+    self.pid_file_name = "{0}/run_hivemind-{1}.pid".format(self.working_dir, self.hivemind_port)
+    current_time_str = datetime.datetime.now().strftime("%Y-%m-%d")
+    self.log_file_name = "{0}/{1}-{2}-{3}.log".format(self.working_dir, "hive", self.hivemind_port, current_time_str)
+    screen_cfg_name = "{0}/hive_screen-{1}.cfg".format(self.working_dir, self.hivemind_port)
+
+    start_params = start_params + params
+
+    save_screen_cfg(screen_cfg_name, self.log_file_name)
+    screen_params = [
+      "screen",
+      "-m",
+      "-d",
+      "-L",
+      "-c",
+      screen_cfg_name,
+      "-S",
+      "{0}-{1}-{2}".format("hive", self.hivemind_port, current_time_str)
+    ]
+
+    start_params = screen_params + start_params
+    self.logger.info("Running hivemind with command: {0}".format(" ".join(start_params)))
+
+    try:
+      save_pid_file(self.pid_file_name, "hive", self.hivemind_port, current_time_str)
+      subprocess.Popen(start_params)
+      # give screen a moment to start; if hivemind exits immediately we detect it below
+      from time import sleep
+      sleep(5)
+
+      if not detect_process_by_name("hive", self.hivemind_executable, self.hivemind_port):
+        msg = "{0} process is not running on {1}:{2}. Please check logs.".format("hive", "http://0.0.0.0", self.hivemind_port)
+        raise ProcessLookupError(msg)
+
+      self.hivemind_running = True
+      self.logger.info("Hivemind at {0}:{1} in {2} is up and running...".format("http://0.0.0.0", self.hivemind_port, self.working_dir))
+    except Exception as ex:
+      self.logger.exception("Exception during hivemind run: {0}".format(ex))
+      kill_process(self.pid_file_name, "hive", "http://0.0.0.0", self.hivemind_port)
+      self.hivemind_running = False
+
+  def stop_hivemind(self):
+    from .common import kill_process
+    self.logger.info("Stopping hivemind at {0}:{1}".format("http://0.0.0.0", self.hivemind_port))
+    kill_process(self.pid_file_name, "hive", "http://0.0.0.0", self.hivemind_port)
+    self.hivemind_running = False
+
+  def __enter__(self):
+    self.run_hivemind()
+
+  def __exit__(self, exc, value, tb):
+    self.stop_hivemind()
+
+if __name__ == "__main__":
+  from time import sleep
+  def main():
+    hivemind = None
+    try:
+      hivemind = HivemindInScreen("hive", "sync", 8080, "postgresql://hive@localhost:5432/hive3", "/tmp")
+      hivemind.run_hivemind()
+      sleep(30)
+      if hivemind is not None and hivemind.is_running():
+        hivemind.stop_hivemind()
+    except Exception as ex:
+      if hivemind is not None and hivemind.is_running():
+        hivemind.stop_hivemind()
+      logger.exception("Exception: {}".format(ex))
+      sys.exit(1)
+
+  main()
\ No newline at end of file
diff --git a/hivemind/functional/hive_utils/resources/config.ini.in b/hivemind/functional/hive_utils/resources/config.ini.in
new file mode 100644
index 0000000000000000000000000000000000000000..eee7e54b7eaa72399394bf04d11bfded9d0ea4c6
--- /dev/null
+++ b/hivemind/functional/hive_utils/resources/config.ini.in
@@ -0,0 +1,101 @@
+# Appender definition json: {"appender", "stream", "file"} Can only specify a file OR a stream
+log-appender = {"appender":"stderr","stream":"std_error"} {"appender":"p2p","file":"logs/p2p/p2p.log"}
+
+# Logger definition json: {"name", "level", "appender"}
+log-logger = {"name":"default","level":"all","appender":"stderr"} {"name":"p2p","level":"all","appender":"p2p"}
+
+# Whether to print backtrace on SIGSEGV
+backtrace = yes
+
+# Plugin(s) to enable, may be specified multiple times
+plugin = witness database_api account_by_key_api network_broadcast_api condenser_api block_api transaction_status_api debug_node_api market_history_api account_history_rocksdb account_history_api
+
+# Disables automatic account history trimming
+history-disable-pruning = 0
+
+# The location of the rocksdb database for account history. By default it is $DATA_DIR/blockchain/account-history-rocksdb-storage
+account-history-rocksdb-path = "blockchain/account-history-rocksdb-storage"
+
+# Where to export data (NONE to discard)
+block-data-export-file = NONE
+
+# How often to print out block_log_info (default 1 day)
+block-log-info-print-interval-seconds = 86400
+
+# Whether to defer printing until block is irreversible
+block-log-info-print-irreversible = 1
+
+# Where to print (filename or special sink ILOG, STDOUT, STDERR)
+block-log-info-print-file = ILOG
+
+# the location of the chain shared memory files (absolute path or relative to application data dir)
+shared-file-dir = "blockchain"
+
+# Size of the shared memory file. Default: 54G. If running a full node, increase this value to 200G.
+shared-file-size = 8G
+
+# A 2 precision percentage (0-10000) that defines the threshold for when to autoscale the shared memory file. Setting this to 0 disables autoscaling. Recommended value for consensus node is 9500 (95%). Full node is 9900 (99%)
+shared-file-full-threshold = 0
+
+# A 2 precision percentage (0-10000) that defines how quickly to scale the shared memory file. When autoscaling occurs the file's size will be increased by this percent. Setting this to 0 disables autoscaling. Recommended value is between 1000-2000 (10-20%)
+shared-file-scale-rate = 0
+
+# Set the maximum size of cached feed for an account
+follow-max-feed-size = 500
+
+# Block time (in epoch seconds) when to start calculating feeds
+follow-start-feeds = 0
+
+# Track market history by grouping orders into buckets of equal size measured in seconds specified as a JSON array of numbers
+market-history-bucket-size = [15,60,300,3600,86400]
+
+# How far back in time to track history for each bucket size, measured in the number of buckets (default: 5760)
+market-history-buckets-per-size = 5760
+
+# The IP address and port of a remote peer to sync with.
+p2p-seed-node = 127.0.0.1:2001
+
+# Skip rejecting transactions when account has insufficient RCs. This is not recommended.
+rc-skip-reject-not-enough-rc = 0
+
+# Generate historical resource credits
+rc-compute-historical-rc = 0
+
+# Size to batch statsd messages.
+statsd-batchsize = 1
+
+# Block time (in epoch seconds) when to start calculating promoted content. Should be 1 week prior to current time.
+tags-start-promoted = 0
+
+# Skip updating tags on startup. Can safely be skipped when starting a previously running node. Should not be skipped when reindexing.
+tags-skip-startup-update = 0
+
+# Defines the number of blocks from the head block that transaction statuses will be tracked.
+transaction-status-block-depth = 64000
+
+# Defines the block number the transaction status plugin will begin tracking.
+transaction-status-track-after-block = 0
+
+# Local http endpoint for webserver requests.
+webserver-http-endpoint = 127.0.0.1:8090
+
+# Local websocket endpoint for webserver requests.
+webserver-ws-endpoint = 127.0.0.1:8090
+
+# Number of threads used to handle queries. Default: 32.
+webserver-thread-pool-size = 32
+
+# Enable block production, even if the chain is stale.
+enable-stale-production = 0
+
+# Percent of witnesses (0-99) that must be participating in order to produce blocks
+required-participation = 0
+
+# name of witness controlled by this node (e.g. initwitness)
+witness = "initminer"
+
+# WIF PRIVATE KEY to be used by one or more witnesses or miners
+private-key = 5JNHfZYKGaomSFvd4NUdQ9qMcEAC43kujbfjueTHpVapX1Kzq2n
+
+# Skip enforcing bandwidth restrictions. Default is true in favor of rc_plugin.
+witness-skip-enforce-bandwidth = 1
diff --git a/hivemind/functional/hive_utils/test_runner.py b/hivemind/functional/hive_utils/test_runner.py
new file mode 100644
index 0000000000000000000000000000000000000000..1ea77b2ac603e4c881368d4621e4a3729d4362e4
--- /dev/null
+++ b/hivemind/functional/hive_utils/test_runner.py
@@ -0,0 +1,143 @@
+#!/usr/bin/python3
+import sys
+
+from .hivemind import *
+from .hive_node import *
+from .common import wait_for_string_in_file
+
+from uuid import uuid4
+from time import sleep
+import logging
+import os
+
+LOG_LEVEL = logging.INFO
+LOG_FORMAT = "%(asctime)-15s - %(name)s - %(levelname)s - %(message)s"
+MAIN_LOG_PATH = "test_runner.log"
+log_dir = os.environ.get("TEST_LOG_DIR", None)
+if log_dir is not None:
+    MAIN_LOG_PATH = log_dir + "/" + MAIN_LOG_PATH
+else:
+    MAIN_LOG_PATH = "./" + MAIN_LOG_PATH
+
+MODULE_NAME = "test_runner"
+logger = logging.getLogger(MODULE_NAME)
+logger.setLevel(LOG_LEVEL)
+
+ch = logging.StreamHandler(sys.stdout)
+ch.setLevel(LOG_LEVEL)
+ch.setFormatter(logging.Formatter(LOG_FORMAT))
+
+fh = logging.FileHandler(MAIN_LOG_PATH)
+fh.setLevel(LOG_LEVEL)
+fh.setFormatter(logging.Formatter(LOG_FORMAT))
+
+if not logger.hasHandlers():
+    logger.addHandler(ch)
+    logger.addHandler(fh)
+
+try:
+    from beem import Hive
+except ImportError:
+    logger.error("beem library is not installed.")
+    sys.exit(1)
+
+class TestRunner(object):
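+    """Orchestrates a functional test run: starts hived, performs hivemind
+    initial sync, starts the hivemind server, and finally stops everything.
+    Subclasses customize each stage via the on_before_*/on_after_* hooks."""
+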
+    def __init__(self, hived_path, wif, node_url, database_url, working_dir, config_path):
+        self.logger = logging.getLogger(MODULE_NAME + ".TestRunner")
+        self.hived_node = None
+        self.hived_node_client = None
+        self.hivemind_sync = None
+        self.hivemind_server = None
+        self.hived_keys = [wif]
+
+        self.hived_path = hived_path
+        self.hived_wif = wif
+        self.hived_node_url = node_url
+        self.hivemind_database_url = database_url
+        self.hived_working_dir = working_dir
+        self.hived_config_path = config_path
+
+
+    def on_before_hived_run(self):
+        pass
+
+    def on_before_hivemind_sync_run(self):
+        pass
+
+    def on_before_hivemind_server_run(self):
+        pass
+
+    def on_after_hivemind_server_run(self):
+        from time import sleep
+        sleep(60)
+
+    def run(self):
+        self.logger.info("Executing before hived run hook")
+        self.on_before_hived_run()
+
+        if self.hived_path:
+            self.logger.info("Running hived via {} in {} with config {}".format(self.hived_path, 
+                self.hived_working_dir, 
+                self.hived_config_path)
+            )
+            
+            self.hived_node = HiveNodeInScreen(
+                self.hived_path, 
+                self.hived_working_dir, 
+                self.hived_config_path
+            )
+
+        if self.hived_node is not None:
+            self.hived_node.run_hive_node(["--enable-stale-production"])
+        try:
+            if self.hived_node is not None and self.hived_node.is_running():
+                self.hived_node_client = Hive(node = [self.hived_node_url], no_broadcast = False, 
+                    keys = self.hived_keys
+                )
+
+                self.logger.info("Chain prefix is: {}".format(self.hived_node_client.prefix))
+                self.logger.info("Chain ID is: {}".format(self.hived_node_client.get_config()["HIVE_CHAIN_ID"]))
+
+                self.logger.info("Executing before hivemind initial sync run hook")
+                self.on_before_hivemind_sync_run()
+
+                self.logger.info("Start hivemind instance and perform initial sync")
+                self.hivemind_sync = HivemindInScreen("hive", "sync", 8080, self.hivemind_database_url, "/tmp")
+                self.hivemind_sync.run_hivemind()
+                wait_for_string_in_file(self.hivemind_sync.log_file_name, "Initial sync complete", None)
+                self.logger.info("Initial sync complete, switching to live sync mode")
+
+                self.logger.info("Executing before hivemind server run hook")
+                self.on_before_hivemind_server_run()
+
+                self.logger.info("Start hivemind instance as server")
+                self.hivemind_server = HivemindInScreen("hive", "server", 8081, self.hivemind_database_url, "/tmp")
+                self.hivemind_server.run_hivemind()
+
+                self.logger.info("Executing after hivemind server run hook")
+                self.on_after_hivemind_server_run()
+
+                self.logger.info("Stopping hived and all hivemind instances")
+                if self.hivemind_server is not None:
+                    self.hivemind_server.stop_hivemind()
+                
+                if self.hivemind_sync is not None:
+                    self.hivemind_sync.stop_hivemind()
+
+                if self.hived_node is not None:
+                    self.hived_node.stop_hive_node()
+
+                return True
+            return False
+        except Exception as ex:
+            self.logger.exception("Exception: {}".format(ex))
+            if self.hivemind_server is not None:
+                self.hivemind_server.stop_hivemind()
+
+            if self.hivemind_sync is not None:
+                self.hivemind_sync.stop_hivemind()
+
+            if self.hived_node is not None: 
+                self.hived_node.stop_hive_node()
+            raise
+
diff --git a/hivemind/functional/tests/communities/communities_create.py b/hivemind/functional/tests/communities/communities_create.py
new file mode 100644
index 0000000000000000000000000000000000000000..6d683f2098dd3f85d877c8ab916fdb35f9884f6b
--- /dev/null
+++ b/hivemind/functional/tests/communities/communities_create.py
@@ -0,0 +1,210 @@
+#!/usr/bin/python3
+
+import sys
+sys.path.append("../../")
+import hive_utils
+
+from uuid import uuid4
+from time import sleep
+import logging
+import os
+
+
+LOG_LEVEL = logging.INFO
+LOG_FORMAT = "%(asctime)-15s - %(name)s - %(levelname)s - %(message)s"
+MAIN_LOG_PATH = "functional_example.log"
+log_dir = os.environ.get("TEST_LOG_DIR", None)
+if log_dir is not None:
+    MAIN_LOG_PATH = log_dir + "/" + MAIN_LOG_PATH
+else:
+    MAIN_LOG_PATH = "./" + MAIN_LOG_PATH
+
+
+MODULE_NAME = "Functional-Example"
+logger = logging.getLogger(MODULE_NAME)
+logger.setLevel(LOG_LEVEL)
+
+ch = logging.StreamHandler(sys.stdout)
+ch.setLevel(LOG_LEVEL)
+ch.setFormatter(logging.Formatter(LOG_FORMAT))
+
+fh = logging.FileHandler(MAIN_LOG_PATH)
+fh.setLevel(LOG_LEVEL)
+fh.setFormatter(logging.Formatter(LOG_FORMAT))
+
+if not logger.hasHandlers():
+  logger.addHandler(ch)
+  logger.addHandler(fh)
+
+try:
+    from beem import Hive
+except ImportError:
+    logger.error("beem library is not installed.")
+    sys.exit(1)
+
+def create_accounts(node, creator, accounts):
+    """ Create accounts given as a list using `creator` as a creator account """
+    for account in accounts:
+        logger.info("Creating account: {}".format(account['name']))
+        node.create_account(account['name'], 
+            owner_key=account['public_key'], 
+            active_key=account['public_key'], 
+            posting_key=account['public_key'],
+            memo_key=account['public_key'],
+            store_keys = False,
+            creator=creator,
+            asset='TESTS'
+        )
+    hive_utils.common.wait_n_blocks(node.rpc.url, 5)
+
+
+def transfer_to_vesting(node, from_account, accounts, amount, asset):
+    """ Transfer assets to vesting from `from_account` to accounts given in list """
+    from beem.account import Account
+    for acnt in accounts:
+        logger.info("Transfer to vesting from {} to {} amount {} {}".format(
+            from_account, acnt['name'], amount, asset)
+        )
+        acc = Account(from_account, hive_instance=node)
+        acc.transfer_to_vesting(amount, to = acnt['name'], asset = asset)
+    hive_utils.common.wait_n_blocks(node.rpc.url, 5)
+
+
+def transfer_assets_to_accounts(node, from_account, accounts, amount, asset, wif=None):
+    """ Transfer assets `from_account` to accounts given in list """
+    from beem.account import Account
+    for acnt in accounts:
+        logger.info("Transfer from {} to {} amount {} {}".format(from_account, 
+            acnt['name'], amount, asset)
+        )
+        acc = Account(from_account, hive_instance=node)
+        acc.transfer(acnt['name'], amount, asset, memo = "initial transfer")
+    if wif is not None:
+        hive_utils.debug_generate_blocks(node.rpc.url, wif, 5)
+    else:
+        hive_utils.common.wait_n_blocks(node.rpc.url, 5)
+
+
+def get_permlink(account):
+    """ Helper for permlink creation """
+    return "functional-example-title-{}".format(account)
+
+
+def create_posts(node, accounts, wif=None):
+    """ Create example posts - one post for one account. Accounts given in list format """
+    logger.info("Creating posts...")
+    for acnt in accounts:
+        logger.info("New post ==> ({},{},{},{},{})".format(
+            "Hivepy example post title [{}]".format(acnt['name']), 
+            "Hivepy example post body [{}]".format(acnt['name']), 
+            acnt['name'], 
+            get_permlink(acnt['name']), 
+            "example"
+        ))
+        node.post("Hivepy example post title [{}]".format(acnt['name']), 
+            "Hivepy example post body [{}]".format(acnt['name']), 
+            acnt['name'], 
+            permlink = get_permlink(acnt['name']), 
+            tags = "example")
+    if wif is not None:
+        hive_utils.debug_generate_blocks(node.rpc.url, wif, 5)
+    else:
+        hive_utils.common.wait_n_blocks(node.rpc.url, 5)
+
+def print_balance(node, accounts):
+    """ Print balance for accounts given as a list """
+    from beem.account import Account
+    balances = []
+    balances_str = []
+    for acnt in accounts:
+        ret = Account(acnt['name'], hive_instance=node).json()
+        hbd = ret.get('hbd_balance', None)
+        if hbd is not None:
+            hbd = hbd.get('amount')
+        balances_str.append("{}:{}".format(acnt['name'], hbd))
+        balances.append(hbd)
+    logger.info("Balances ==> {}".format(",".join(balances_str)))
+    return balances
+
+class HivemindFunctionalRunner(hive_utils.test_runner.TestRunner):
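+    """Community test runner: creates test accounts, funds them and creates
+    posts before hivemind initial sync, then creates a community account and
+    verifies it through the bridge.list_communities API."""
+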
+    def __init__(self, creator, hived_path, wif, node_url, database_url, working_dir, config_path):
+        super().__init__(hived_path, wif, node_url, database_url, working_dir, config_path)
+        self.creator = creator
+        self.accounts = [
+            # place accounts here in the format: {'name' : name, 'private_key' : private-key, 'public_key' : public-key}
+            {"name" : "tester001", "private_key" : "5KQeu7SdzxT1DiUzv7jaqwkwv1V8Fi7N8NBZtHugWYXqVFH1AFa", "public_key" : "TST8VfiahQsfS1TLcnBfp4NNfdw67uWweYbbUXymbNiDXVDrzUs7J"},
+            {"name" : "tester002", "private_key" : "5KgfcV9bgEen3v9mxkoGw6Rhuf2giDRZTHZjzwisjkrpF4FUh3N", "public_key" : "TST5gQPYm5bs9dRPHpqBy6dU32M8FcoKYFdF4YWEChUarc9FdYHzn"},
+            {"name" : "tester003", "private_key" : "5Jz3fcrrgKMbL8ncpzTdQmdRVHdxMhi8qScoxSR3TnAFUcdyD5N", "public_key" : "TST57wy5bXyJ4Z337Bo6RbinR6NyTRJxzond5dmGsP4gZ51yN6Zom"},
+            {"name" : "tester004", "private_key" : "5KcmobLVMSAVzETrZxfEGG73Zvi5SKTgJuZXtNgU3az2VK3Krye", "public_key" : "TST8dPte853xAuLMDV7PTVmiNMRwP6itMyvSmaht7J5tVczkDLa5K"},
+            {"name" : "tester005", "private_key" : "5Hy4vEeYmBDvmXipe5JAFPhNwCnx7NfsfyiktBTBURn9Qt1ihcA", "public_key" : "TST7CP7FFjvG55AUeH8riYbfD8NxTTtFH32ekQV4YFXmV6gU8uAg3"}
+        ]
+        keys = [account["private_key"] for account in self.accounts]
+        self.hived_keys.extend(keys)
+
+    def on_before_hived_run(self):
+        pass
+
+    def on_before_hivemind_sync_run(self):
+        # create accounts
+        create_accounts(self.hived_node_client, self.creator, self.accounts)
+        # transfer to vesting
+        transfer_to_vesting(self.hived_node_client, self.creator, self.accounts, "300.000", 
+            "TESTS"
+        )
+        
+        # transfer assets to accounts
+        transfer_assets_to_accounts(self.hived_node_client, self.creator, self.accounts, 
+            "400.000", "TESTS"
+        )
+
+        transfer_assets_to_accounts(self.hived_node_client, self.creator, self.accounts, 
+            "400.000", "TBD"
+        )
+
+        logger.info("Balances for accounts after initial transfer")
+        print_balance(self.hived_node_client, self.accounts)
+        
+        create_posts(self.hived_node_client, self.accounts)
+
+    def on_before_hivemind_server_run(self):
+        pass
+
+    def on_after_hivemind_server_run(self):
+        # create community
+        community_name = "hive-12345"
+        communities = [
+            {"name" : community_name, "private_key" : "5Hxtady4d9AcJoQNt8FsMcGiq7rKSapMx7WZyG3UWB7YPfsHRRX", "public_key" : "TST6MsTXRVf3bFS5KBUHxrMRLSz7ystTUrGADFw98jEBWcZ3btXqb"},
+        ]
+        create_accounts(self.hived_node_client, "tester001", communities)
+        # validate if community exists
+        query = {
+            "jsonrpc": "2.0",
+            "id": 1,
+            "method": "bridge.list_communities",
+            "params": {
+                "sort" : "new"
+            }
+        }
+        ret = hive_utils.common.send_rpc_query(self.hivemind_server.get_address(), query)
+        assert 'error' not in ret, "Query returned error response"
+        assert 'result' in ret, "Expected result key in response"
+        result = ret['result']
+        assert len(result) == 1, "Expecting one item"
+        assert result[0]['name'] == community_name, "Expected name `{}` got `{}`".format(community_name, result[0]['name'])
+
+if __name__ == '__main__':
+    logger.info("Performing tests...")
+    import argparse
+    parser = argparse.ArgumentParser(description="Usage: python3 communities_create.py path/to/hived/executable database_url")
+    parser.add_argument("hived_path", help = "Path to hived executable.")
+    parser.add_argument("database_url", help = "Path to database")
+    parser.add_argument("--creator", dest="creator", default="initminer", help = "Account to create test accounts with")
+    parser.add_argument("--wif", dest="wif", default="5JNHfZYKGaomSFvd4NUdQ9qMcEAC43kujbfjueTHpVapX1Kzq2n", help="Private key for creator account")
+    parser.add_argument("--node-url", dest="node_url", default="http://127.0.0.1:8090", help="Url of working hive node")
+    parser.add_argument("--working-dir", dest="hived_working_dir", default="/tmp/hived-data/", help = "Path to hived working directory")
+    parser.add_argument("--config-path", dest="hived_config_path", default="../../hive_utils/resources/config.ini.in",help = "Path to source config.ini file")
+
+    args = parser.parse_args()
+
+    test_runner = HivemindFunctionalRunner(args.creator, args.hived_path, args.wif, args.node_url, args.database_url, args.hived_working_dir, args.hived_config_path)
+    test_runner.run()
diff --git a/hivemind/functional/tests/example/example.py b/hivemind/functional/tests/example/example.py
new file mode 100644
index 0000000000000000000000000000000000000000..8dc9b914b84b03e4ce0a477721582e0bb585be1d
--- /dev/null
+++ b/hivemind/functional/tests/example/example.py
@@ -0,0 +1,192 @@
+#!/usr/bin/python3
+
+import sys
+sys.path.append("../../")
+import hive_utils
+
+from uuid import uuid4
+from time import sleep
+import logging
+import os
+
+
+LOG_LEVEL = logging.INFO
+LOG_FORMAT = "%(asctime)-15s - %(name)s - %(levelname)s - %(message)s"
+MAIN_LOG_PATH = "functional_example.log"
+log_dir = os.environ.get("TEST_LOG_DIR", None)
+if log_dir is not None:
+    MAIN_LOG_PATH = log_dir + "/" + MAIN_LOG_PATH
+else:
+    MAIN_LOG_PATH = "./" + MAIN_LOG_PATH
+
+
+MODULE_NAME = "Functional-Example"
+logger = logging.getLogger(MODULE_NAME)
+logger.setLevel(LOG_LEVEL)
+
+ch = logging.StreamHandler(sys.stdout)
+ch.setLevel(LOG_LEVEL)
+ch.setFormatter(logging.Formatter(LOG_FORMAT))
+
+fh = logging.FileHandler(MAIN_LOG_PATH)
+fh.setLevel(LOG_LEVEL)
+fh.setFormatter(logging.Formatter(LOG_FORMAT))
+
+if not logger.hasHandlers():
+  logger.addHandler(ch)
+  logger.addHandler(fh)
+
+try:
+    from beem import Hive
+except ImportError:
+    logger.error("beem library is not installed.")
+    sys.exit(1)
+
+def create_accounts(node, creator, accounts):
+    """ Create accounts given as a list using `creator` as a creator account """
+    for account in accounts:
+        logger.info("Creating account: {}".format(account['name']))
+        node.create_account(account['name'], 
+            owner_key=account['public_key'], 
+            active_key=account['public_key'], 
+            posting_key=account['public_key'],
+            memo_key=account['public_key'],
+            store_keys = False,
+            creator=creator,
+            asset='TESTS'
+        )
+    hive_utils.common.wait_n_blocks(node.rpc.url, 5)
+
+
+def transfer_to_vesting(node, from_account, accounts, amount, asset):
+    """ Transfer assets to vesting from `from_account` to accounts given in list """
+    from beem.account import Account
+    for acnt in accounts:
+        logger.info("Transfer to vesting from {} to {} amount {} {}".format(
+            from_account, acnt['name'], amount, asset)
+        )
+        acc = Account(from_account, hive_instance=node)
+        acc.transfer_to_vesting(amount, to = acnt['name'], asset = asset)
+    hive_utils.common.wait_n_blocks(node.rpc.url, 5)
+
+
+def transfer_assets_to_accounts(node, from_account, accounts, amount, asset, wif=None):
+    """ Transfer assets `from_account` to accounts given in list """
+    from beem.account import Account
+    for acnt in accounts:
+        logger.info("Transfer from {} to {} amount {} {}".format(from_account, 
+            acnt['name'], amount, asset)
+        )
+        acc = Account(from_account, hive_instance=node)
+        acc.transfer(acnt['name'], amount, asset, memo = "initial transfer")
+    if wif is not None:
+        hive_utils.debug_generate_blocks(node.rpc.url, wif, 5)
+    else:
+        hive_utils.common.wait_n_blocks(node.rpc.url, 5)
+
+
+def get_permlink(account):
+    """ Helper for permlink creation """
+    return "functional-example-title-{}".format(account)
+
+
+def create_posts(node, accounts, wif=None):
+    """ Create example posts - one post for one account. Accounts given in list format """
+    logger.info("Creating posts...")
+    for acnt in accounts:
+        logger.info("New post ==> ({},{},{},{},{})".format(
+            "Hivepy example post title [{}]".format(acnt['name']), 
+            "Hivepy example post body [{}]".format(acnt['name']), 
+            acnt['name'], 
+            get_permlink(acnt['name']), 
+            "example"
+        ))
+        node.post("Hivepy example post title [{}]".format(acnt['name']), 
+            "Hivepy example post body [{}]".format(acnt['name']), 
+            acnt['name'], 
+            permlink = get_permlink(acnt['name']), 
+            tags = "example")
+    if wif is not None:
+        hive_utils.debug_generate_blocks(node.rpc.url, wif, 5)
+    else:
+        hive_utils.common.wait_n_blocks(node.rpc.url, 5)
+
+def print_balance(node, accounts):
+    """ Print balance for accounts given as a list """
+    from beem.account import Account
+    balances = []
+    balances_str = []
+    for acnt in accounts:
+        ret = Account(acnt['name'], hive_instance=node).json()
+        hbd = ret.get('hbd_balance', None)
+        if hbd is not None:
+            hbd = hbd.get('amount')
+        balances_str.append("{}:{}".format(acnt['name'], hbd))
+        balances.append(hbd)
+    logger.info("Balances ==> {}".format(",".join(balances_str)))
+    return balances
+
+class HivemindFunctionalRunner(hive_utils.test_runner.TestRunner):
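+    """Example runner: creates test accounts, funds them and creates posts
+    before hivemind initial sync; the after-server hook simply sleeps to keep
+    the hivemind server running for a while."""
+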
+    def __init__(self, creator, hived_path, wif, node_url, database_url, working_dir, config_path):
+        super().__init__(hived_path, wif, node_url, database_url, working_dir, config_path)
+        self.creator = creator
+        self.accounts = [
+            # place accounts here in the format: {'name' : name, 'private_key' : private-key, 'public_key' : public-key}
+            {"name" : "tester001", "private_key" : "5KQeu7SdzxT1DiUzv7jaqwkwv1V8Fi7N8NBZtHugWYXqVFH1AFa", "public_key" : "TST8VfiahQsfS1TLcnBfp4NNfdw67uWweYbbUXymbNiDXVDrzUs7J"},
+            {"name" : "tester002", "private_key" : "5KgfcV9bgEen3v9mxkoGw6Rhuf2giDRZTHZjzwisjkrpF4FUh3N", "public_key" : "TST5gQPYm5bs9dRPHpqBy6dU32M8FcoKYFdF4YWEChUarc9FdYHzn"},
+            {"name" : "tester003", "private_key" : "5Jz3fcrrgKMbL8ncpzTdQmdRVHdxMhi8qScoxSR3TnAFUcdyD5N", "public_key" : "TST57wy5bXyJ4Z337Bo6RbinR6NyTRJxzond5dmGsP4gZ51yN6Zom"},
+            {"name" : "tester004", "private_key" : "5KcmobLVMSAVzETrZxfEGG73Zvi5SKTgJuZXtNgU3az2VK3Krye", "public_key" : "TST8dPte853xAuLMDV7PTVmiNMRwP6itMyvSmaht7J5tVczkDLa5K"},
+            {"name" : "tester005", "private_key" : "5Hy4vEeYmBDvmXipe5JAFPhNwCnx7NfsfyiktBTBURn9Qt1ihcA", "public_key" : "TST7CP7FFjvG55AUeH8riYbfD8NxTTtFH32ekQV4YFXmV6gU8uAg3"}
+        ]
+        keys = [account["private_key"] for account in self.accounts]
+        self.hived_keys.extend(keys)
+
+    def on_before_hived_run(self):
+        pass
+
+    def on_before_hivemind_sync_run(self):
+        # create accounts
+        create_accounts(self.hived_node_client, self.creator, self.accounts)
+        # transfer to vesting
+        transfer_to_vesting(self.hived_node_client, self.creator, self.accounts, "300.000", 
+            "TESTS"
+        )
+        
+        # transfer assets to accounts
+        transfer_assets_to_accounts(self.hived_node_client, self.creator, self.accounts, 
+            "400.000", "TESTS"
+        )
+
+        transfer_assets_to_accounts(self.hived_node_client, self.creator, self.accounts, 
+            "400.000", "TBD"
+        )
+
+        logger.info("Balances for accounts after initial transfer")
+        print_balance(self.hived_node_client, self.accounts)
+        
+        create_posts(self.hived_node_client, self.accounts)
+
+    def on_before_hivemind_server_run(self):
+        pass
+
+    def on_after_hivemind_server_run(self):
+        from time import sleep
+        sleep(60)
+
+
+if __name__ == '__main__':
+    logger.info("Performing tests...")
+    import argparse
+    parser = argparse.ArgumentParser(description="Usage: python3 example.py path/to/hived/executable database_url")
+    parser.add_argument("hived_path", help = "Path to hived executable.")
+    parser.add_argument("database_url", help = "Path database.")
+    parser.add_argument("--creator", dest="creator", default="initminer", help = "Account to create test accounts with")
+    parser.add_argument("--wif", dest="wif", default="5JNHfZYKGaomSFvd4NUdQ9qMcEAC43kujbfjueTHpVapX1Kzq2n", help="Private key for creator account")
+    parser.add_argument("--node-url", dest="node_url", default="http://127.0.0.1:8090", help="Url of working hive node")
+    parser.add_argument("--working-dir", dest="hived_working_dir", default="/tmp/hived-data/", help = "Path to hived working directory")
+    parser.add_argument("--config-path", dest="hived_config_path", default="../../hive_utils/resources/config.ini.in",help = "Path to source config.ini file")
+
+    args = parser.parse_args()
+
+    test_runner = HivemindFunctionalRunner(args.creator, args.hived_path, args.wif, args.node_url, args.database_url, args.hived_working_dir, args.hived_config_path)
+    test_runner.run()
diff --git a/hivemind/functional/tests/follow/README.md b/hivemind/functional/tests/follow/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..fdcd72da1fac297ec309931d776f35f8f39f2bbd
--- /dev/null
+++ b/hivemind/functional/tests/follow/README.md
@@ -0,0 +1,71 @@
+1. `psql -U postgres`
+2. `create database hive_test;`
+3. `python3 follow.py /path/to/testnet/hived/programs/hived/hived postgresql://hive@localhost:5432/hive_test`
+4. Logs are written to /tmp.
+
+Run example:
+
+```
+$ python3 follow.py /home/dariusz-work/Builds/hive-testnet/programs/hived/hived postgresql://hive@localhost:5432/hive4
+2020-10-12 19:36:43,630 - Functional-Follow - INFO - Performing tests...
+2020-10-12 19:36:43,632 - test_runner.TestRunner - INFO - Executing before hived run hook
+2020-10-12 19:36:43,632 - test_runner.TestRunner - INFO - Running hived via /home/dariusz-work/Builds/hive-testnet/programs/hived/hived in /tmp/hived-data/ with config ../../hive_utils/resources/config.ini.in
+2020-10-12 19:36:43,632 - hive_node.HiveNodeInScreen - INFO - New hive node
+2020-10-12 19:36:43,645 - hive_node.HiveNodeInScreen - INFO - *** START NODE at http://127.0.0.1:8090 in /tmp/hived-data/
+2020-10-12 19:36:43,645 - hive_node.HiveNodeInScreen - INFO - Running hived with command: screen -m -d -L -c /tmp/hived-data//hive_screen-8090.cfg -S hived-8090-2020-10-12 /home/dariusz-work/Builds/hive-testnet/programs/hived/hived -d /tmp/hived-data/ --advanced-benchmark --sps-remove-threshold -1 --enable-stale-production
+2020-10-12 19:37:03,706 - hive_node.HiveNodeInScreen - INFO - Node at http://127.0.0.1:8090 in /tmp/hived-data/ is up and running...
+2020-10-12 19:37:04,320 - test_runner.TestRunner - INFO - Chain prefix is: TST
+2020-10-12 19:37:04,320 - test_runner.TestRunner - INFO - Chain ID is: 18dcf0a285365fc58b71f18b3d3fec954aa0c141c44e4e5cb4cf777b9eab274e
+2020-10-12 19:37:04,320 - test_runner.TestRunner - INFO - Executing before hivemind initial sync run hook
+2020-10-12 19:37:04,320 - Functional-Follow - INFO - Creating account: tester001
+2020-10-12 19:37:04,482 - Functional-Follow - INFO - Creating account: tester002
+2020-10-12 19:37:04,613 - Functional-Follow - INFO - Creating account: tester003
+2020-10-12 19:37:04,684 - Functional-Follow - INFO - Creating account: tester004
+2020-10-12 19:37:04,755 - Functional-Follow - INFO - Creating account: tester005
+2020-10-12 19:37:18,925 - Functional-Follow - INFO - Transfer to vesting from initminer to tester001 amount 300.000 TESTS
+2020-10-12 19:37:19,063 - Functional-Follow - INFO - Transfer to vesting from initminer to tester002 amount 300.000 TESTS
+2020-10-12 19:37:19,144 - Functional-Follow - INFO - Transfer to vesting from initminer to tester003 amount 300.000 TESTS
+2020-10-12 19:37:19,224 - Functional-Follow - INFO - Transfer to vesting from initminer to tester004 amount 300.000 TESTS
+2020-10-12 19:37:19,356 - Functional-Follow - INFO - Transfer to vesting from initminer to tester005 amount 300.000 TESTS
+2020-10-12 19:37:34,528 - Functional-Follow - INFO - Transfer from initminer to tester001 amount 400.000 TESTS
+2020-10-12 19:37:34,598 - Functional-Follow - INFO - Transfer from initminer to tester002 amount 400.000 TESTS
+2020-10-12 19:37:34,669 - Functional-Follow - INFO - Transfer from initminer to tester003 amount 400.000 TESTS
+2020-10-12 19:37:34,750 - Functional-Follow - INFO - Transfer from initminer to tester004 amount 400.000 TESTS
+2020-10-12 19:37:34,882 - Functional-Follow - INFO - Transfer from initminer to tester005 amount 400.000 TESTS
+2020-10-12 19:37:48,991 - Functional-Follow - INFO - Transfer from initminer to tester001 amount 400.000 TBD
+2020-10-12 19:37:49,123 - Functional-Follow - INFO - Transfer from initminer to tester002 amount 400.000 TBD
+2020-10-12 19:37:49,255 - Functional-Follow - INFO - Transfer from initminer to tester003 amount 400.000 TBD
+2020-10-12 19:37:49,336 - Functional-Follow - INFO - Transfer from initminer to tester004 amount 400.000 TBD
+2020-10-12 19:37:49,417 - Functional-Follow - INFO - Transfer from initminer to tester005 amount 400.000 TBD
+2020-10-12 19:38:04,539 - Functional-Follow - INFO - Balances for accounts after initial transfer
+2020-10-12 19:38:04,549 - Functional-Follow - INFO - Balances ==> tester001:400000,tester002:400000,tester003:400000,tester004:400000,tester005:400000
+2020-10-12 19:38:04,549 - Functional-Follow - INFO - Creating posts...
+2020-10-12 19:38:04,549 - Functional-Follow - INFO - New post ==> (Hivepy example post title [tester001],Hivepy example post body [tester001],tester001,functional-example-title-tester001,example)
+2020-10-12 19:38:04,627 - Functional-Follow - INFO - New post ==> (Hivepy example post title [tester002],Hivepy example post body [tester002],tester002,functional-example-title-tester002,example)
+2020-10-12 19:38:04,759 - Functional-Follow - INFO - New post ==> (Hivepy example post title [tester003],Hivepy example post body [tester003],tester003,functional-example-title-tester003,example)
+2020-10-12 19:38:04,840 - Functional-Follow - INFO - New post ==> (Hivepy example post title [tester004],Hivepy example post body [tester004],tester004,functional-example-title-tester004,example)
+2020-10-12 19:38:04,971 - Functional-Follow - INFO - New post ==> (Hivepy example post title [tester005],Hivepy example post body [tester005],tester005,functional-example-title-tester005,example)
+2020-10-12 19:38:49,178 - test_runner.TestRunner - INFO - Start hivemind instance and perform initial sync
+2020-10-12 19:38:49,178 - hivemind.HivemindInScreen - INFO - New hivemind instance
+2020-10-12 19:38:49,178 - hivemind.HivemindInScreen - INFO - Running hivemind with command: screen -m -d -L -c /tmp/hive_screen-8080.cfg -S hive-8080-2020-10-12 hive sync --steemd-url {"default": "http://127.0.0.1:8090"} --database-url postgresql://hive@localhost:5432/hive4 --http-server-port 8080
+2020-10-12 19:38:54,196 - hivemind.HivemindInScreen - INFO - Hivemind at http://0.0.0.0:8080 in /tmp is up and running...
+2020-10-12 19:38:54,197 - common - INFO - Waiting for string "Initial sync complete" in file /tmp/hive-8080-2020-10-12.log
+2020-10-12 19:38:55,198 - test_runner.TestRunner - INFO - Initial sync complete, switching to live sync mode
+2020-10-12 19:38:55,198 - test_runner.TestRunner - INFO - Executing before hivemind server run hook
+2020-10-12 19:38:55,198 - test_runner.TestRunner - INFO - Start hivemind instance as server
+2020-10-12 19:38:55,198 - hivemind.HivemindInScreen - INFO - New hivemind instance
+2020-10-12 19:38:55,198 - hivemind.HivemindInScreen - INFO - Running hivemind with command: screen -m -d -L -c /tmp/hive_screen-8081.cfg -S hive-8081-2020-10-12 hive server --steemd-url {"default": "http://127.0.0.1:8090"} --database-url postgresql://hive@localhost:5432/hive4 --http-server-port 8081
+2020-10-12 19:39:00,212 - hivemind.HivemindInScreen - INFO - Hivemind at http://0.0.0.0:8081 in /tmp is up and running...
+2020-10-12 19:39:00,212 - test_runner.TestRunner - INFO - Executing after hivemind server run hook
+2020-10-12 19:39:30,360 - test_runner.TestRunner - INFO - Stopping hived and all hivemind instances
+2020-10-12 19:39:30,360 - hivemind.HivemindInScreen - INFO - Stopping hivemind at http://0.0.0.0:8081
+2020-10-12 19:39:30,360 - common - INFO - Terminating hive process running on port 8081
+2020-10-12 19:39:30,381 - common - INFO - Done...
+2020-10-12 19:39:30,381 - hivemind.HivemindInScreen - INFO - Stopping hivemind at http://0.0.0.0:8080
+2020-10-12 19:39:30,381 - common - INFO - Terminating hive process running on port 8080
+2020-10-12 19:39:30,403 - common - INFO - Done...
+2020-10-12 19:39:30,403 - hive_node.HiveNodeInScreen - INFO - Stopping node at http://127.0.0.1:8090
+2020-10-12 19:39:30,403 - common - INFO - Terminating hived process running on port 8090
+2020-10-12 19:39:30,427 - common - INFO - Done...
+
+```
\ No newline at end of file
diff --git a/hivemind/functional/tests/follow/follow.py b/hivemind/functional/tests/follow/follow.py
new file mode 100644
index 0000000000000000000000000000000000000000..841025ec54168e30fbcabd3a88f5aa9ec7abda0a
--- /dev/null
+++ b/hivemind/functional/tests/follow/follow.py
@@ -0,0 +1,199 @@
+#!/usr/bin/python3
+
+import sys
+sys.path.append("../../")
+import hive_utils
+
+from time import sleep
+import logging
+import os
+
+
+LOG_LEVEL = logging.INFO
+LOG_FORMAT = "%(asctime)-15s - %(name)s - %(levelname)s - %(message)s"
+MAIN_LOG_PATH = "functional_follow.log"
+log_dir = os.environ.get("TEST_LOG_DIR", None)
+if log_dir is not None:
+    MAIN_LOG_PATH = log_dir + "/" + MAIN_LOG_PATH
+else:
+    MAIN_LOG_PATH = "./" + MAIN_LOG_PATH
+
+
+MODULE_NAME = "Functional-Follow"
+logger = logging.getLogger(MODULE_NAME)
+logger.setLevel(LOG_LEVEL)
+
+ch = logging.StreamHandler(sys.stdout)
+ch.setLevel(LOG_LEVEL)
+ch.setFormatter(logging.Formatter(LOG_FORMAT))
+
+fh = logging.FileHandler(MAIN_LOG_PATH)
+fh.setLevel(LOG_LEVEL)
+fh.setFormatter(logging.Formatter(LOG_FORMAT))
+
+if not logger.hasHandlers():
+    logger.addHandler(ch)
+    logger.addHandler(fh)
+
+try:
+    from beem import Hive
+except ImportError as ex:
+    logger.error("beem library is not installed: {}".format(ex))
+    sys.exit(1)
+
+def create_accounts(node, creator, accounts):
+    """ Create accounts given as a list using `creator` as a creator account """
+    for account in accounts:
+        logger.info("Creating account: {}".format(account['name']))
+        node.create_account(account['name'], 
+            owner_key=account['public_key'], 
+            active_key=account['public_key'], 
+            posting_key=account['public_key'],
+            memo_key=account['public_key'],
+            store_keys = False,
+            creator=creator,
+            asset='TESTS'
+        )
+    hive_utils.common.wait_n_blocks(node.rpc.url, 5)
+
+
+def transfer_to_vesting(node, from_account, accounts, amount, asset):
+    """ Transfer assets to vesting from `from_account` to accounts given in list """
+    from beem.account import Account
+    for acnt in accounts:
+        logger.info("Transfer to vesting from {} to {} amount {} {}".format(
+            from_account, acnt['name'], amount, asset)
+        )
+        acc = Account(from_account, hive_instance=node)
+        acc.transfer_to_vesting(amount, to = acnt['name'], asset = asset)
+    hive_utils.common.wait_n_blocks(node.rpc.url, 5)
+
+
+def transfer_assets_to_accounts(node, from_account, accounts, amount, asset, wif=None):
+    """ Transfer assets `from_account` to accounts given in list """
+    from beem.account import Account
+    for acnt in accounts:
+        logger.info("Transfer from {} to {} amount {} {}".format(from_account, 
+            acnt['name'], amount, asset)
+        )
+        acc = Account(from_account, hive_instance=node)
+        acc.transfer(acnt['name'], amount, asset, memo = "initial transfer")
+    if wif is not None:
+        hive_utils.debug_generate_blocks(node.rpc.url, wif, 5)
+    else:
+        hive_utils.common.wait_n_blocks(node.rpc.url, 5)
+
+
+def get_permlink(account):
+    """ Helper for permlink creation """
+    return "functional-example-title-{}".format(account)
+
+
+def create_posts(node, accounts, wif=None):
+    """ Create example posts - one post for one account. Accounts given in list format """
+    logger.info("Creating posts...")
+    for acnt in accounts:
+        logger.info("New post ==> ({},{},{},{},{})".format(
+            "Hivepy example post title [{}]".format(acnt['name']), 
+            "Hivepy example post body [{}]".format(acnt['name']), 
+            acnt['name'], 
+            get_permlink(acnt['name']), 
+            "example"
+        ))
+        node.post("Hivepy example post title [{}]".format(acnt['name']), 
+            "Hivepy example post body [{}]".format(acnt['name']), 
+            acnt['name'], 
+            permlink = get_permlink(acnt['name']), 
+            tags = "example")
+    if wif is not None:
+        hive_utils.debug_generate_blocks(node.rpc.url, wif, 5)
+    else:
+        hive_utils.common.wait_n_blocks(node.rpc.url, 5)
+
+def print_balance(node, accounts):
+    """ Print HBD balance for accounts given as a list """
+    from beem.account import Account
+    balances = []
+    balances_str = []
+    for acnt in accounts:
+        ret = Account(acnt['name'], hive_instance=node).json()
+        hbd = ret.get('hbd_balance', None)
+        if hbd is not None:
+            hbd = hbd.get('amount')
+        balances_str.append("{}:{}".format(acnt['name'], hbd))
+        balances.append(hbd)
+    logger.info("Balances ==> {}".format(",".join(balances_str)))
+    return balances
+
+class HivemindFunctionalRunner(hive_utils.test_runner.TestRunner):
+    def __init__(self, creator, hived_path, wif, node_url, database_url, working_dir, config_path):
+        super().__init__(hived_path, wif, node_url, database_url, working_dir, config_path)
+        self.creator = creator
+        self.accounts = [
+            # place accounts here in the format: {'name' : name, 'private_key' : private-key, 'public_key' : public-key}
+            {"name" : "tester001", "private_key" : "5KQeu7SdzxT1DiUzv7jaqwkwv1V8Fi7N8NBZtHugWYXqVFH1AFa", "public_key" : "TST8VfiahQsfS1TLcnBfp4NNfdw67uWweYbbUXymbNiDXVDrzUs7J"},
+            {"name" : "tester002", "private_key" : "5KgfcV9bgEen3v9mxkoGw6Rhuf2giDRZTHZjzwisjkrpF4FUh3N", "public_key" : "TST5gQPYm5bs9dRPHpqBy6dU32M8FcoKYFdF4YWEChUarc9FdYHzn"},
+            {"name" : "tester003", "private_key" : "5Jz3fcrrgKMbL8ncpzTdQmdRVHdxMhi8qScoxSR3TnAFUcdyD5N", "public_key" : "TST57wy5bXyJ4Z337Bo6RbinR6NyTRJxzond5dmGsP4gZ51yN6Zom"},
+            {"name" : "tester004", "private_key" : "5KcmobLVMSAVzETrZxfEGG73Zvi5SKTgJuZXtNgU3az2VK3Krye", "public_key" : "TST8dPte853xAuLMDV7PTVmiNMRwP6itMyvSmaht7J5tVczkDLa5K"},
+            {"name" : "tester005", "private_key" : "5Hy4vEeYmBDvmXipe5JAFPhNwCnx7NfsfyiktBTBURn9Qt1ihcA", "public_key" : "TST7CP7FFjvG55AUeH8riYbfD8NxTTtFH32ekQV4YFXmV6gU8uAg3"}
+        ]
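+        # add the test accounts' private keys to the runner's key set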
+        keys = [account["private_key"] for account in self.accounts]
+        self.hived_keys.extend(keys)
+
+    def on_before_hived_run(self):
+        pass
+
+    def on_before_hivemind_sync_run(self):
+        # create accounts
+        create_accounts(self.hived_node_client, self.creator, self.accounts)
+        # transfer to vesting
+        transfer_to_vesting(self.hived_node_client, self.creator, self.accounts, "300.000", 
+            "TESTS"
+        )
+        
+        # transfer assets to accounts
+        transfer_assets_to_accounts(self.hived_node_client, self.creator, self.accounts, 
+            "400.000", "TESTS"
+        )
+
+        transfer_assets_to_accounts(self.hived_node_client, self.creator, self.accounts, 
+            "400.000", "TBD"
+        )
+
+        logger.info("Balances for accounts after initial transfer")
+        print_balance(self.hived_node_client, self.accounts)
+        
+        create_posts(self.hived_node_client, self.accounts)
+
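+        # broadcast a follow custom_json operation while only hived is running;
+        # hivemind will index it during its initial sync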
+        json_data = ["follow", {"follower":"tester001","following":["tester002","tester003"],"what":["blog"]}]
+
+        self.hived_node_client.custom_json("follow",json_data, required_posting_auths=["tester001"])
+
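+        # wait for the custom_json to be included in a block before hivemind starts syncing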
+        sleep(30)
+
+    def on_before_hivemind_server_run(self):
+        pass
+
+    def on_after_hivemind_server_run(self):
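+        # broadcast a second follow operation while hivemind is in live sync,
+        # then give it time to process the containing block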
+        json_data = ["follow", {"follower":"tester001","following":["tester004","tester005"],"what":["blog"]}]
+        self.hived_node_client.custom_json("follow", json_data, required_posting_auths=["tester001"])
+        sleep(30)
+
+
+if __name__ == '__main__':
+    logger.info("Performing tests...")
+    import argparse
+    parser = argparse.ArgumentParser(description="Usage: python3 follow.py path/to/hived/executable database_url")
+    parser.add_argument("hived_path", help="Path to hived executable.")
+    parser.add_argument("database_url", help="Database URL.")
+    parser.add_argument("--creator", dest="creator", default="initminer", help="Account to create test accounts with")
+    parser.add_argument("--wif", dest="wif", default="5JNHfZYKGaomSFvd4NUdQ9qMcEAC43kujbfjueTHpVapX1Kzq2n", help="Private key for creator account")
+    parser.add_argument("--node-url", dest="node_url", default="http://127.0.0.1:8090", help="URL of a running hive node")
+    parser.add_argument("--working-dir", dest="hived_working_dir", default="/tmp/hived-data/", help="Path to hived working directory")
+    parser.add_argument("--config-path", dest="hived_config_path", default="../../hive_utils/resources/config.ini.in", help="Path to source config.ini file")
+
+    args = parser.parse_args()
+
+    test_runner = HivemindFunctionalRunner(args.creator, args.hived_path, args.wif, args.node_url, args.database_url, args.hived_working_dir, args.hived_config_path)
+    test_runner.run()
+
diff --git a/hivemind/functional/tests/trailing_test/trailing_test.py b/hivemind/functional/tests/trailing_test/trailing_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..1b78daae0868c621e3de508c684cff908f4ee95d
--- /dev/null
+++ b/hivemind/functional/tests/trailing_test/trailing_test.py
@@ -0,0 +1,209 @@
+#!/usr/bin/python3
+
+import sys
+sys.path.append("../../")
+import hive_utils
+
+from uuid import uuid4
+from time import sleep
+import logging
+import os
+
+
+LOG_LEVEL = logging.INFO
+LOG_FORMAT = "%(asctime)-15s - %(name)s - %(levelname)s - %(message)s"
+MAIN_LOG_PATH = "functional_trailing.log"
+log_dir = os.environ.get("TEST_LOG_DIR", None)
+if log_dir is not None:
+    MAIN_LOG_PATH = log_dir + "/" + MAIN_LOG_PATH
+else:
+    MAIN_LOG_PATH = "./" + MAIN_LOG_PATH
+
+
+MODULE_NAME = "Functional-Trailing"
+logger = logging.getLogger(MODULE_NAME)
+logger.setLevel(LOG_LEVEL)
+
+ch = logging.StreamHandler(sys.stdout)
+ch.setLevel(LOG_LEVEL)
+ch.setFormatter(logging.Formatter(LOG_FORMAT))
+
+fh = logging.FileHandler(MAIN_LOG_PATH)
+fh.setLevel(LOG_LEVEL)
+fh.setFormatter(logging.Formatter(LOG_FORMAT))
+
+if not logger.hasHandlers():
+    logger.addHandler(ch)
+    logger.addHandler(fh)
+
+try:
+    from beem import Hive
+except ImportError as ex:
+    logger.error("beem library is not installed: {}".format(ex))
+    sys.exit(1)
+
+def create_accounts(node, creator, accounts):
+    """ Create accounts given as a list using `creator` as a creator account """
+    for account in accounts:
+        logger.info("Creating account: {}".format(account['name']))
+        node.create_account(account['name'], 
+            owner_key=account['public_key'], 
+            active_key=account['public_key'], 
+            posting_key=account['public_key'],
+            memo_key=account['public_key'],
+            store_keys = False,
+            creator=creator,
+            asset='TESTS'
+        )
+    hive_utils.common.wait_n_blocks(node.rpc.url, 5)
+
+
+def transfer_to_vesting(node, from_account, accounts, amount, asset):
+    """ Transfer assets to vesting from `from_account` to accounts given in list """
+    from beem.account import Account
+    for acnt in accounts:
+        logger.info("Transfer to vesting from {} to {} amount {} {}".format(
+            from_account, acnt['name'], amount, asset)
+        )
+        acc = Account(from_account, hive_instance=node)
+        acc.transfer_to_vesting(amount, to = acnt['name'], asset = asset)
+    hive_utils.common.wait_n_blocks(node.rpc.url, 5)
+
+
+def transfer_assets_to_accounts(node, from_account, accounts, amount, asset, wif=None):
+    """ Transfer assets `from_account` to accounts given in list """
+    from beem.account import Account
+    for acnt in accounts:
+        logger.info("Transfer from {} to {} amount {} {}".format(from_account, 
+            acnt['name'], amount, asset)
+        )
+        acc = Account(from_account, hive_instance=node)
+        acc.transfer(acnt['name'], amount, asset, memo = "initial transfer")
+    if wif is not None:
+        hive_utils.debug_generate_blocks(node.rpc.url, wif, 5)
+    else:
+        hive_utils.common.wait_n_blocks(node.rpc.url, 5)
+
+
+def get_permlink(account):
+    """ Helper for permlink creation """
+    return "functional-example-title-{}".format(account)
+
+
+def create_posts(node, accounts, wif=None):
+    """ Create example posts - one post for one account. Accounts given in list format """
+    logger.info("Creating posts...")
+    for acnt in accounts:
+        logger.info("New post ==> ({},{},{},{},{})".format(
+            "Hivepy example post title [{}]".format(acnt['name']), 
+            "Hivepy example post body [{}]".format(acnt['name']), 
+            acnt['name'], 
+            get_permlink(acnt['name']), 
+            "example"
+        ))
+        node.post("Hivepy example post title [{}]".format(acnt['name']), 
+            "Hivepy example post body [{}]".format(acnt['name']), 
+            acnt['name'], 
+            permlink = get_permlink(acnt['name']), 
+            tags = "example")
+    if wif is not None:
+        hive_utils.debug_generate_blocks(node.rpc.url, wif, 5)
+    else:
+        hive_utils.common.wait_n_blocks(node.rpc.url, 5)
+
+def print_balance(node, accounts):
+    """ Print HBD balance for accounts given as a list """
+    from beem.account import Account
+    balances = []
+    balances_str = []
+    for acnt in accounts:
+        ret = Account(acnt['name'], hive_instance=node).json()
+        hbd = ret.get('hbd_balance', None)
+        if hbd is not None:
+            hbd = hbd.get('amount')
+        balances_str.append("{}:{}".format(acnt['name'], hbd))
+        balances.append(hbd)
+    logger.info("Balances ==> {}".format(",".join(balances_str)))
+    return balances
+
+class HivemindFunctionalRunner(hive_utils.test_runner.TestRunner):
+    def __init__(self, creator, hived_path, wif, node_url, database_url, working_dir, config_path):
+        super().__init__(hived_path, wif, node_url, database_url, working_dir, config_path)
+        self.creator = creator
+        self.accounts = [
+            # place accounts here in the format: {'name' : name, 'private_key' : private-key, 'public_key' : public-key}
+            {"name" : "tester001", "private_key" : "5KQeu7SdzxT1DiUzv7jaqwkwv1V8Fi7N8NBZtHugWYXqVFH1AFa", "public_key" : "TST8VfiahQsfS1TLcnBfp4NNfdw67uWweYbbUXymbNiDXVDrzUs7J"},
+            {"name" : "tester002", "private_key" : "5KgfcV9bgEen3v9mxkoGw6Rhuf2giDRZTHZjzwisjkrpF4FUh3N", "public_key" : "TST5gQPYm5bs9dRPHpqBy6dU32M8FcoKYFdF4YWEChUarc9FdYHzn"},
+            {"name" : "tester003", "private_key" : "5Jz3fcrrgKMbL8ncpzTdQmdRVHdxMhi8qScoxSR3TnAFUcdyD5N", "public_key" : "TST57wy5bXyJ4Z337Bo6RbinR6NyTRJxzond5dmGsP4gZ51yN6Zom"},
+            {"name" : "tester004", "private_key" : "5KcmobLVMSAVzETrZxfEGG73Zvi5SKTgJuZXtNgU3az2VK3Krye", "public_key" : "TST8dPte853xAuLMDV7PTVmiNMRwP6itMyvSmaht7J5tVczkDLa5K"},
+            {"name" : "tester005", "private_key" : "5Hy4vEeYmBDvmXipe5JAFPhNwCnx7NfsfyiktBTBURn9Qt1ihcA", "public_key" : "TST7CP7FFjvG55AUeH8riYbfD8NxTTtFH32ekQV4YFXmV6gU8uAg3"}
+        ]
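+        # add the test accounts' private keys to the runner's key set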
+        keys = [account["private_key"] for account in self.accounts]
+        self.hived_keys.extend(keys)
+
+    def on_before_hived_run(self):
+        pass
+
+    def on_before_hivemind_sync_run(self):
+        # create accounts
+        create_accounts(self.hived_node_client, self.creator, self.accounts)
+        # transfer to vesting
+        transfer_to_vesting(self.hived_node_client, self.creator, self.accounts, "300.000", 
+            "TESTS"
+        )
+        
+        # transfer assets to accounts
+        transfer_assets_to_accounts(self.hived_node_client, self.creator, self.accounts, 
+            "400.000", "TESTS"
+        )
+
+        transfer_assets_to_accounts(self.hived_node_client, self.creator, self.accounts, 
+            "400.000", "TBD"
+        )
+
+        logger.info("Balances for accounts after initial transfer")
+        print_balance(self.hived_node_client, self.accounts)
+        
+        create_posts(self.hived_node_client, self.accounts)
+
+    def on_before_hivemind_server_run(self):
+        pass
+
+    def on_after_hivemind_server_run(self):
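+        # poll hived's head block and hivemind's last-processed block every
+        # 3 seconds for 20 iterations to observe how far hivemind trails the chain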
+        counter = 0
+        while counter < 20:
+            # ask hived for its current head block
+            ret = self.hived_node_client.rpc.get_dynamic_global_properties()
+            logger.info("Head block from Hived: {}".format(ret['head_block_number']))
+
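+            # hive.db_head_state reports hivemind's sync state; db_head_block is
+            # the last block recorded in its database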
+            query = {
+              "jsonrpc":"2.0", 
+              "method":"hive.db_head_state", 
+              "id":1
+            }
+
+            ret = hive_utils.common.send_rpc_query(self.hivemind_server.get_address(), query)
+            logger.info("Head block from hivemind: {}".format(ret['result']['db_head_block']))
+
+            sleep(3)
+            counter += 1
+
+
+if __name__ == '__main__':
+    logger.info("Performing tests...")
+    import argparse
+    parser = argparse.ArgumentParser(description="Usage: python3 trailing_test.py path/to/hived/executable database_url")
+    parser.add_argument("hived_path", help="Path to hived executable.")
+    parser.add_argument("database_url", help="Database URL.")
+    parser.add_argument("--creator", dest="creator", default="initminer", help="Account to create test accounts with")
+    parser.add_argument("--wif", dest="wif", default="5JNHfZYKGaomSFvd4NUdQ9qMcEAC43kujbfjueTHpVapX1Kzq2n", help="Private key for creator account")
+    parser.add_argument("--node-url", dest="node_url", default="http://127.0.0.1:8090", help="URL of a running hive node")
+    parser.add_argument("--working-dir", dest="hived_working_dir", default="/tmp/hived-data/", help="Path to hived working directory")
+    parser.add_argument("--config-path", dest="hived_config_path", default="../../hive_utils/resources/config.ini.in", help="Path to source config.ini file")
+
+    args = parser.parse_args()
+
+    test_runner = HivemindFunctionalRunner(args.creator, args.hived_path, args.wif, args.node_url, args.database_url, args.hived_working_dir, args.hived_config_path)
+    test_runner.run()
+