Skip to content
Snippets Groups Projects
Commit 56efafa7 authored by roadscape's avatar roadscape
Browse files

new env/arg parser stable, close #47

parent b31ab750
No related branches found
No related tags found
No related merge requests found
# Ignore hive checkpoint files # Ignore hive checkpoint files
checkpoints/*.json.lst checkpoints/*.json.lst
# ignore hive.conf
hive.conf
# OSX # OSX
.DS_Store .DS_Store
......
import logging
import configargparse
class Conf():
    """Process-wide configuration, read once from CLI args, env vars,
    or ./hive.conf (via configargparse) and exposed as class methods."""

    # parsed argparse namespace; None until read() has run
    _args = None

    @classmethod
    def read(cls):
        """Parse configuration from all sources. Must be called exactly once."""
        assert not cls._args, "config already read"
        #pylint: disable=invalid-name,line-too-long
        parser = configargparse.get_arg_parser(default_config_files=['./hive.conf'])

        # common
        parser.add('--database-url', env_var='DATABASE_URL', required=True, help='database connection url', default='postgresql://user:pass@localhost:5432/hive')
        parser.add('--steemd-url', env_var='STEEMD_URL', required=True, help='steemd/jussi endpoint', default='https://api.steemit.com')
        parser.add('--log-level', env_var='LOG_LEVEL', default='INFO')

        # specific to indexer
        parser.add('--max-workers', type=int, env_var='MAX_WORKERS', default=1)
        parser.add('--max-batch', type=int, env_var='MAX_BATCH', default=100)
        parser.add('--trail-blocks', type=int, env_var='TRAIL_BLOCKS', default=2)

        # specific to API server
        parser.add('--port', type=int, env_var='PORT', default=8080)

        cls._args = parser.parse_args()

    @classmethod
    def args(cls):
        """Return the raw parsed-args namespace (None before read())."""
        return cls._args

    @classmethod
    def get(cls, param):
        """Return a single config value by attribute name; read() must have run."""
        assert cls._args, "run Conf.read()"
        return getattr(cls._args, param)

    @classmethod
    def log_level(cls):
        """Map the configured log-level string to a `logging` int constant.

        Raises ValueError when the string names no known logging level.
        """
        name = cls.get('log_level')
        level = getattr(logging, name.upper(), None)
        if not isinstance(level, int):
            raise ValueError('Invalid log level: %s' % name)
        return level
from hive.db.schema import connect
# Shared module-level connection, opened eagerly at import time.
# NOTE(review): echo=False presumably silences SQLAlchemy statement logging — confirm.
conn = connect(echo=False)
...@@ -3,10 +3,18 @@ import time ...@@ -3,10 +3,18 @@ import time
import re import re
import atexit import atexit
from hive.db import conn
from funcy.seqs import first from funcy.seqs import first
from sqlalchemy import text from sqlalchemy import text
from hive.db.schema import connect
# Cached connection object; stays None until the first conn() call.
_conn = None


def conn():
    """Return the shared database connection, creating it lazily on first use."""
    global _conn
    if not _conn:
        _conn = connect(echo=False)
    return _conn
class QueryStats: class QueryStats:
stats = {} stats = {}
ttl_time = 0.0 ttl_time = 0.0
...@@ -89,10 +97,10 @@ def __query(sql, **kwargs): ...@@ -89,10 +97,10 @@ def __query(sql, **kwargs):
_query = text(sql).execution_options(autocommit=False) _query = text(sql).execution_options(autocommit=False)
try: try:
return conn.execute(_query, **kwargs) return conn().execute(_query, **kwargs)
except Exception as e: except Exception as e:
print("[SQL] Error in query {} ({})".format(sql, kwargs)) print("[SQL] Error in query {} ({})".format(sql, kwargs))
#conn.close() # TODO: check if needed #conn().close() # TODO: check if needed
logger.exception(e) logger.exception(e)
raise e raise e
...@@ -124,7 +132,7 @@ def query_one(sql, **kwargs): ...@@ -124,7 +132,7 @@ def query_one(sql, **kwargs):
return first(row) return first(row)
def db_engine(): def db_engine():
engine = conn.dialect.name engine = conn().dialect.name
if engine not in ['postgresql', 'mysql']: if engine not in ['postgresql', 'mysql']:
raise Exception("db engine %s not supported" % engine) raise Exception("db engine %s not supported" % engine)
return engine return engine
import logging import logging
import os
import sqlalchemy as sa import sqlalchemy as sa
from sqlalchemy.sql import text as sql_text from sqlalchemy.sql import text as sql_text
...@@ -9,6 +8,10 @@ from sqlalchemy.types import VARCHAR ...@@ -9,6 +8,10 @@ from sqlalchemy.types import VARCHAR
from sqlalchemy.types import TEXT from sqlalchemy.types import TEXT
from sqlalchemy.types import BOOLEAN from sqlalchemy.types import BOOLEAN
from hive.conf import Conf
#pylint: disable=line-too-long
metadata = sa.MetaData() metadata = sa.MetaData()
hive_blocks = sa.Table( hive_blocks = sa.Table(
...@@ -274,18 +277,17 @@ hive_state = sa.Table( ...@@ -274,18 +277,17 @@ hive_state = sa.Table(
mysql_default_charset='utf8mb4' mysql_default_charset='utf8mb4'
) )
_url = os.environ.get('DATABASE_URL')
assert _url, 'missing ENV DATABASE_URL'
logging.basicConfig() logging.basicConfig()
#if os.environ.get('LOG_LEVEL') == 'INFO': # ultra-verbose #if Conf.get('log_level') == 'INFO': # ultra-verbose
# logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO) # logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
logging.getLogger('sqlalchemy.engine').setLevel(logging.WARNING) logging.getLogger('sqlalchemy.engine').setLevel(logging.WARNING)
def connect(connection_url=_url, **kwargs): def connect(**kwargs):
connection_url = Conf.get('database_url')
return sa.create_engine(connection_url, isolation_level="READ UNCOMMITTED", pool_recycle=3600, **kwargs).connect() return sa.create_engine(connection_url, isolation_level="READ UNCOMMITTED", pool_recycle=3600, **kwargs).connect()
def setup():
def setup(connection_url=_url): connection_url = Conf.get('database_url')
engine = sa.create_engine(connection_url) engine = sa.create_engine(connection_url)
metadata.create_all(engine) metadata.create_all(engine)
...@@ -309,7 +311,8 @@ def setup(connection_url=_url): ...@@ -309,7 +311,8 @@ def setup(connection_url=_url):
conn.execute(insert) conn.execute(insert)
def teardown(connection_url=_url): def teardown():
connection_url = Conf.get('database_url')
engine = sa.create_engine(connection_url) engine = sa.create_engine(connection_url)
metadata.drop_all(engine) metadata.drop_all(engine)
......
...@@ -8,6 +8,8 @@ import traceback ...@@ -8,6 +8,8 @@ import traceback
from funcy.seqs import drop from funcy.seqs import drop
from toolz import partition_all from toolz import partition_all
from hive.conf import Conf
from hive.db.methods import query from hive.db.methods import query
from hive.db.db_state import DbState from hive.db.db_state import DbState
...@@ -204,4 +206,5 @@ def head_state(*args): ...@@ -204,4 +206,5 @@ def head_state(*args):
if __name__ == '__main__': if __name__ == '__main__':
Conf.read()
run() run()
import os
import time import time
import atexit import atexit
import resource import resource
from decimal import Decimal from decimal import Decimal
from hive.conf import Conf
from hive.utils.normalize import parse_time from hive.utils.normalize import parse_time
from .http_client import HttpClient, RPCError from .http_client import HttpClient, RPCError
...@@ -93,19 +93,13 @@ class SteemClient: ...@@ -93,19 +93,13 @@ class SteemClient:
@classmethod @classmethod
def instance(cls): def instance(cls):
if not cls._instance: if not cls._instance:
api_endpoint = os.environ.get('STEEMD_URL') cls._instance = SteemClient(
max_batch = int(os.environ.get('MAX_BATCH', 500)) url=Conf.get('steemd_url'),
max_workers = int(os.environ.get('MAX_WORKERS', 1)) max_batch=Conf.get('max_batch'),
max_workers=Conf.get('max_workers'))
# TODO: remove after updating docs/orchestration
if os.environ.get('JUSSI_URL'):
print("JUSSI_URL deprecated; use STEEMD_URL")
api_endpoint = os.environ.get('JUSSI_URL')
cls._instance = SteemClient(api_endpoint, max_batch, max_workers)
return cls._instance return cls._instance
def __init__(self, url, max_batch=500, max_workers=1, use_appbase=False): def __init__(self, url, max_batch=500, max_workers=1):
assert url, 'steem-API endpoint undefined' assert url, 'steem-API endpoint undefined'
assert max_batch > 0 and max_batch <= 5000 assert max_batch > 0 and max_batch <= 5000
assert max_workers > 0 and max_workers <= 500 assert max_workers > 0 and max_workers <= 500
......
...@@ -9,18 +9,17 @@ from aiopg.sa import create_engine ...@@ -9,18 +9,17 @@ from aiopg.sa import create_engine
from jsonrpcserver import config from jsonrpcserver import config
from jsonrpcserver.async_methods import AsyncMethods from jsonrpcserver.async_methods import AsyncMethods
from hive.conf import Conf
from hive.server import condenser_api from hive.server import condenser_api
from hive.server import hive_api from hive.server import hive_api
str_log_level = os.environ.get('LOG_LEVEL') or 'DEBUG' Conf.read()
log_level = getattr(logging, str_log_level.upper(), None) log_level = Conf.log_level()
if not isinstance(log_level, int):
raise ValueError('Invalid log level: %s' % str_log_level)
config.debug = (log_level == logging.DEBUG) config.debug = (log_level == logging.DEBUG)
logging.basicConfig(level=log_level) logging.basicConfig(level=log_level)
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
logging.getLogger('jsonrpcserver.dispatcher.response').setLevel(log_level) logging.getLogger('jsonrpcserver.dispatcher.response').setLevel(log_level)
...@@ -114,7 +113,6 @@ async def health(request): ...@@ -114,7 +113,6 @@ async def health(request):
state=state, state=state,
timestamp=datetime.utcnow().isoformat())) timestamp=datetime.utcnow().isoformat()))
async def jsonrpc_handler(request): async def jsonrpc_handler(request):
request = await request.text() request = await request.text()
response = await methods.dispatch(request) response = await methods.dispatch(request)
...@@ -134,10 +132,5 @@ app.router.add_post('/legacy', non_appbase_handler) ...@@ -134,10 +132,5 @@ app.router.add_post('/legacy', non_appbase_handler)
if __name__ == '__main__': if __name__ == '__main__':
import argparse app['config']['args'] = Conf.args()
parser = argparse.ArgumentParser(description="hivemind jsonrpc server") web.run_app(app, port=app['config']['args'].port)
parser.add_argument('--database_url', type=str, default='postgresql://root:root_password@127.0.0.1:5432/testdb')
parser.add_argument('--port', type=int, default=8080)
args = parser.parse_args()
app['config']['args'] = args
web.run_app(app, port=args.port)
...@@ -2,4 +2,4 @@ ...@@ -2,4 +2,4 @@
POPULATE_CMD="$(which hive)" POPULATE_CMD="$(which hive)"
python3 /app/hive/server/serve.py --database_url="${DATABASE_URL}" --port="${HTTP_SERVER_PORT}" python3 /app/hive/server/serve.py --database-url="${DATABASE_URL}" --port="${HTTP_SERVER_PORT}"
...@@ -39,6 +39,7 @@ setup( ...@@ -39,6 +39,7 @@ setup(
'PrettyTable', 'PrettyTable',
'psycopg2', 'psycopg2',
'aiocache', 'aiocache',
'configargparse',
], ],
entry_points={ entry_points={
'console_scripts': [ 'console_scripts': [
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment