diff --git a/bitshares/__init__.py b/bitshares/__init__.py index cdc3d412669c1d6b3f92b4d105ba3bbac2012463..38931c10e84061c29a0e1dc192bd9c2a0b9d96ac 100644 --- a/bitshares/__init__.py +++ b/bitshares/__init__.py @@ -1 +1 @@ -__ALL__ = [] +__all__ = ['dex'] diff --git a/bitshares/deep_eq.py b/bitshares/deep_eq.py new file mode 100644 index 0000000000000000000000000000000000000000..7168fdbc7d87b6c2af0b4dcc9974ba06fefc57dd --- /dev/null +++ b/bitshares/deep_eq.py @@ -0,0 +1,35 @@ +def deep_eq(_v1, _v2): + import operator + import types + + def _deep_dict_eq(d1, d2): + k1 = sorted(d1.keys()) + k2 = sorted(d2.keys()) + if k1 != k2: # keys should be exactly equal + return False + return sum(deep_eq(d1[k], d2[k]) for k in k1) == len(k1) + + def _deep_iter_eq(l1, l2): + if len(l1) != len(l2): + return False + return sum(deep_eq(v1, v2) for v1, v2 in zip(l1, l2)) == len(l1) + + op = operator.eq + c1, c2 = (_v1, _v2) + + # guard against strings because they are also iterable + # and will consistently cause a RuntimeError (maximum recursion limit reached) + if isinstance(_v1, str): + return op(c1, c2) + + if isinstance(_v1, dict): + op = _deep_dict_eq + else: + try: + c1, c2 = (list(iter(_v1)), list(iter(_v2))) + except TypeError: + c1, c2 = _v1, _v2 + else: + op = _deep_iter_eq + + return op(c1, c2) diff --git a/bitshares/dex.py b/bitshares/dex.py new file mode 100644 index 0000000000000000000000000000000000000000..c61e32772a8148c1e053498decc490794dcd8343 --- /dev/null +++ b/bitshares/dex.py @@ -0,0 +1,1697 @@ +from grapheneapi.grapheneclient import GrapheneClient +from graphenebase import transactions +from graphenebase.operations import operations +from graphenebase.account import PrivateKey, PublicKey +from graphenebase import memo as Memo +from datetime import datetime +import time +import math +from grapheneextra.proposal import Proposal +import logging +from . import deep_eq +log = logging.getLogger(__name__) + + +class NoWalletException(Exception): + pass + + +class InvalidWifKey(Exception): + pass + + +class WifNotActive(Exception): + pass + + +class ExampleConfig() : + """ The behavior of your program can be + defined in a separated class (here called ``ExampleConfig()``. It + contains the wallet and witness connection parameters: + + Configuration Rules: + + * `witness_url` is required in all cases + * If you want to run a bot continuously, the configuration needs + to be inherited from `GrapheneWebsocketProtocol` + * Either you provide access to a cli_wallet via `wallet_host` + (etc.) or your need to provide the **active private key** to the + account as `wif` + + The config class is used to define several attributes *and* + methods that will be used during API communication.. + + .. code-block:: python + + class Config(GrapheneWebsocketProtocol): # Note the dependency + wallet_host = "localhost" + wallet_port = 8092 + wallet_user = "" + wallet_password = "" + witness_url = "ws://localhost:8090/" + witness_user = "" + witness_password = "" + wif = None + + All methods within ``graphene.rpc`` are mapped to the + corresponding RPC call of the **wallet** and the parameters are + handed over directly. Similar behavior is implemented for + ``graphene.ws`` which can deal with calls to the **witness + node**. + + This allows the use of rpc commands similar to the + ``GrapheneAPI`` class: + + .. 
code-block:: python + + graphene = GrapheneExchange(Config) + # Calls to the cli-wallet + print(graphene.rpc.info()) + # Calls to the witness node + print(graphene.ws.get_account("init0")) + print(graphene.ws.get_asset("USD")) + print(graphene.ws.get_account_count()) + + """ + + #: Wallet connection parameters + wallet_host = "localhost" + wallet_port = 8092 + wallet_user = "" + wallet_password = "" + + #: Witness connection parameter + witness_url = "ws://localhost:8090/" + witness_user = "" + witness_password = "" + + #: The account used here + account = "fabian" + wif = None + + #: Markets to watch. + watch_markets = ["USD_BTS"] + market_separator = "_" + + +class GrapheneExchange(GrapheneClient) : + """ This class serves as an abstraction layer for the decentralized + exchange within the network and simplifies interaction for + trading bots. + + :param config config: Configuration Class, similar to the + example above + + This class tries to map the poloniex API around the DEX but has + some differences: + + * market pairs are denoted as 'quote'_'base', e.g. `USD_BTS` + * Prices/Rates are denoted in 'base', i.e. the USD_BTS market + is priced in BTS per USD. + Example: in the USD_BTS market, a price of 300 means + a USD is worth 300 BTS + * All markets could be considered reversed as well ('BTS_USD') + + Usage: + + .. code-block:: python + + + from grapheneexchange import GrapheneExchange + import json + + + class Config(): + wallet_host = "localhost" + wallet_port = 8092 + wallet_user = "" + wallet_password = "" + witness_url = "ws://10.0.0.16:8090/" + witness_user = "" + witness_password = "" + + watch_markets = ["USD_BTS", "GOLD_BTS"] + market_separator = "_" + account = "fabian" + wif = None + + if __name__ == '__main__': + dex = GrapheneExchange(Config) + print(json.dumps(dex.returnTradeHistory("USD_BTS"),indent=4)) + print(json.dumps(dex.returnTicker(),indent=4)) + print(json.dumps(dex.return24Volume(),indent=4)) + print(json.dumps(dex.returnOrderBook("USD_BTS"),indent=4)) + print(json.dumps(dex.returnBalances(),indent=4)) + print(json.dumps(dex.returnOpenOrders("all"),indent=4)) + print(json.dumps(dex.buy("USD_BTS", 0.001, 10),indent=4)) + print(json.dumps(dex.sell("USD_BTS", 0.001, 10),indent=4)) + """ + markets = {} + + #: store assets as static variable to speed things up! 
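+    #: (filled lazily by ``_get_asset()``, which stores each asset under both its id and its symbol)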
+ assets = {} + + #: The trading account + myAccount = None + + def __init__(self, config, **kwargs) : + # Defaults: + self.safe_mode = True + + #: Propose transactions (instead of broadcasting every order, we + # here propose every order in a single proposal + self.propose_only = False + self.propose_operations = [] + + if "safe_mode" in kwargs: + self.safe_mode = kwargs["safe_mode"] + if "propose_only" in kwargs: + self.propose_only = kwargs["propose_only"] + + if "prefix" in kwargs: + self.prefix = kwargs["prefix"] + else: + self.prefix = getattr(config, "prefix", "BTS") + + #: The wif key can be used for creating transactions **if** not + # connected to a cli_wallet + if not hasattr(config, "wif"): + setattr(config, "wif", None) + if not getattr(config, "wif"): + config.wif = None + else: + # Test for valid Private Key + try: + config.wif = str(PrivateKey(config.wif)) + except: + raise InvalidWifKey + + if not hasattr(config, "memo_wif"): + setattr(config, "memo_wif", None) + if not getattr(config, "memo_wif"): + config.memo_wif = None + else: + # Test for valid Private Key + try: + config.memo_wif = str(PrivateKey(config.memo_wif)) + except: + raise InvalidWifKey + + self.config = config + super().__init__(config) + + # Get my Account + self.myAccount = self.getMyAccount() + + if not self.myAccount: + raise ValueError( + "Couldn't find account name %s" % self.config.account + + " on the chain! Please double-check!" + ) + + # Now verify that the given wif key has active permissions: + if getattr(config, "wif") and config.wif: + pubkey = format(PrivateKey(config.wif).pubkey, self.prefix) + if not any(filter( + lambda x: x[0] == pubkey, self.myAccount["active"]["key_auths"] + )): + raise WifNotActive + + def executeOps(self, ops): + expiration = transactions.formatTimeFromNow(30) + ops = transactions.addRequiredFees(self.ws, ops, "1.3.0") + ref_block_num, ref_block_prefix = transactions.getBlockParams(self.ws) + transaction = transactions.Signed_Transaction( + ref_block_num=ref_block_num, + ref_block_prefix=ref_block_prefix, + expiration=expiration, + operations=ops + ) + transaction = transaction.sign([self.config.wif], self.prefix) + transaction = transaction.json() + if not (self.safe_mode or self.propose_only): + self.ws.broadcast_transaction(transaction, api="network_broadcast") + return transaction + + def formatTimeFromNow(self, secs=0): + """ Properly Format Time that is `x` seconds in the future + + :param int secs: Seconds to go in the future (`x>0`) or the + past (`x<0`) + :return: Properly formated time for Graphene (`%Y-%m-%dT%H:%M:%S`) + :rtype: str + + """ + return datetime.utcfromtimestamp(time.time() + int(secs)).strftime('%Y-%m-%dT%H:%M:%S') + + def normalizePrice(self, market, price): + """ Because assets have different precisions and orders are + created with a rational price, prices defined in floats will + slightly differ from the actual prices on the blockchain. 
+            This is a representation issue between floats being
+            represented as a ratio of integers (satoshis)
+        """
+        m = self._get_assets_from_market(market)
+        base = m["base"]
+        quote = m["quote"]
+        return float(
+            (int(price * 10 ** (base["precision"] - quote["precision"])) /
+             10 ** (base["precision"] - quote["precision"])))
+
+    def _get_market_name_from_ids(self, quote_id, base_id) :
+        """ Returns the properly formatted name of a market given base
+            and quote ids
+
+            :param str quote_id: Object ID of the quote asset
+            :param str base_id: Object ID of the base asset
+            :return: Market name with proper separator
+            :rtype: str
+        """
+        quote = self._get_asset(quote_id)
+        base = self._get_asset(base_id)
+        return quote["symbol"] + self.market_separator + base["symbol"]
+
+    def getMyAccount(self):
+        return self.ws.get_account(self.config.account)
+
+    def _get_asset(self, i):
+        if i in self.assets:
+            return self.assets[i]
+        else:
+            asset = self.ws.get_asset(i)
+            self.assets[asset["id"]] = asset
+            self.assets[asset["symbol"]] = asset
+            return asset
+
+    def _get_assets_from_ids(self, base_id, quote_id) :
+        """ Returns assets of a market given base
+            and quote ids
+
+            :param str quote_id: Object ID of the quote asset
+            :param str base_id: Object ID of the base asset
+            :return: object that contains `quote` and `base` asset objects
+            :rtype: json
+        """
+        quote = self._get_asset(quote_id)
+        base = self._get_asset(base_id)
+        return {"quote" : quote, "base" : base}
+
+    def _get_asset_ids_from_name(self, market) :
+        """ Returns the base and quote ids given a properly formatted
+            market name
+
+            :param str market: Market name (properly separated)
+            :return: object that contains `quote` asset id and `base` asset id
+            :rtype: json
+        """
+        quote_symbol, base_symbol = market.split(self.market_separator)
+        quote = self._get_asset(quote_symbol)
+        base = self._get_asset(base_symbol)
+        return {"quote" : quote["id"], "base" : base["id"]}
+
+    def _get_assets_from_market(self, market) :
+        """ Returns the base and quote assets given a properly formatted
+            market name
+
+            :param str market: Market name (properly separated)
+            :return: object that contains `quote` and `base` asset objects
+            :rtype: json
+        """
+        quote_symbol, base_symbol = market.split(self.market_separator)
+        quote = self._get_asset(quote_symbol)
+        base = self._get_asset(base_symbol)
+        return {"quote" : quote, "base" : base}
+
+    def _get_price(self, o) :
+        """ Given an object with `quote` and `base`, derive the correct
+            price.
+
+            :param Object o: Blockchain object that contains `quote` and `base` amounts and asset ids.
+            :return: price derived as `base`/`quote`
+
+            Prices/Rates are denoted in 'base', i.e. the USD_BTS market
+            is priced in BTS per USD.
+
+            **Example:** in the USD_BTS market, a price of 300 means
+            a USD is worth 300 BTS
+
+            .. note::
+
+                All prices returned are in the **reversed** orientation to the
+                market. I.e. in the BTC/BTS market, prices are BTS per BTC.
+                That way you can multiply prices by `1.05` to get +5%.
+        """
+        quote_amount = float(o["quote"]["amount"])
+        base_amount = float(o["base"]["amount"])
+        quote_id = o["quote"]["asset_id"]
+        base_id = o["base"]["asset_id"]
+        base = self._get_asset(base_id)
+        quote = self._get_asset(quote_id)
+        # invert price!
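+        # (i.e. return the price as `base` per `quote`; a zero quote amount falls through to None below)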
+ if (quote_amount / 10 ** quote["precision"]) > 0.0: + return float((base_amount / 10 ** base["precision"]) / + (quote_amount / 10 ** quote["precision"])) + else: + return None + + def _get_price_filled(self, f, m): + """ A filled order has `receives` and `pays` ops which serve as + `base` and `quote` depending on sell or buy + + :param Object f: Blockchain object for filled orders + :param str m: Market + :return: Price + + Prices/Rates are denoted in 'base', i.e. the USD_BTS market + is priced in BTS per USD. + Example: in the USD_BTS market, a price of 300 means + a USD is worth 300 BTS + + .. note:: + + All prices returned are in the **reveresed** orientation as the + market. I.e. in the BTC/BTS market, prices are BTS per BTC. + That way you can multiply prices with `1.05` to get a +5%. + + """ + r = {} + if f["op"]["receives"]["asset_id"] == m["base"] : + # If the seller received "base" in a quote_base market, than + # it has been a sell order of quote + r["base"] = f["op"]["receives"] + r["quote"] = f["op"]["pays"] + else: + # buy order + r["base"] = f["op"]["pays"] + r["quote"] = f["op"]["receives"] + # invert price! + return self._get_price(r) + + def _get_txorder_price(self, f, m): + """ A newly place limit order has `amount_to_sell` and + `min_to_receive` which serve as `base` and `quote` depending + on sell or buy + + :param Object f: Blockchain object for historical orders + :param str m: Market + :return: Price + """ + r = {} + if f["min_to_receive"]["asset_id"] == m["base"] : + # If the seller received "base" in a quote_base market, than + # it has been a sell order of quote + r["base"] = f["min_to_receive"] + r["quote"] = f["amount_to_sell"] + elif f["min_to_receive"]["asset_id"] == m["quote"]: + # buy order + r["base"] = f["amount_to_sell"] + r["quote"] = f["min_to_receive"] + else : + return None + # invert price! + return self._get_price(r) + + def returnCurrencies(self): + """ In contrast to poloniex, this call returns the assets of the + watched markets only. + + Example Output: + + .. 
code-block:: js + + {'BTS': {'issuer': '1.2.3', 'id': '1.3.0', 'dynamic_asset_data_id': '2.3.0', 'precision': 5, 'symbol': 'BTS', 'options': {'max_market_fee': '1000000000000000', 'blacklist_authorities': [], 'blacklist_markets': [], 'description': '', 'whitelist_authorities': [], 'market_fee_percent': 0, 'core_exchange_rate': {'base': {'asset_id': '1.3.0', 'amount': 1}, 'quote': {'asset_id': '1.3.0', 'amount': 1}}, 'flags': 0, 'extensions': [], 'whitelist_markets': [], 'issuer_permissions': 0, 'max_supply': '360057050210207'}}, 'GOLD': {'issuer': '1.2.0', 'id': '1.3.106', 'dynamic_asset_data_id': '2.3.106', 'precision': 6, 'bitasset_data_id': '2.4.6', 'symbol': 'GOLD', 'options': {'max_market_fee': '1000000000000000', 'blacklist_authorities': [], 'blacklist_markets': [], 'description': '1 troy ounce .999 fine gold', 'whitelist_authorities': [], 'market_fee_percent': 0, 'core_exchange_rate': {'base': {'asset_id': '1.3.106', 'amount': 1}, 'quote': {'asset_id': '1.3.0', 'amount': 34145}}, 'flags': 128, 'extensions': [], 'whitelist_markets': [], 'issuer_permissions': 511, 'max_supply': '1000000000000000'}}, 'USD': {'issuer': '1.2.0', 'id': '1.3.121', 'dynamic_asset_data_id': '2.3.121', 'precision': 4, 'bitasset_data_id': '2.4.21', 'symbol': 'USD', 'options': {'max_market_fee': '1000000000000000', 'blacklist_authorities': [], 'blacklist_markets': [], 'description': '1 United States dollar', 'whitelist_authorities': [], 'market_fee_percent': 0, 'core_exchange_rate': {'base': {'asset_id': '1.3.121', 'amount': 5}, 'quote': {'asset_id': '1.3.0', 'amount': 15751}}, 'flags': 128, 'extensions': [], 'whitelist_markets': [], 'issuer_permissions': 511, 'max_supply': '1000000000000000'}}} + + """ + r = {} + asset_ids = [] + for market in self.markets : + m = self.markets[market] + asset_ids.append(m["base"]) + asset_ids.append(m["quote"]) + asset_ids_unique = list(set(asset_ids)) + assets = self.ws.get_objects(asset_ids_unique) + for a in assets: + r.update({a["symbol"] : a}) + return r + + def returnFees(self) : + """ Returns a dictionary of all fees that apply through the + network + + Example output: + + .. code-block:: js + + {'proposal_create': {'fee': 400000.0}, + 'asset_publish_feed': {'fee': 1000.0}, 'account_create': + {'basic_fee': 950000.0, 'price_per_kbyte': 20000.0, + 'premium_fee': 40000000.0}, 'custom': {'fee': 20000.0}, + 'asset_fund_fee_pool': {'fee': 20000.0}, + 'override_transfer': {'fee': 400000.0}, 'fill_order': + {}, 'asset_update': {'price_per_kbyte': 20000.0, 'fee': + 200000.0}, 'asset_update_feed_producers': {'fee': + 10000000.0}, 'assert': {'fee': 20000.0}, + 'committee_member_create': {'fee': 100000000.0}} + + """ + r = {} + obj, base = self.ws.get_objects(["2.0.0", "1.3.0"]) + fees = obj["parameters"]["current_fees"]["parameters"] + scale = float(obj["parameters"]["current_fees"]["scale"]) + for f in fees: + op_name = "unknown %d" % f[0] + for name in operations: + if operations[name] == f[0]: + op_name = name + fs = f[1] + for _type in fs : + fs[_type] = float(fs[_type]) * scale / 1e4 / 10 ** base["precision"] + r[op_name] = fs + return r + + def returnTicker(self): + """ Returns the ticker for all markets. 
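+
+            A minimal usage sketch (assumes ``dex`` was constructed as in the
+            class example and that ``USD_BTS`` is one of the watched markets):
+
+            .. code-block:: python
+
+                ticker = dex.returnTicker()
+                usd_bts = ticker["USD_BTS"]
+                print(usd_bts["last"], usd_bts["lowestAsk"], usd_bts["highestBid"])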
+ + Output Parameters: + + * ``last``: Price of the order last filled + * ``lowestAsk``: Price of the lowest ask + * ``highestBid``: Price of the highest bid + * ``baseVolume``: Volume of the base asset + * ``quoteVolume``: Volume of the quote asset + * ``percentChange``: 24h change percentage (in %) + * ``settlement_price``: Settlement Price for borrow/settlement + * ``core_exchange_rate``: Core exchange rate for payment of fee in non-BTS asset + * ``price24h``: the price 24h ago + + .. note:: + + All prices returned by ``returnTicker`` are in the **reveresed** + orientation as the market. I.e. in the BTC/BTS market, prices are + BTS per BTC. That way you can multiply prices with `1.05` to + get a +5%. + + The prices in a `quote`/`base` market is denoted in `base` per + `quote`: + + market: USD_BTS - price 300 BTS per USD + + Sample Output: + + .. code-block:: js + + { + "BTS_USD": { + "quoteVolume": 144.1862, + "settlement_price": 0.003009016674102742, + "lowestAsk": 0.002992220227408737, + "baseVolume": 48328.73333, + "percentChange": 2.0000000097901705, + "highestBid": 0.0029411764705882353, + "last": 0.003000000000287946, + "core_exchange_rate": 0.003161120960980772 + }, + "USD_BTS": { + "quoteVolume": 48328.73333, + "settlement_price": 332.3344827586207, + "lowestAsk": 340.0, + "baseVolume": 144.1862, + "percentChange": -1.9607843231354893, + "highestBid": 334.20000000000005, + "last": 333.33333330133934, + "core_exchange_rate": 316.3434782608696 + } + } + + .. note:: A market that has had no trades will result in + prices of "-1" to indicate that no trades have + happend. + + """ + r = {} + for market in self.markets : + m = self.markets[market] + data = {} + quote_asset = self._get_asset(m["quote"]) + base_asset = self._get_asset(m["base"]) + marketHistory = self.ws.get_market_history( + m["quote"], m["base"], + 24 * 60 * 60, + self.formatTimeFromNow(-24 * 60 * 60), + self.formatTimeFromNow(), + api="history") + filled = self.ws.get_fill_order_history( + m["quote"], m["base"], 1, api="history") + # Price and ask/bids + if filled : + data["last"] = self._get_price_filled(filled[0], m) + else : + data["last"] = -1 + + orders = self.ws.get_limit_orders( + m["quote"], m["base"], 1) + if len(orders) > 1: + data["lowestAsk"] = (1 / self._get_price(orders[0]["sell_price"])) + data["highestBid"] = self._get_price(orders[1]["sell_price"]) + else : + data["lowestAsk"] = -1 + data["highestBid"] = -1 + + # Core Exchange rate + if quote_asset["id"] != "1.3.0": + data["core_exchange_rate"] = 1.0 / self._get_price(quote_asset["options"]["core_exchange_rate"]) + else: + data["core_exchange_rate"] = self._get_price(base_asset["options"]["core_exchange_rate"]) + + # smartcoin stuff + if "bitasset_data_id" in quote_asset : + bitasset = self.getObject(quote_asset["bitasset_data_id"]) + backing_asset_id = bitasset["options"]["short_backing_asset"] + if backing_asset_id == base_asset["id"]: + data["settlement_price"] = 1 / self._get_price(bitasset["current_feed"]["settlement_price"]) + elif "bitasset_data_id" in base_asset : + bitasset = self.getObject(base_asset["bitasset_data_id"]) + backing_asset_id = bitasset["options"]["short_backing_asset"] + if backing_asset_id == quote_asset["id"]: + data["settlement_price"] = self._get_price(bitasset["current_feed"]["settlement_price"]) + + if len(marketHistory) : + if marketHistory[0]["key"]["quote"] == m["quote"] : + data["baseVolume"] = float(marketHistory[0]["base_volume"]) / (10 ** base_asset["precision"]) + data["quoteVolume"] = 
float(marketHistory[0]["quote_volume"]) / (10 ** quote_asset["precision"]) + price24h = ((float(marketHistory[0]["open_base"]) / 10 ** base_asset["precision"]) / + (float(marketHistory[0]["open_quote"]) / 10 ** quote_asset["precision"])) + else : + #: Looks weird but is correct: + data["baseVolume"] = float(marketHistory[0]["quote_volume"]) / (10 ** base_asset["precision"]) + data["quoteVolume"] = float(marketHistory[0]["base_volume"]) / (10 ** quote_asset["precision"]) + price24h = ((float(marketHistory[0]["open_quote"]) / 10 ** base_asset["precision"]) / + (float(marketHistory[0]["open_base"]) / 10 ** quote_asset["precision"])) + data["price24h"] = price24h + data["percentChange"] = ((data["last"] / price24h - 1) * 100) + else : + data["baseVolume"] = 0 + data["quoteVolume"] = 0 + data["percentChange"] = 0 + r.update({market : data}) + return r + + def return24Volume(self): + """ Returns the 24-hour volume for all markets, plus totals for primary currencies. + + Sample output: + + .. code-block:: js + + { + "USD_BTS": { + "BTS": 361666.63617, + "USD": 1087.0 + }, + "GOLD_BTS": { + "BTS": 0, + "GOLD": 0 + } + } + + """ + r = {} + for market in self.markets : + m = self.markets[market] + marketHistory = self.ws.get_market_history( + m["quote"], m["base"], + 24 * 60 * 60, + self.formatTimeFromNow(-24 * 60 * 60), + self.formatTimeFromNow(), + api="history") + quote_asset = self._get_asset(m["quote"]) + base_asset = self._get_asset(m["base"]) + data = {} + if len(marketHistory) : + if marketHistory[0]["key"]["quote"] == m["quote"] : + data[m["base_symbol"]] = float(marketHistory[0]["base_volume"]) / (10 ** base_asset["precision"]) + data[m["quote_symbol"]] = float(marketHistory[0]["quote_volume"]) / (10 ** quote_asset["precision"]) + else : + data[m["base_symbol"]] = float(marketHistory[0]["quote_volume"]) / (10 ** base_asset["precision"]) + data[m["quote_symbol"]] = float(marketHistory[0]["base_volume"]) / (10 ** quote_asset["precision"]) + else : + data[m["base_symbol"]] = 0 + data[m["quote_symbol"]] = 0 + r.update({market : data}) + return r + + def returnOrderBook(self, currencyPair="all", limit=25): + """ Returns the order book for a given market. You may also + specify "all" to get the orderbooks of all markets. + + :param str currencyPair: Return results for a particular market only (default: "all") + :param int limit: Limit the amount of orders (default: 25) + + Ouput is formated as::: + + [price, amount, orderid] + + * price is denoted in base per quote + * amount is in quote + + Sample output: + + .. code-block:: js + + {'USD_BTS': {'asks': [[0.0003787878787878788, 203.1935], + [0.0003799587270281197, 123.65374999999999]], 'bids': + [[0.0003676470588235294, 9.9], [0.00036231884057971015, + 10.0]]}, 'GOLD_BTS': {'asks': [[2.25e-05, + 0.045000000000000005], [2.3408239700374533e-05, + 0.33333333333333337]], 'bids': [[2.0833333333333333e-05, + 0.4], [1.851851851851852e-05, 0.0001]]}} + + .. note:: A maximum of 25 orders will be returned! 
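+
+            A minimal usage sketch (assumes ``dex`` is a ``GrapheneExchange``
+            instance and ``USD_BTS`` is a watched market; each entry follows
+            the ``[price, amount, orderid]`` format above):
+
+            .. code-block:: python
+
+                book = dex.returnOrderBook("USD_BTS", limit=5)
+                lowest_ask_price, ask_amount, ask_id = book["USD_BTS"]["asks"][0]
+                highest_bid_price, bid_amount, bid_id = book["USD_BTS"]["bids"][0]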
+ + """ + r = {} + if currencyPair == "all" : + markets = list(self.markets.keys()) + else: + markets = [currencyPair] + for market in markets : + m = self.markets[market] + orders = self.ws.get_limit_orders( + m["quote"], m["base"], limit) + quote_asset = self._get_asset(m["quote"]) + base_asset = self._get_asset(m["base"]) + asks = [] + bids = [] + for o in orders: + if o["sell_price"]["base"]["asset_id"] == m["base"] : + price = self._get_price(o["sell_price"]) + volume = float(o["for_sale"]) / 10 ** base_asset["precision"] / self._get_price(o["sell_price"]) + bids.append([price, volume, o["id"]]) + else : + price = 1 / self._get_price(o["sell_price"]) + volume = float(o["for_sale"]) / 10 ** quote_asset["precision"] + asks.append([price, volume, o["id"]]) + + data = {"asks" : asks, "bids" : bids} + r.update({market : data}) + return r + + def returnBalances(self): + """ Returns all of your balances. + + Example Output: + + .. code-block:: js + + { + "BROWNIE.PTS": 2499.9999, + "EUR": 0.0028, + "BTS": 1893552.94893, + "OPENBTC": 0.00110581, + "GREENPOINT": 0.0 + } + + """ + balances = self.ws.get_account_balances(self.myAccount["id"], []) + asset_ids = [a["asset_id"] for a in balances] + assets = self.ws.get_objects(asset_ids) + data = {} + for i, asset in enumerate(assets) : + amount = float(balances[i]["amount"]) / 10 ** asset["precision"] + if amount == 0.0: + continue + data[asset["symbol"]] = amount + return data + + def returnOpenOrdersIds(self, currencyPair="all"): + """ Returns only the ids of open Orders + """ + r = {} + if currencyPair == "all" : + markets = list(self.markets.keys()) + else: + markets = [currencyPair] + orders = self.ws.get_full_accounts([self.myAccount["id"]], False)[0][1]["limit_orders"] + for market in markets : + r[market] = [] + for o in orders: + for market in markets : + m = self.markets[market] + if ((o["sell_price"]["base"]["asset_id"] == m["base"] and + o["sell_price"]["quote"]["asset_id"] == m["quote"]) or + (o["sell_price"]["base"]["asset_id"] == m["quote"] and + o["sell_price"]["quote"]["asset_id"] == + m["base"])): + r[market].append(o["id"]) + return r + + def returnOpenOrders(self, currencyPair="all"): + """ Returns your open orders for a given market, specified by + the "currencyPair. + + :param str currencyPair: Return results for a particular market only (default: "all") + + Output Parameters: + + - `type`: sell or buy order for `quote` + - `rate`: price for `base` per `quote` + - `orderNumber`: identifier (e.g. for cancelation) + - `amount`: amount of quote + - `total`: amount of base at asked price (amount/price) + - `amount_to_sell`: "amount_to_sell" + + .. note:: Ths method will not show orders of markets that + are **not** in the ``watch_markets`` array! + + Example: + + .. 
code-block:: js + + { + "USD_BTS": [ + { + "orderNumber": "1.7.1505", + "type": "buy", + "rate": 341.74559999999997, + "total": 341.74559999999997, + "amount": 1.0 + }, + { + "orderNumber": "1.7.1512", + "type": "buy", + "rate": 325.904045, + "total": 325.904045, + "amount": 1.0 + }, + { + "orderNumber": "1.7.1513", + "type": "sell", + "rate": 319.45050000000003, + "total": 31945.05, + "amount": 1020486.2195025001 + } + ] + } + + """ + r = {} + if currencyPair == "all" : + markets = list(self.markets.keys()) + else: + markets = [currencyPair] + orders = self.ws.get_full_accounts([self.myAccount["id"]], False)[0][1]["limit_orders"] + for market in markets : + r[market] = [] + for o in orders: + base_id = o["sell_price"]["base"]["asset_id"] + base_asset = self._get_asset(base_id) + for market in markets : + m = self.markets[market] + if (o["sell_price"]["base"]["asset_id"] == m["base"] and + o["sell_price"]["quote"]["asset_id"] == m["quote"]): + # buy + amount = float(o["for_sale"]) / 10 ** base_asset["precision"] / self._get_price(o["sell_price"]) + rate = self._get_price(o["sell_price"]) + t = "buy" + total = amount * rate + for_sale = float(o["for_sale"]) / 10 ** base_asset["precision"] + elif (o["sell_price"]["base"]["asset_id"] == m["quote"] and + o["sell_price"]["quote"]["asset_id"] == m["base"]): + # sell + amount = float(o["for_sale"]) / 10 ** base_asset["precision"] + rate = 1 / self._get_price(o["sell_price"]) + t = "sell" + total = amount * rate + for_sale = float(o["for_sale"]) / 10 ** base_asset["precision"] + else : + continue + r[market].append({"rate" : rate, + "amount" : amount, + "total" : total, + "type" : t, + "amount_to_sell" : for_sale, + "orderNumber" : o["id"]}) + return r + + def returnOpenOrdersStruct(self, currencyPair="all"): + """ This method is similar to ``returnOpenOrders`` but has a different + output format: + + Example: + + .. code-block:: js + + { + "USD_BTS": { + "1.7.1505": { + "orderNumber": "1.7.1505", + "type": "buy", + "rate": 341.74559999999997, + "total": 341.74559999999997, + "amount": 1.0 + }, + "1.7.1512": { + "orderNumber": "1.7.1512", + "type": "buy", + "rate": 325.904045, + "total": 325.904045, + "amount": 1.0 + }, + "1.7.1513": { + "orderNumber": "1.7.1513", + "type": "sell", + "rate": 319.45050000000003, + "total": 31945.05, + "amount": 1020486.2195025001 + } + ] + } + """ + orders = self.returnOpenOrders(currencyPair) + r = {} + for market in orders: + r[market] = {} + for order in orders[market]: + r[market][order["orderNumber"]] = order + return r + + def returnTradeHistory(self, currencyPair="all", limit=25): + """ Returns your trade history for a given market, specified by + the "currencyPair" parameter. You may also specify "all" to + get the orderbooks of all markets. 
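+
+            A minimal usage sketch (assumes ``dex`` is a ``GrapheneExchange``
+            instance and ``USD_BTS`` is a watched market):
+
+            .. code-block:: python
+
+                history = dex.returnTradeHistory("USD_BTS", limit=10)
+                for trade in history["USD_BTS"]:
+                    print(trade["date"], trade["type"], trade["rate"], trade["amount"])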
+ + :param str currencyPair: Return results for a particular market only (default: "all") + :param int limit: Limit the amount of orders (default: 25) + + Output Parameters: + + - `type`: sell or buy + - `rate`: price for `quote` denoted in `base` per `quote` + - `amount`: amount of quote + - `total`: amount of base at asked price (amount/price) + + """ + r = {} + if currencyPair == "all" : + markets = list(self.markets.keys()) + else: + markets = [currencyPair] + for market in markets : + m = self.markets[market] + filled = self.ws.get_fill_order_history( + m["quote"], m["base"], 2 * limit, api="history") + trades = [] + for f in filled: + data = {} + data["date"] = f["time"] + data["rate"] = self._get_price_filled(f, m) + quote = self._get_asset(m["quote"]) + if f["op"]["account_id"] == self.myAccount["id"]: + if f["op"]["pays"]["asset_id"] == m["base"] : + data["type"] = "buy" + data["amount"] = int(f["op"]["receives"]["amount"]) / 10 ** quote["precision"] + else : + data["type"] = "sell" + data["amount"] = int(f["op"]["pays"]["amount"]) / 10 ** quote["precision"] + data["total"] = data["amount"] * data["rate"] + trades.append(data) + r.update({market : trades}) + return r + + def buy(self, + currencyPair, + rate, + amount, + expiration=7 * 24 * 60 * 60, + killfill=False, + returnID=False): + """ Places a buy order in a given market (buy ``quote``, sell + ``base`` in market ``quote_base``). Required POST parameters + are "currencyPair", "rate", and "amount". If successful, the + method will return the order creating (signed) transaction. + + :param str currencyPair: Return results for a particular market only (default: "all") + :param float price: price denoted in ``base``/``quote`` + :param number amount: Amount of ``quote`` to buy + :param number expiration: (optional) expiration time of the order in seconds (defaults to 7 days) + :param bool killfill: flag that indicates if the order shall be killed if it is not filled (defaults to False) + :param bool returnID: If this flag is True, the call will wait for the order to be included in a block and return it's id + + Prices/Rates are denoted in 'base', i.e. the USD_BTS market + is priced in BTS per USD. + + **Example:** in the USD_BTS market, a price of 300 means + a USD is worth 300 BTS + + .. note:: + + All prices returned are in the **reveresed** orientation as the + market. I.e. in the BTC/BTS market, prices are BTS per BTC. + That way you can multiply prices with `1.05` to get a +5%. + """ + if self.safe_mode : + log.warn("Safe Mode enabled! 
Not broadcasting anything!") + # We buy quote and pay with base + quote_symbol, base_symbol = currencyPair.split(self.market_separator) + base = self._get_asset(base_symbol) + quote = self._get_asset(quote_symbol) + if self.rpc: + transaction = self.rpc.sell_asset(self.config.account, + '{:.{prec}f}'.format(amount * rate, prec=base["precision"]), + base_symbol, + '{:.{prec}f}'.format(amount, prec=quote["precision"]), + quote_symbol, + expiration, + killfill, + not (self.safe_mode or self.propose_only)) + jsonOrder = transaction["operations"][0][1] + elif self.config.wif: + s = {"fee": {"amount": 0, "asset_id": "1.3.0"}, + "seller": self.myAccount["id"], + "amount_to_sell": {"amount": int(amount * rate * 10 ** base["precision"]), + "asset_id": base["id"] + }, + "min_to_receive": {"amount": int(amount * 10 ** quote["precision"]), + "asset_id": quote["id"] + }, + "expiration": transactions.formatTimeFromNow(expiration), + "fill_or_kill": killfill, + } + order = transactions.Limit_order_create(**s) + ops = [transactions.Operation(order)] + transaction = self.executeOps(ops) + else: + raise NoWalletException() + + if returnID: + return self._waitForOperationsConfirmation(jsonOrder) + else: + if self.propose_only: + [self.propose_operations.append(o) for o in transaction["operations"]] + return self.propose_operations + else: + return transaction + + def sell(self, + currencyPair, + rate, + amount, + expiration=7 * 24 * 60 * 60, + killfill=False, + returnID=False): + """ Places a sell order in a given market (sell ``quote``, buy + ``base`` in market ``quote_base``). Required POST parameters + are "currencyPair", "rate", and "amount". If successful, the + method will return the order creating (signed) transaction. + + :param str currencyPair: Return results for a particular market only (default: "all") + :param float price: price denoted in ``base``/``quote`` + :param number amount: Amount of ``quote`` to sell + :param number expiration: (optional) expiration time of the order in seconds (defaults to 7 days) + :param bool killfill: flag that indicates if the order shall be killed if it is not filled (defaults to False) + :param bool returnID: If this flag is True, the call will wait for the order to be included in a block and return it's id + + Prices/Rates are denoted in 'base', i.e. the USD_BTS market + is priced in BTS per USD. + + **Example:** in the USD_BTS market, a price of 300 means + a USD is worth 300 BTS + + .. note:: + + All prices returned are in the **reveresed** orientation as the + market. I.e. in the BTC/BTS market, prices are BTS per BTC. + That way you can multiply prices with `1.05` to get a +5%. + """ + if self.safe_mode : + log.warn("Safe Mode enabled! 
Not broadcasting anything!") + # We sell quote and pay with base + quote_symbol, base_symbol = currencyPair.split(self.market_separator) + base = self._get_asset(base_symbol) + quote = self._get_asset(quote_symbol) + if self.rpc: + transaction = self.rpc.sell_asset(self.config.account, + '{:.{prec}f}'.format(amount, prec=quote["precision"]), + quote_symbol, + '{:.{prec}f}'.format(amount * rate, prec=base["precision"]), + base_symbol, + expiration, + killfill, + not (self.safe_mode or self.propose_only)) + jsonOrder = transaction["operations"][0][1] + elif self.config.wif: + s = {"fee": {"amount": 0, "asset_id": "1.3.0"}, + "seller": self.myAccount["id"], + "amount_to_sell": {"amount": int(amount * 10 ** quote["precision"]), + "asset_id": quote["id"] + }, + "min_to_receive": {"amount": int(amount * rate * 10 ** base["precision"]), + "asset_id": base["id"] + }, + "expiration": transactions.formatTimeFromNow(expiration), + "fill_or_kill": killfill, + } + order = transactions.Limit_order_create(**s) + ops = [transactions.Operation(order)] + transaction = self.executeOps(ops) + else: + raise NoWalletException() + + if returnID: + return self._waitForOperationsConfirmation(jsonOrder) + else: + if self.propose_only: + [self.propose_operations.append(o) for o in transaction["operations"]] + return self.propose_operations + else: + return transaction + + def _waitForOperationsConfirmation(self, thisop): + if self.safe_mode: + return "Safe Mode enabled, can't obtain an orderid" + + log.debug("Waiting for operation to be included in block: %s" % str(thisop)) + counter = -2 + blocknum = int(self.ws.get_dynamic_global_properties()["head_block_number"]) + for block in self.ws.block_stream(start=blocknum - 2, mode="head"): + counter += 1 + for tx in block["transactions"]: + for i, op in enumerate(tx["operations"]): + if deep_eq.deep_eq(op[1], thisop): + return (tx["operation_results"][i][1]) + if counter > 10: + raise Exception("The operation has not been added after 10 blocks!") + + def list_debt_positions(self): + """ List Call Positions (borrowed assets and amounts) + + :return: Struct of assets with amounts and call price + :rtype: json + + **Example**: + + .. code-block: js + + {'USD': {'collateral': '865893.75000', + 'collateral_asset': 'BTS', + 'debt': 120.00000} + + """ + debts = self.ws.get_full_accounts([self.myAccount["id"]], False)[0][1]["call_orders"] + r = {} + for debt in debts: + base = self.getObject(debt["call_price"]["base"]["asset_id"]) + quote = self.getObject(debt["call_price"]["quote"]["asset_id"]) + + if "bitasset_data_id" not in quote: + continue + + bitasset = self.getObject(quote["bitasset_data_id"]) + settlement_price = self._get_price(bitasset["current_feed"]["settlement_price"]) + + if not settlement_price: + continue + + call_price = self._get_price(debt["call_price"]) + collateral_amount = int(debt["collateral"]) / 10 ** base["precision"] + debt_amount = int(debt["debt"]) / 10 ** quote["precision"] + + r[quote["symbol"]] = {"collateral_asset" : base["symbol"], + "collateral" : collateral_amount, + "debt" : debt_amount, + "call_price" : call_price, + "settlement_price": settlement_price, + "ratio" : collateral_amount / debt_amount * settlement_price} + return r + + def close_debt_position(self, symbol): + """ Close a debt position and reclaim the collateral + + :param str symbol: Symbol to close debt position for + :raises ValueError: if symbol has no open call position + """ + if self.safe_mode : + log.warn("Safe Mode enabled! 
Not broadcasting anything!") + debts = self.list_debt_positions() + if symbol not in debts: + raise ValueError("No call position open for %s" % symbol) + debt = debts[symbol] + asset = self._get_asset(symbol) + collateral_asset = self._get_asset(debt["collateral_asset"]) + + if self.rpc: + transaction = self.rpc.borrow_asset(self.config.account, + '{:.{prec}f}'.format(-debt["debt"], prec=asset["precision"]), + symbol, + '{:.{prec}f}'.format(-debt["collateral"], prec=collateral_asset["precision"]), + not (self.safe_mode or self.propose_only)) + elif self.config.wif: + s = {'fee': {'amount': 0, 'asset_id': '1.3.0'}, + 'delta_debt': {'amount': int(-debt["debt"] * 10 ** asset["precision"]), + 'asset_id': asset["id"]}, + 'delta_collateral': {'amount': int(-debt["collateral"] * 10 ** collateral_asset["precision"]), + 'asset_id': collateral_asset["id"]}, + 'funding_account': self.myAccount["id"], + 'extensions': []} + ops = [transactions.Operation(transactions.Call_order_update(**s))] + ops = transactions.addRequiredFees(self.ws, ops, "1.3.0") + transaction = self.executeOps(ops) + else: + raise NoWalletException() + + if self.propose_only: + [self.propose_operations.append(o) for o in transaction["operations"]] + return self.propose_operations + else: + return transaction + + def adjust_debt(self, delta_debt, symbol, new_collateral_ratio=None): + """ Adjust the amount of debt for an asset + + :param float delta_debt: Delta of the debt (-10 means reduce debt by 10, +10 means borrow another 10) + :param str symbol: Asset to borrow + :param float new_collateral_ratio: collateral ratio to maintain (optional, by default tries to maintain old ratio) + :raises ValueError: if symbol is not a bitasset + :raises ValueError: if collateral ratio is smaller than maintenance collateral ratio + :raises ValueError: if required amounts of collateral are not available + """ + if self.safe_mode : + log.warn("Safe Mode enabled! Not broadcasting anything!") + # We sell quote and pay with base + asset = self._get_asset(symbol) + if "bitasset_data_id" not in asset: + raise ValueError("%s is not a bitasset!" % symbol) + bitasset = self.getObject(asset["bitasset_data_id"]) + + # Check minimum collateral ratio + backing_asset_id = bitasset["options"]["short_backing_asset"] + maintenance_col_ratio = bitasset["current_feed"]["maintenance_collateral_ratio"] / 1000 + if maintenance_col_ratio > new_collateral_ratio: + raise ValueError("Collateral Ratio has to be higher than %5.2f" % maintenance_col_ratio) + + # Derive Amount of Collateral + collateral_asset = self._get_asset(backing_asset_id) + settlement_price = self._get_price(bitasset["current_feed"]["settlement_price"]) + + current_debts = self.list_debt_positions() + if symbol not in current_debts: + raise ValueError("No Call position available to adjust! Please borrow first!") + + amount_of_collateral = (current_debts[symbol]["debt"] + delta_debt) * new_collateral_ratio / settlement_price + amount_of_collateral -= current_debts[symbol]["collateral"] + + # Verify that enough funds are available + balances = self.returnBalances() + fundsNeeded = amount_of_collateral + self.returnFees()["call_order_update"]["fee"] + fundsHave = balances[collateral_asset["symbol"]] + if fundsHave <= fundsNeeded: + raise ValueError("Not enough funds available. 
Need %f %s, but only %f %s are available" % + (fundsNeeded, collateral_asset["symbol"], fundsHave, collateral_asset["symbol"])) + + # Borrow + if self.rpc: + transaction = self.rpc.borrow_asset(self.config.account, + '{:.{prec}f}'.format(delta_debt, prec=asset["precision"]), + symbol, + '{:.{prec}f}'.format(amount_of_collateral, prec=collateral_asset["precision"]), + not (self.safe_mode or self.propose_only)) + elif self.config.wif: + s = {'fee': {'amount': 0, 'asset_id': '1.3.0'}, + 'delta_debt': {'amount': int(delta_debt * 10 ** asset["precision"]), + 'asset_id': asset["id"]}, + 'delta_collateral': {'amount': int(amount_of_collateral * 10 ** collateral_asset["precision"]), + 'asset_id': collateral_asset["id"]}, + 'funding_account': self.myAccount["id"], + 'extensions': []} + ops = [transactions.Operation(transactions.Call_order_update(**s))] + ops = transactions.addRequiredFees(self.ws, ops, "1.3.0") + transaction = self.executeOps(ops) + else: + raise NoWalletException() + + if self.propose_only: + [self.propose_operations.append(o) for o in transaction["operations"]] + return self.propose_operations + else: + return transaction + + def adjust_collateral_ratio(self, symbol, target_collateral_ratio): + """ Adjust the collataral ratio of a debt position + + :param float amount: Amount to borrow (denoted in 'asset') + :param str symbol: Asset to borrow + :param float target_collateral_ratio: desired collateral ratio + :raises ValueError: if symbol is not a bitasset + :raises ValueError: if collateral ratio is smaller than maintenance collateral ratio + :raises ValueError: if required amounts of collateral are not available + """ + return self.adjust_debt(0, symbol, target_collateral_ratio) + + def borrow(self, amount, symbol, collateral_ratio): + """ Borrow bitassets/smartcoins from the network by putting up + collateral in a CFD at a given collateral ratio. + + :param float amount: Amount to borrow (denoted in 'asset') + :param str symbol: Asset to borrow + :param float collateral_ratio: Collateral ratio to borrow at + :raises ValueError: if symbol is not a bitasset + :raises ValueError: if collateral ratio is smaller than maintenance collateral ratio + :raises ValueError: if required amounts of collateral are not available + + Example Output: + + .. code-block:: js + + { + "ref_block_num": 14705, + "signatures": [], + "extensions": [], + "expiration": "2016-01-11T15:14:30", + "operations": [ + [ + 3, + { + "funding_account": "1.2.282", + "delta_collateral": { + "amount": 1080540000, + "asset_id": "1.3.0" + }, + "extensions": [], + "delta_debt": { + "amount": 10000, + "asset_id": "1.3.106" + }, + "fee": { + "amount": 100000, + "asset_id": "1.3.0" + } + } + ] + ], + "ref_block_prefix": 1284843328 + } + + + """ + if self.safe_mode : + log.warn("Safe Mode enabled! Not broadcasting anything!") + # We sell quote and pay with base + asset = self._get_asset(symbol) + if "bitasset_data_id" not in asset: + raise ValueError("%s is not a bitasset!" 
% symbol) + bitasset = self.getObject(asset["bitasset_data_id"]) + + # Check minimum collateral ratio + backing_asset_id = bitasset["options"]["short_backing_asset"] + maintenance_col_ratio = bitasset["current_feed"]["maintenance_collateral_ratio"] / 1000 + if maintenance_col_ratio > collateral_ratio: + raise ValueError("Collateral Ratio has to be higher than %5.2f" % maintenance_col_ratio) + + # Derive Amount of Collateral + collateral_asset = self._get_asset(backing_asset_id) + settlement_price = self._get_price(bitasset["current_feed"]["settlement_price"]) + amount_of_collateral = amount * collateral_ratio / settlement_price + + # Verify that enough funds are available + balances = self.returnBalances() + fundsNeeded = amount_of_collateral + self.returnFees()["call_order_update"]["fee"] + fundsHave = balances[collateral_asset["symbol"]] + if fundsHave <= fundsNeeded: + raise ValueError("Not enough funds available. Need %f %s, but only %f %s are available" % + (fundsNeeded, collateral_asset["symbol"], fundsHave, collateral_asset["symbol"])) + + # Borrow + if self.rpc: + transaction = self.rpc.borrow_asset(self.config.account, + '{:.{prec}f}'.format(amount, prec=asset["precision"]), + symbol, + '{:.{prec}f}'.format(amount_of_collateral, prec=collateral_asset["precision"]), + not (self.safe_mode or self.propose_only)) + elif self.config.wif: + s = {'fee': {'amount': 0, 'asset_id': '1.3.0'}, + 'delta_debt': {'amount': int(amount * 10 ** asset["precision"]), + 'asset_id': asset["id"]}, + 'delta_collateral': {'amount': int(amount_of_collateral * 10 ** collateral_asset["precision"]), + 'asset_id': collateral_asset["id"]}, + 'funding_account': self.myAccount["id"], + 'extensions': []} + ops = [transactions.Operation(transactions.Call_order_update(**s))] + ops = transactions.addRequiredFees(self.ws, ops, "1.3.0") + transaction = self.executeOps(ops) + else: + raise NoWalletException() + + if self.propose_only: + [self.propose_operations.append(o) for o in transaction["operations"]] + return self.propose_operations + else: + return transaction + + def cancel(self, orderNumber): + """ Cancels an order you have placed in a given market. Requires + only the "orderNumber". An order number takes the form + ``1.7.xxx``. + + :param str orderNumber: The Order Object ide of the form ``1.7.xxxx`` + """ + if self.safe_mode : + log.warn("Safe Mode enabled! Not broadcasting anything!") + if self.rpc: + transaction = self.rpc.cancel_order(orderNumber, not (self.safe_mode or self.propose_only)) + elif self.config.wif: + s = {"fee": {"amount": 0, "asset_id": "1.3.0"}, + "fee_paying_account": self.myAccount["id"], + "order": orderNumber, + "extensions": [] + } + ops = [transactions.Operation(transactions.Limit_order_cancel(**s))] + ops = transactions.addRequiredFees(self.ws, ops, "1.3.0") + transaction = self.executeOps(ops) + else: + raise NoWalletException() + + if self.propose_only: + [self.propose_operations.append(o) for o in transaction["operations"]] + return self.propose_operations + else: + return transaction + + def withdraw(self, currency, amount, address): + """ This Method makes no sense in a decentralized exchange + """ + raise NotImplementedError("No withdrawing from the DEX! " + "Please use 'transfer'!") + + def get_lowest_ask(self, currencyPair="all"): + """ Returns the lowest asks (including amount) for the selected + markets. + + :param str currencyPair: Market for which to get the lowest ask + :return: lowest asks and amounts + :rtype: json + + .. 
code-block:: js + + {'TRADE.BTC_BTC': [0.8695652173913043, 0.0207]} + + """ + orders = self.returnOrderBook(currencyPair, limit=1) + r = {} + for market in orders: + if len(orders[market]["asks"]) > 0: + r[market] = orders[market]["asks"][0] + return r + + def get_lowest_bid(self, currencyPair="all"): + """ Returns the highest bids (including amount) for the selected + markets. + + :param str currencyPair: Market for which to get the highest bid + :return: highest bid and amounts + :rtype: json + + Example: + + .. code-block:: js + + {'TRADE.BTC_BTC': [1.0055304172951232, 0.009945]} + + """ + orders = self.returnOrderBook(currencyPair, limit=1) + r = {} + for market in orders: + if len(orders[market]["bids"]) > 0: + r[market] = orders[market]["bids"][0] + return r + + def get_bids_more_than(self, market, price, limit=25): + """ Returns those bids (order ids) that have a price more than ``price`` + together with volume and actual price. + + :param str market: Market to consider + :param float price: Price threshold + :param number limit: Limit to x bids (defaults to 25) + + Output format: + + .. code-block:: js + + [[price, volume, id], [price, volume, id], ...] + + Example output: + + .. code-block:: js + + { + [ + 0.9945, + 0.01, + "1.7.32504" + ], + [ + 0.9900000120389315, + 0.79741296, + "1.7.25548" + ] + } + """ + orders = self.returnOrderBook(market, limit) + bids = [] + for o in orders[market]["bids"]: + if o[0] > price: + bids.append(o) + return bids + + def get_asks_less_than(self, market, price, limit=25): + """ Returns those asks (order ids) that have a price less than ``price`` + together with volume and actual price. + + :param str market: Market to consider + :param float price: Price threshold + :param number limit: Limit to x bids (defaults to 25) + + Output format: + + .. code-block:: js + + [[price, volume, id], [price, volume, id], ...] + + Example output: + + .. 
code-block:: js + + { + [ + 0.9945, + 0.01, + "1.7.32504" + ], + [ + 0.9900000120389315, + 0.79741296, + "1.7.25548" + ] + } + """ + orders = self.returnOrderBook(market, limit) + asks = [] + for o in orders[market]["asks"]: + if o[0] < price: + asks.append(o) + return asks + + def get_my_bids_more_than(self, market, price): + """ This call will return those open orders that have a price + that is more than ``price`` + """ + myOrders = self.returnOpenOrders(market) + r = [] + for order in myOrders[market]: + if order["type"] == "buy" and order["rate"] < price: + r.append(order) + return r + + def get_my_asks_less_than(self, market, price): + """ This call will return those open orders that have a price + that is less than ``price`` + """ + myOrders = self.returnOpenOrders(market) + r = [] + for order in myOrders[market]: + if order["type"] == "sell" and order["rate"] > price: + r.append(order) + return r + + def get_my_bids_out_of_range(self, market, price, tolerance): + """ This call will return those open bid orders that have a price + that is more than ``tolerance`` away from price + """ + myOrders = self.returnOpenOrders(market) + r = [] + for order in myOrders[market]: + if order["type"] == "buy" and math.fabs(order["rate"] - price) > tolerance: + r.append(order) + return r + + def get_my_asks_out_of_range(self, market, price, tolerance): + """ This call will return those open ask orders that have a price + that is more than ``tolerance`` away from price + """ + myOrders = self.returnOpenOrders(market) + r = [] + for order in myOrders[market]: + if order["type"] == "sell" and math.fabs(order["rate"] - price) > tolerance: + r.append(order) + return r + + def cancel_bids_more_than(self, market, price): + orders = self.get_my_bids_more_than(market, price) + canceledOrders = [] + for order in orders: + self.cancel(order["orderNumber"]) + canceledOrders.append(order["orderNumber"]) + return canceledOrders + + def cancel_asks_less_than(self, market, price): + orders = self.get_my_asks_less_than(market, price) + canceledOrders = [] + for order in orders: + self.cancel(order["orderNumber"]) + canceledOrders.append(order["orderNumber"]) + return canceledOrders + + def cancel_bids_out_of_range(self, market, price, tolerance): + orders = self.get_my_bids_out_of_range(market, price, tolerance) + canceledOrders = [] + for order in orders: + self.cancel(order["orderNumber"]) + canceledOrders.append(order["orderNumber"]) + return canceledOrders + + def cancel_asks_out_of_range(self, market, price, tolerance): + orders = self.get_my_asks_out_of_range(market, price, tolerance) + canceledOrders = [] + for order in orders: + self.cancel(order["orderNumber"]) + canceledOrders.append(order["orderNumber"]) + return canceledOrders + + def propose_all(self, expiration=None, proposer=None): + """ If ``proposal_only`` is set True, this method needs to be + called to **actuctually** propose the operations on the + chain. 
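+
+            A sketch of the intended workflow (market, rate and amount are
+            placeholder values; requires ``propose_only=True``):
+
+            .. code-block:: python
+
+                dex = GrapheneExchange(Config, propose_only=True)
+                dex.buy("USD_BTS", 300, 10)    # queued in propose_operations
+                dex.sell("USD_BTS", 310, 10)   # queued in propose_operations
+                dex.propose_all()              # propose the queued operations on-chain
+                dex.proposals_clear()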
+ + :param time expiration: expiration time formated as ``%Y-%m-%dT%H:%M:%S`` (defaults to 24h) + :param string proposer: name of the account that pays the proposer fee + """ + if not proposer: + proposer = self.config.account + if not expiration: + expiration = datetime.utcfromtimestamp(time.time() + 60 * 60 * 24).strftime('%Y-%m-%dT%H:%M:%S') + account = self.ws.get_account(proposer) + proposal = Proposal(self) + return proposal.propose_operations(self.propose_operations, + expiration, + account["id"], + broadcast=not self.safe_mode) + + def proposals_clear(self): + """ Clear stored proposals + """ + self.propose_operations = [] + + def fund_fee_pool(self, symbol, amount): + """ Fund the fee pool of an asset with BTS + + :param str symbol: Symbol of the asset to fund + :param float amount: Amount of BTS to use for funding fee pool + """ + if self.safe_mode : + log.warn("Safe Mode enabled! Not broadcasting anything!") + if self.rpc: + transaction = self.rpc.fund_asset_fee_pool(self.config.account, symbol, amount, not (self.safe_mode or self.propose_only)) + elif self.config.wif: + asset = self._get_asset(symbol) + s = {"fee": {"amount": 0, + "asset_id": "1.3.0" + }, + "from_account": self.myAccount["id"], + "asset_id": asset["id"], + "amount": int(amount * 10 ** asset["precision"]), + "extensions": [] + } + ops = [transactions.Operation(transactions.Asset_fund_fee_pool(**s))] + transaction = self.executeOps(ops) + else: + raise NoWalletException() + + if self.propose_only: + [self.propose_operations.append(o) for o in transaction["operations"]] + return self.propose_operations + else: + return transaction + + def transfer(self, amount, symbol, recepient, memo=""): + """ Fund the fee pool of an asset with BTS + + :param float amount: Amount to transfer + :param str symbol: Asset to transfer ("SBD" or "STEEM") + :param str recepient: Recepient of the transfer + :param str memo: (Optional) Memo attached to the transfer + + If you want to use a memo you need to specify `memo_wif` in + the configuration (similar to `wif`). + """ + if self.safe_mode : + log.warn("Safe Mode enabled! Not broadcasting anything!") + if self.rpc: + transaction = self.rpc.transfer( + self.config.account, + recepient, + amount, + symbol, + memo, + not (self.safe_mode or self.propose_only) + ) + elif self.config.wif: + from_account = self.myAccount + to_account = self.ws.get_account(recepient) + asset = self._get_asset(symbol) + s = { + "fee": {"amount": 0, + "asset_id": "1.3.0" + }, + "from": from_account["id"], + "to": to_account["id"], + "amount": {"amount": int(amount * 10 ** asset["precision"]), + "asset_id": asset["id"] + } + } + if memo: + if not self.config.memo_wif: + print("Missing memo private key! 
" + "Please define `memo_wif` in your configuration") + return + import random + nonce = str(random.getrandbits(64)) + encrypted_memo = Memo.encode_memo(PrivateKey(self.config.memo_wif), + PublicKey(to_account["options"]["memo_key"]), + nonce, + memo) + memoStruct = {"from": from_account["options"]["memo_key"], + "to": to_account["options"]["memo_key"], + "nonce": nonce, + "message": encrypted_memo, + "chain": "BTS"} + s["memo"] = transactions.Memo(**memoStruct) + + ops = [transactions.Operation(transactions.Transfer(**s))] + transaction = self.executeOps(ops) + else: + raise NoWalletException() + + if self.propose_only: + [self.propose_operations.append(o) for o in transaction["operations"]] + return self.propose_operations + else: + return transaction diff --git a/bitshares/proposal.py b/bitshares/proposal.py new file mode 100644 index 0000000000000000000000000000000000000000..6a211c73a9aa1a03138b473f5559fa2a045461fa --- /dev/null +++ b/bitshares/proposal.py @@ -0,0 +1,128 @@ +from grapheneapi.grapheneclient import GrapheneClient +from datetime import datetime +import time + +# from graphenebase.transactions import operations + + +class Proposal(GrapheneClient) : + """ Manage Proposals + + :param grapheneapi.GrapheneClient grapheneClient: Grapehen + Client instance with connection details for RPC + *and* websocket connection + + """ + def __init__(self, *args, **kwargs) : + super(Proposal, self).__init__(*args, **kwargs) + + def approve_available_proposals(self, from_account, approving_account) : + """ Approve all proposals for a given account with given approver + + :param str from_account: account name to approve *all* proposals for + :param str approving_account: approving account + + """ + fromAccount = self.rpc.get_account(from_account) + approving_account = self.rpc.get_account(approving_account) + proposals = self.rpc.ws.get_proposed_transactions(fromAccount["id"]) + for proposal in proposals : + if approving_account["id"] in proposal["available_active_approvals"] : + print("%s: Proposal %s already approved. Expires on %s UTC" % + (fromAccount["name"], proposal['id'], proposal["expiration_time"])) + else : + print("%s: Approving Proposal %s ..." % + (fromAccount["name"], proposal['id'])) + self.rpc.approve_proposal(approving_account["name"], + proposal["id"], + {"active_approvals_to_add" : [approving_account["name"]]}, + True) + + def propose_transfer(self, proposer_account, from_account, to_account, + amount, asset, expiration=3600, broadcast=True): + """ Propose a Transfer Transaction (opid=0) + + :param str proposer_account: Account that proposed the transfer (and pays the proposal fee) + :param str from_account: Account to transfer from (pays tx fee) + :param str to_account: Account to transfer to + :param init amount: Amount to transfer (*not* in satoshi, e.g. 100.112 BTS) + :param str asset: Symbol or id of the asset to transfer + :param expiration: Expiration of the proposal (default: 60min) + :param bool broadcast: Broadcast signed transaction or not + + .. 
note:: This method requires + ``propose_builder_transaction2`` to be available in the + cli_wallet + + """ + proposer = self.rpc.get_account(proposer_account) + fromAccount = self.rpc.get_account(from_account) + toAccount = self.rpc.get_account(to_account) + asset = self.rpc.get_asset(asset) + op = self.rpc.get_prototype_operation("transfer_operation") + + op[1]["amount"]["amount"] = int(amount * 10 ** asset["precision"]) + op[1]["amount"]["asset_id"] = asset["id"] + op[1]["from"] = fromAccount["id"] + op[1]["to"] = toAccount["id"] + + exp_time = datetime.utcfromtimestamp(time.time() + int(expiration)).strftime('%Y-%m-%dT%H:%M:%S') + buildHandle = self.rpc.begin_builder_transaction() + self.rpc.add_operation_to_builder_transaction(buildHandle, op) + self.rpc.set_fees_on_builder_transaction(buildHandle, asset["id"]) + self.rpc.propose_builder_transaction2(buildHandle, proposer["name"], exp_time, 0, False) + self.rpc.set_fees_on_builder_transaction(buildHandle, asset["id"]) + return self.rpc.sign_builder_transaction(buildHandle, broadcast) + + def propose_operations(self, ops, expiration, proposer_account, preview=0, broadcast=False): + """ Propose several operations + + :param Array ops: Array of operations + :param time expiration: Expiration time in format '%Y-%m-%dT%H:%M:%S' + :param proposer_account: Account name or id of the proposer (pays the proposal fee) + :param number preview: Preview period (in seconds) + :param bool broadcast: If true, broadcasts the transaction + :return: Signed transaction + :rtype: json + + Once a proposal has been signed, the corresponding + transaction hash can be obtained via: + + .. code-block:: python + + print(rpc.get_transaction_id(tx)) + """ + + proposer = self.rpc.get_account(proposer_account) + buildHandle = self.rpc.begin_builder_transaction() + for op in ops: + self.rpc.add_operation_to_builder_transaction(buildHandle, op) + self.rpc.set_fees_on_builder_transaction(buildHandle, "1.3.0") + self.rpc.propose_builder_transaction2(buildHandle, proposer["name"], expiration, preview, False) + self.rpc.set_fees_on_builder_transaction(buildHandle, "1.3.0") + return self.rpc.sign_builder_transaction(buildHandle, broadcast) + +# ## Alternative implementation building the transactions +# ## manually. Not yet working though +# op = self.rpc.get_prototype_operation("proposal_create_operation") +# for o in ops : +# op[1]["proposed_ops"].append(o) +# op[1]["expiration_time"] = expiration +# op[1]["fee_paying_account"] = payee_id +# op[1]["fee"] = self.get_operations_fee(op, "1.3.0") +# buildHandle = self.rpc.begin_builder_transaction() +# from pprint import pprint +# pprint(op) +# self.rpc.add_operation_to_builder_transaction(buildHandle, op) +# # print(self.rpc.preview_builder_transaction(buildHandle)) +# return self.rpc.sign_builder_transaction(buildHandle, broadcast) + +# def get_operations_fee(self, op, asset_id): +# global_parameters = self.rpc.get_object("2.0.0")[0]["parameters"]["current_fees"] +# parameters = global_parameters["parameters"] +# scale = global_parameters["scale"] / 1e4 +# opID = op[0] +# assert asset_id == "1.3.0", "asset_id has to be '1.3.0'" +# # FIXME limition to "fee"-only! 
Need to evaluate every other as well +# return {"amount": parameters[opID][1]["fee"], +# "asset_id": asset_id} diff --git a/bitsharesapi/__init__.py b/bitsharesapi/__init__.py index cdc3d412669c1d6b3f92b4d105ba3bbac2012463..d37cf63d77ed56fe5bfbab04a887ef1cc5735738 100644 --- a/bitsharesapi/__init__.py +++ b/bitsharesapi/__init__.py @@ -1 +1,6 @@ -__ALL__ = [] +__all__ = ['graphenewsprotocol', + 'graphenews', + 'grapheneapi', + 'grapheneclient', + 'graphenewsrpc' + ] diff --git a/bitsharesapi/api.py b/bitsharesapi/api.py new file mode 100644 index 0000000000000000000000000000000000000000..76c895617b9b90336b385beb2bcbef423dfbae60 --- /dev/null +++ b/bitsharesapi/api.py @@ -0,0 +1,6 @@ +from grapheneapi.grapheneapi import GrapheneAPI + + +class BitSharesAPI(GrapheneAPI): + def __init__(self, *args, **kwargs): + super(BitSharesAPI, self).__init__(*args, **kwargs) diff --git a/bitsharesapi/noderpc.py b/bitsharesapi/noderpc.py new file mode 100644 index 0000000000000000000000000000000000000000..c8f84fced1bd60bce38d839bd5d3ee6b64172508 --- /dev/null +++ b/bitsharesapi/noderpc.py @@ -0,0 +1,248 @@ +import sys +import threading +import websocket +import ssl +import json +import time +from itertools import cycle +from grapheneapi.graphenewsrpc import GrapheneWebsocketRPC +import logging +log = logging.getLogger(__name__) + + +class RPCError(Exception): + pass + + +class NumRetriesReached(Exception): + pass + + +class BitSharesWebsocketRPC(GrapheneWebsocketRPC): + + def __init__(self, *args, **kwargs): + super(BitSharesWebsocketRPC, self).__init__(*args, **kwargs) + + def register_apis(self): + self.api_id["database"] = self.database(api_id=1) + self.api_id["history"] = self.history(api_id=1) + self.api_id["network_broadcast"] = self.network_broadcast(api_id=1) + + def get_account(self, name, **kwargs): + """ Get full account details from account name or id + + :param str name: Account name or account id + """ + if len(name.split(".")) == 3: + return self.get_objects([name])[0] + else: + return self.get_account_by_name(name, **kwargs) + + def get_asset(self, name, **kwargs): + """ Get full asset from name of id + + :param str name: Symbol name or asset id (e.g. 1.3.0) + """ + if len(name.split(".")) == 3: + return self.get_objects([name], **kwargs)[0] + else: + return self.lookup_asset_symbols([name], **kwargs)[0] + + def loop_account_history(self, account, start=0, only_ops=[]): + """ Returns a generator for individual account transactions + + :param str account: account name to get history for + :param int start: sequence number of the first transaction to return + :param array only_ops: Limit generator by these operations (ids) + """ + account = self.get_account(account) + cnt = 0 + while True: + ret = self.get_relative_account_history( + account["id"], + start, + 100, + start + 101, + api="history", + )[::-1] + for i in ret: + if not only_ops or i["op"][0] in only_ops: + cnt += 1 + yield i + if len(ret) < 100: + break + + start += 100 + + def getFullAccountHistory(self, account, begin=1, limit=100, sort="block", **kwargs): + """ Get History of an account + + :param string account: account name or account id + :param number begin: sequence number of first element + :param number limit: limit number of entries + :param string sort: Either "block" or "reversed" + + + **Example:** + + The following code will give you you the first 110 + operations for the account ``faucet`` starting at the first + operation: + + .. 
code-block:: python + + client = GrapheneClient(config) + client.ws.getAccountHistory( + "faucet", + begin=1, + limit=110, + ) + + """ + if account[0:4] == "1.2.": + account_id = account + else: + account_id = self.get_account_by_name(account, **kwargs)["id"] + + if begin < 1: + raise ValueError("begin cannot be smaller than 1") + + if sort != "block": + raise Exception("'sort' can currently only be 'block' " + + "due to backend API issues") + + r = [] + if limit <= 100: + if sort == "block": + ret = self.get_relative_account_history( + account_id, + begin, + limit, + begin + limit, + api="history", **kwargs + ) + [r.append(a) for a in ret[::-1]] + else: + ret = self.get_relative_account_history( + account_id, + begin, + limit, + 0, + api="history" + ) + [r.append(a) for a in ret] + else: + while True: + + if len(r) + 100 > limit: + thislimit = limit - len(r) + else: + thislimit = 100 + + if sort == "block": + ret = self.get_relative_account_history( + account_id, + begin, + thislimit, + begin + thislimit, + api="history", **kwargs + ) + [r.append(a) for a in ret[::-1]] + begin += thislimit + else: + ret = self.get_relative_account_history( + account_id, + begin, + thislimit, + 0, + api="history", **kwargs + ) + [r.append(a) for a in ret] + + if len(ret) < 100: + break + + return r + + def block_stream(self, start=None, mode="irreversible", **kwargs): + """ Yields blocks starting from ``start``. + + :param int start: Starting block + :param str mode: We here have the choice between + * "head": the last block + * "irreversible": the block that is confirmed by 2/3 of all block producers and is thus irreversible! + """ + # Let's find out how often blocks are generated! + config = self.get_global_properties(**kwargs) + block_interval = config["parameters"]["block_interval"] + + if not start: + props = self.get_dynamic_global_properties(**kwargs) + # Get block number + if mode == "head": + start = props['head_block_number'] + elif mode == "irreversible": + start = props['last_irreversible_block_num'] + else: + raise ValueError( + '"mode" has to be "head" or "irreversible"' + ) + + # We are going to loop indefinitely + while True: + + # Get chain properies to identify the + # head/last reversible block + props = self.get_dynamic_global_properties(**kwargs) + + # Get block number + if mode == "head": + head_block = props['head_block_number'] + elif mode == "irreversible": + head_block = props['last_irreversible_block_num'] + else: + raise ValueError( + '"mode" has to be "head" or "irreversible"' + ) + + # Blocks from start until head block + for blocknum in range(start, head_block + 1): + # Get full block + yield self.get_block(blocknum, **kwargs) + + # Set new start + start = head_block + 1 + + # Sleep for one block + time.sleep(block_interval) + + def stream(self, opName, *args, **kwargs): + """ Yield specific operations (e.g. transfers) only + + :param str opName: Name of the operation, e.g. 
transfer, + limit_order_create, limit_order_cancel, call_order_update, + fill_order, account_create, account_update, + account_whitelist, account_upgrade, account_transfer, + asset_create, asset_update, asset_update_bitasset, + asset_update_feed_producers, asset_issue, asset_reserve, + asset_fund_fee_pool, asset_settle, asset_global_settle, + asset_publish_feed, witness_create, witness_update, + proposal_create, proposal_update, proposal_delete, + withdraw_permission_create, withdraw_permission_update, + withdraw_permission_claim, withdraw_permission_delete, + committee_member_create, committee_member_update, + committee_member_update_global_parameters, + vesting_balance_create, vesting_balance_withdraw, + worker_create, custom, assert, balance_claim, + override_transfer, transfer_to_blind, blind_transfer, + transfer_from_blind, asset_settle_cancel, asset_claim_fees + :param int start: Begin at this block + """ + from bitsharesbase.operations import getOperationNameForId + for block in self.block_stream(*args, **kwargs): + if not len(block["transactions"]): + continue + for tx in block["transactions"]: + for op in tx["operations"]: + if getOperationNameForId(op[0]) == opName: + yield op[1] diff --git a/bitsharesapi/websocket.py b/bitsharesapi/websocket.py new file mode 100644 index 0000000000000000000000000000000000000000..07d2a0f3cbe873c9d575053cde9ea2e25afbd832 --- /dev/null +++ b/bitsharesapi/websocket.py @@ -0,0 +1,253 @@ +import time +import asyncio +import ssl +from collections import OrderedDict + +try: + from autobahn.asyncio.websocket import WebSocketClientFactory +except ImportError: + raise ImportError("Missing dependency: autobahn") + +try: + from autobahn.websocket.protocol import parseWsUrl +except: + from autobahn.websocket.util import parse_url as parseWsUrl + +from .websocketprotocol import BitSharesWebsocketProtocol +from .noderpc import BitSharesWebsocketRPC + +import logging +log = logging.getLogger(__name__) + +#: max number of objects to chache + + +class LimitedSizeDict(OrderedDict): + """ This class limits the size of the objectMap to + ``max_cache_objects`` (default_ 50). + + All objects received are stored in the objectMap and get_object + calls will lookup most objects from this structure + """ + + max_cache_objects = 50 + + def __init__(self, *args, **kwds): + if "max_cache_objects" in kwds: + self.max_cache_objects = kwds["max_cache_objects"] + self.size_limit = kwds.pop("size_limit", self.max_cache_objects) + OrderedDict.__init__(self, *args, **kwds) + self._check_size_limit() + + def __setitem__(self, key, value): + OrderedDict.__setitem__(self, key, value) + self.move_to_end(key, last=False) + self._check_size_limit() + + def _check_size_limit(self): + if self.size_limit is not None: + while len(self) > self.size_limit: + self.popitem(last=False) # False -> FIFO + +# def __getitem__(self, key): +# """ keep the element longer in the memory by moving it to the end +# """ +# # self.move_to_end(key, last=False) +# return OrderedDict.__getitem__(self, key) + + +class BitSharesWebsocket(BitSharesWebsocketRPC): + """ This class serves as a management layer for the websocket + connection and configuration of the websocket sub-protocol. + + In order to receive notifications of object changes from the + witness, we need to interface with the websockets protocol. + + To do so, we have developed a `BitSharesWebsocketProtocol`, an + extension to `WebSocketClientProtocol` as provided by + `autobahn.asyncio.websocket`. 
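+
+        A minimal usage sketch (the node URL is a placeholder and the
+        ``print`` callback is arbitrary; see the setter methods below
+        for the other hooks):
+
+        .. code-block:: python
+
+            ws = BitSharesWebsocket("ws://localhost:8090/")
+            ws.setObjectCallbacks({"2.0.0": print})
+            ws.connect()
+            ws.run_forever()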
+ """ + + def __init__(self, url, username="", password="", + proto=BitSharesWebsocketProtocol): + """ Open A BitSharesWebsocketRPC connection that can handle + notifications though asynchronous calls. + + :param str url: Url to the websocket server + :param str username: Username for login + :param str password: Password for login + :param BitSharesWebsocketProtocol proto: (optional) Protocol that inherits ``BitSharesWebsocketProtocol`` + """ + ssl, host, port, resource, path, params = parseWsUrl(url) + self.url = url + self.username = username + self.password = password + + # Open another RPC connection to execute calls + BitSharesWebsocketRPC.__init__(self, url, username, password) + + # Parameters for another connection for asynchronous notifications + self.ssl = ssl + self.host = host + self.port = port + self.proto = proto + self.proto.username = username + self.proto.password = password + self.objectMap = LimitedSizeDict() + self.proto.objectMap = self.objectMap # this is a reference + self.factory = None + + def get_object(self, oid): + """ Get_Object as a passthrough from get_objects([array]) + Attention: This call requires GrapheneAPI because it is a non-blocking + JSON query + + :param str oid: Object ID to fetch + """ + return self.get_objects([oid])[0] + + def getObject(self, oid): + """ Lookup objects from the object storage and if not available, + request object from the API + """ + if self.objectMap is not None and oid in self.objectMap: + return self.objectMap[oid] + else: + data = self.get_object(oid) + self.objectMap[oid] = data + return data + + def connect(self): + """ Create websocket factory by Autobahn + """ + self.factory = WebSocketClientFactory(self.url) + self.factory.protocol = self.proto + + def run_forever(self): + """ Run websocket forever and wait for events. + + This method will try to keep the connection alive and try an + autoreconnect if the connection closes. + """ + if not issubclass(self.factory.protocol, BitSharesWebsocketProtocol): + raise Exception("When using run(), we need websocket " + + "notifications which requires the " + + "configuration/protocol to inherit " + + "'BitSharesWebsocketProtocol'") + + loop = asyncio.get_event_loop() + # forward loop into protocol so that we can issue a reset from the + # protocol: + self.factory.protocol.setLoop(self.factory.protocol, loop) + + while True: + try: + if self.ssl: + context = ssl.create_default_context() + context.check_hostname = False + context.verify_mode = ssl.CERT_NONE + coro = loop.create_connection(self.factory, self.host, + self.port, ssl=context) + else: + coro = loop.create_connection(self.factory, self.host, + self.port, ssl=self.ssl) + + loop.run_until_complete(coro) + loop.run_forever() + except KeyboardInterrupt: + break + except: + pass + + log.error("Trying to re-connect in 10 seconds!") + time.sleep(10) + + log.info("Good bye!") + loop.close() + + def setObjectCallbacks(self, callbacks): + """ Define Callbacks on Objects for websocket connections + + :param json callbacks: A object/callback json structur to + register object updates with a + callback + + The object/callback structure looks as follows: + + .. 
code-block: json + + { + "2.0.0": print, + "object-id": fnt-callback + } + """ + self.proto.database_callbacks = callbacks + + def setAccountsDispatcher(self, accounts, callback): + """ Subscribe to Full Account Updates + + :param accounts: Accounts to subscribe to + :type accounts: array of account IDs + :param fnt callback: function to be called on notifications + """ + self.proto.accounts = accounts + self.proto.accounts_callback = callback + + def setEventCallbacks(self, callbacks): + """ Set Event Callbacks of the subsystem + + :param json callbacks: event/fnt json object + + Available events: + + * ``connection-init`` + * ``connection-opened`` + * ``connection-closed`` + * ``registered-database`` + * ``registered-history`` + * ``registered-network-broadcast`` + * ``registered-network-node`` + + """ + for key in callbacks: + self.proto.onEventCallbacks[key] = callbacks[key] + + def setMarketCallBack(self, markets): + """ Define Callbacks on Market Events for websocket connections + + :param markets: Array of market pairs to register to + :type markets: array of asset pairs + + Example + + .. code-block:: python + + market = {"quote": quote["id"], + "base": base["id"], + "base_symbol": base["symbol"], + "quote_symbol": quote["symbol"], + "callback": print} + setMarketCallBack([market]) + + """ + self.proto.markets = markets + + def setAssetDispatcher(self, assets): + """ Define Callbacks on Asset Events for websocket connections + + :param markets: Array of Assets to register to + :type markets: array of asset pairs + + Example + + .. code-block:: python + + asset = {"id": "1.3.121", + "bitasset_data_id": "2.4.21", + "dynamic_asset_data_id": "2.3.121", + "symbol": "USD", + "callback": print} + setAssetCallBack([asset]) + + """ + self.proto.assets = assets diff --git a/bitsharesapi/websocketprotocol.py b/bitsharesapi/websocketprotocol.py new file mode 100644 index 0000000000000000000000000000000000000000..4673aeaa711b88ddf4405db4767e573ea6734e67 --- /dev/null +++ b/bitsharesapi/websocketprotocol.py @@ -0,0 +1,392 @@ +import json +from functools import partial +import warnings +import logging +log = logging.getLogger(__name__) + + +try: + from autobahn.asyncio.websocket import WebSocketClientProtocol +except ImportError: + raise ImportError("Missing dependency 'autobahn'.") + + +class BitSharesWebsocketProtocol(WebSocketClientProtocol): + """ Graphene Websocket Protocol is the class that will be used + within the websocket subsystem Autobahn to interact with your + API on messages, notifications, and events. + + This class handles the actual calls and graphene-specific + behavior. 
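+
+        It is usually not instantiated directly; instead it (or a
+        subclass of it) is handed to ``BitSharesWebsocket`` as the
+        ``proto`` argument, which then fills in the credentials and the
+        shared object cache. A sketch under that assumption:
+
+        .. code-block:: python
+
+            from bitsharesapi.websocket import BitSharesWebsocket
+
+            ws = BitSharesWebsocket("ws://localhost:8090/",
+                                    proto=BitSharesWebsocketProtocol)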
+ """ + + #: loop will be used to indicate the loss of connection + loop = None + + #: Database callbacks and IDs for object subscriptions + database_callbacks = {} + database_callbacks_ids = {} + + #: Accounts and callbacks for account updates + accounts = [] + accounts_callback = None + + #: Markets to subscribe to + markets = [] + + #: Assets to subscribe to + assets = [] + + #: Storage of Objects to reduce latency and load + objectMap = None + + #: Event Callback registrations and fnts + onEventCallbacks = {} + + #: Registered APIs with corresponding API-IDs + api_ids = {} + + #: Incremental Request ID and request storage (FIXME: request storage + #: is not cleaned up) + request_id = 0 + requests = {} + + def __init__(self): + pass + + def _get_request_id(self): + self.request_id += 1 + return self.request_id + + """ Basic RPC connection + """ + def wsexec(self, params, callback=None): + """ Internally used method to execute calls + + :param json params: parameters defining the actual call + :param fnt callback: Callback to be executed upon receiption + of the answer (defaults to ``None``) + """ + request = {"request" : {}, "callback" : None} + request["id"] = self._get_request_id() + request["request"]["id"] = self.request_id + request["request"]["method"] = "call" + request["request"]["params"] = params + request["callback"] = callback + self.requests.update({self.request_id: request}) + log.debug(request["request"]) + self.sendMessage(json.dumps(request["request"]).encode('utf8')) + + def register_api(self, name): + """ Register to an API of graphene + + :param str name: Name of the API (e.g. database, history, + ...) + """ + self.wsexec([1, name, []], [partial(self._set_api_id, name)]) + + def _set_api_id(self, name, data): + """ Set the API id as returned from the server + + :param str name: Name of the API + :param int data: API id as returned by the server + + """ + self.api_ids.update({name : data}) + if name == "database": + self.eventcallback("registered-database") + elif name == "history": + self.eventcallback("registered-history") + elif name == "network_broadcast": + self.eventcallback("registered-network-broadcast") + elif name == "network_node": + self.eventcallback("registered-network-node") + + def _login(self): + """ Login to the API + """ + log.info("login") + self.wsexec([1, "login", [self.username, self.password]]) + + """ Subscriptions + """ + def subscribe_to_accounts(self, account_ids, *args): + """ Subscribe to account ids + + :param account_ids: Account ids to register to + :type account_ids: Array of account IDs + + """ + log.info("subscribe_to_accounts") + self.wsexec([0, "get_full_accounts", [account_ids, True]]) + + def subscribe_to_markets(self, dummy=None): + """ Subscribe to the markets as defined in ``self.markets``. + """ + log.info("subscribe_to_markets") + for m in self.markets: + market = self.markets[m] + self.wsexec([0, "subscribe_to_market", + [self._get_request_id(), + market["quote"], + market["base"]]]) + + def subscribe_to_objects(self, *args): + """ Subscribe to objects as described in + + * ``self.database_callbacks`` + * ``self.accounts`` + * ``self.assets`` + + and set the subscription callback. 
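+
+            These attributes are normally populated via the
+            ``setObjectCallbacks``, ``setAccountsDispatcher`` and
+            ``setAssetDispatcher`` helpers of ``BitSharesWebsocket``;
+            ``database_callbacks`` simply maps object ids to callables,
+            for example (illustrative only):
+
+            .. code-block:: python
+
+                database_callbacks = {"2.0.0": print}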
+ """ + log.info("subscribe_to_objects") + handles = [] + for handle in self.database_callbacks: + handles.append(partial(self.getObject, handle)) + self.database_callbacks_ids.update({ + handle: self.database_callbacks[handle]}) + + asset_ids = set() + for m in self.assets: + asset_ids.add(m["id"]) + if "bitasset_data_id" in m: + asset_ids.add(m["bitasset_data_id"]) + if "dynamic_asset_data_id" in m: + asset_ids.add(m["dynamic_asset_data_id"]) + handles.append(partial(self.getObjects, list(asset_ids))) + + if self.accounts: + handles.append(partial(self.subscribe_to_accounts, self.accounts)) + self.wsexec([self.api_ids["database"], + "set_subscribe_callback", + [self._get_request_id(), False]], handles) + + """ Objects + """ + def getObject(self, oid, callback=None, *args): + """ Get an Object from the internal object storage if available + or otherwise retrieve it from the API. + + :param object-id oid: Object ID to retrieve + :param fnt callback: Callback to call if object has been received + """ + self.getObjects([oid], callback, *args) + + def getObjects(self, oids, callback=None, *args): + # Are they stored in memory already? + for oid in oids: + if (self.objectMap and + oid in self.objectMap and + callable(callback)): + callback(self.objectMap[oid]) + oids.remove(oid) + # Let's get those that we haven't found in memory! + if oids: + self.wsexec([self.api_ids["database"], + "get_objects", + [oids]], callback) + + def setObject(self, oid, data): + """ Set Object in the internal Object Storage + """ + self.setObjects([oid], [data]) + + def setObjects(self, oids, datas): + if self.objectMap is None: + return + + for i, oid in enumerate(oids): + self.objectMap[oid] = datas[i] + + """ Callbacks and dispatcher + """ + def eventcallback(self, name): + """ Call an event callback + + :param str name: Name of the event + """ + if (name in self.onEventCallbacks and + callable(self.onEventCallbacks[name])): + self.onEventCallbacks[name](self) + + def dispatchNotice(self, notice): + """ Main Message Dispatcher for notifications as called by + ``onMessage``. This dispatcher will separated object, + account and market updates from each other and call the + corresponding callbacks. 
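+
+            Routing is decided by the object id of the notice; the
+            checks performed below can be summarized as follows
+            (illustrative mapping, not an attribute of this class):
+
+            .. code-block:: python
+
+                routing = {
+                    "1.2.x": "accounts_callback",   # watched accounts
+                    "2.6.x": "accounts_callback",   # account history objects
+                    "1.10.x": "accounts_callback",  # proposals
+                    "1.7.x": "market callbacks",    # limit orders
+                    "1.3.x": "asset callbacks",     # asset objects
+                    "2.3.x": "asset callbacks",     # dynamic asset data
+                    "2.4.x": "asset callbacks",     # bitasset data
+                }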
+ + :param json notice: Notice from the API + + """ + if "id" not in notice: + return + oid = notice["id"] + [inst, _type, _id] = oid.split(".") + account_ids = [] + for a in self.accounts : + account_ids.append("2.6.%s" % a.split(".")[2]) # account history + account_ids.append("1.2.%s" % a.split(".")[2]) # account + try: + " Object Subscriptions " + if (oid in self.database_callbacks_ids and + callable(self.database_callbacks_ids[oid])): + self.database_callbacks_ids[oid](self, notice) + + " Account Notifications " + if (callable(self.accounts_callback) and + (oid in account_ids or # account updates + inst == "1" and _type == "10")): # proposals + self.accounts_callback(notice) + + " Market notifications " + if inst == "1" and _type == "7": + for m in self.markets: + market = self.markets[m] + if not callable(market["callback"]): + continue + if(((market["quote"] == notice["sell_price"]["quote"]["asset_id"] and + market["base"] == notice["sell_price"]["base"]["asset_id"]) or + (market["base"] == notice["sell_price"]["quote"]["asset_id"] and + market["quote"] == notice["sell_price"]["base"]["asset_id"]))): + market["callback"](self, notice) + + " Asset notifications " + if (inst == "1" and _type == "3" or # Asset itself + # bitasset and dynamic data + inst == "2" and (_type == "4" or _type == "3")): + for asset in self.assets: + if not callable(asset["callback"]): + continue + if (asset.get("id") == notice["id"] or + asset.get("bitasset_data_id", None) == notice["id"] or + asset.get("dynamic_asset_data_id", None) == notice["id"]): + asset["callback"](self, notice) + + except: + import traceback + log.error('Error dispatching notice: %s' % str(traceback.format_exc())) + + def onConnect(self, response): + """ Is executed on successful connect. Calls event + ``connection-init``. + """ + self.request_id = 1 + log.debug("Server connected: {0}".format(response.peer)) + self.eventcallback("connection-init") + + def onOpen(self): + """ Called if connection Opened successfully. Logs into the API, + requests access to APIs and calls event + ``connection-opened``. + """ + log.debug("WebSocket connection open.") + self._login() + + " Register with database " + self.wsexec([1, "database", []], [ + partial(self._set_api_id, "database"), + self.subscribe_to_objects, + self.subscribe_to_markets]) + + self.register_api("history") +# self.register_api("network_node") + self.register_api("network_broadcast") + self.eventcallback("connection-opened") + + def onMessage(self, payload, isBinary): + """ Main websocket message dispatcher. + + This message separates distinct client initiated responses + from server initiated event-driven notifications and either + calls the corresponding callback or the notification + dispatcher. 
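+
+            The two message shapes distinguished here look roughly as
+            follows (values are illustrative only):
+
+            .. code-block:: python
+
+                # answer to a client initiated call; resolved via ``self.requests``
+                response = {"id": 2, "result": []}
+
+                # server initiated notification; handed to ``dispatchNotice()``
+                notice = {"method": "notice",
+                          "params": [5, [[{"id": "2.1.0"}]]]}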
+ + :param binary payload: data received through the connection + :param bool isBinary: Flag to indicate binary nature of the + payload + """ + res = json.loads(payload.decode('utf8')) + log.debug(res) + if "error" not in res: + " Resolve answers from RPC calls " + if "id" in res: + if res["id"] not in self.requests: + log.warning("Received answer to an unknown request?!") + else: + callbacks = self.requests[res["id"]]["callback"] + if callable(callbacks): + callbacks(res["result"]) + elif isinstance(callbacks, list): + for callback in callbacks: + callback(res["result"]) + elif "method" in res: + " Run registered call backs for individual object notices " + if res["method"] == "notice": + [self.setObject(notice["id"], notice) + for notice in res["params"][1][0] if "id" in notice] + [self.dispatchNotice(notice) + for notice in res["params"][1][0] if "id" in notice] + else: + log.error("Error! ", res) + + def setLoop(self, loop): + """ Define the asyncio loop so that it can be halted on + disconnects + """ + self.loop = loop + + def connection_lost(self, errmsg): + """ Is called if the connection is lost. Calls event + ``connection-closed`` and closes the asyncio main loop. + """ + log.info("WebSocket connection closed: {0}".format(errmsg)) + self.loop.stop() + self.eventcallback("connection-closed") + + def onClose(self, wasClean, code, reason): + self.connection_lost(reason) + + """ L E G A C Y - C A L L S + """ + def getAccountHistory(self, account_id, callback, + start="1.11.0", stop="1.11.0", limit=100): + """ Get Account history History and call callback + + :param account-id account_id: Account ID to read the history for + :param fnt callback: Callback to execute with the response + :param historyID start: Start of the history (defaults to ``1.11.0``) + :param historyID stop: Stop of the history (defaults to ``1.11.0``) + :param historyID stop: Limit entries by (defaults to ``100``, max ``100``) + :raises ValueError: if the account id is incorrectly formatted + """ + warnings.warn( + "getAccountHistory is deprecated! " + "Use client.ws.get_account_history() instead", + DeprecationWarning + ) + if account_id[0:4] == "1.2." : + self.wsexec([self.api_ids["history"], + "get_account_history", + [account_id, start, 100, stop]], + callback) + else : + raise ValueError("getAccountHistory expects an account" + + "id of the form '1.2.x'!") + + def getAccountProposals(self, account_ids, callback): + """ Get Account Proposals and call callback + + :param array account_ids: Array containing account ids + :param fnt callback: Callback to execute with the response + + """ + warnings.warn( + "getAccountProposals is deprecated! 
" + "Use client.ws.get_proposed_transactions() instead", + DeprecationWarning + ) + self.wsexec([self.api_ids["database"], + "get_proposed_transactions", + account_ids], + callback) diff --git a/bitsharesbase/__init__.py b/bitsharesbase/__init__.py index cdc3d412669c1d6b3f92b4d105ba3bbac2012463..8d66c2a251966080207424801a438a8044d11b00 100644 --- a/bitsharesbase/__init__.py +++ b/bitsharesbase/__init__.py @@ -1 +1,11 @@ -__ALL__ = [] +__all__ = [ + 'account', + 'bip38', + 'chains', + 'memo', + 'objects', + 'objecttypes' + 'operations', + 'signedtransactions', + 'transactions', +] diff --git a/bitsharesbase/account.py b/bitsharesbase/account.py new file mode 100644 index 0000000000000000000000000000000000000000..434de80cb2e67761f17424484f0131f5785bdab1 --- /dev/null +++ b/bitsharesbase/account.py @@ -0,0 +1,110 @@ +from graphenebase.account import ( + PasswordKey as GPHPasswordKey, + BrainKey as GPHBrainKey, + Address as GPHAddress, + PublicKey as GPHPublicKey, + PrivateKey as GPHPrivateKey +) + + +class PasswordKey(GPHPasswordKey): + """ This class derives a private key given the account name, the + role and a password. It leverages the technology of Brainkeys + and allows people to have a secure private key by providing a + passphrase only. + """ + + def __init__(self, *args, **kwargs): + super(PasswordKey, self).__init__(*args, **kwargs) + + +class BrainKey(GPHBrainKey): + """Brainkey implementation similar to the graphene-ui web-wallet. + + :param str brainkey: Brain Key + :param int sequence: Sequence number for consecutive keys + + Keys in Graphene are derived from a seed brain key which is a string of + 16 words out of a predefined dictionary with 49744 words. It is a + simple single-chain key derivation scheme that is not compatible with + BIP44 but easy to use. + + Given the brain key, a private key is derived as:: + + privkey = SHA256(SHA512(brainkey + " " + sequence)) + + Incrementing the sequence number yields a new key that can be + regenerated given the brain key. + """ + + def __init__(self, *args, **kwargs): + super(BrainKey, self).__init__(*args, **kwargs) + + +class Address(GPHAddress): + """ Address class + + This class serves as an address representation for Public Keys. + + :param str address: Base58 encoded address (defaults to ``None``) + :param str pubkey: Base58 encoded pubkey (defaults to ``None``) + :param str prefix: Network prefix (defaults to ``BTS``) + + Example:: + + Address("BTSFN9r6VYzBK8EKtMewfNbfiGCr56pHDBFi") + + """ + def __init__(self, *args, **kwargs): + kwargs["prefix"] = "BTS" # make prefix BTS + super(Address, self).__init__(*args, **kwargs) + + +class PublicKey(GPHPublicKey): + """ This class deals with Public Keys and inherits ``Address``. + + :param str pk: Base58 encoded public key + :param str prefix: Network prefix (defaults to ``BTS``) + + Example::: + + PublicKey("BTS6UtYWWs3rkZGV8JA86qrgkG6tyFksgECefKE1MiH4HkLD8PFGL") + + .. note:: By default, graphene-based networks deal with **compressed** + public keys. 
If an **uncompressed** key is required, the + method ``unCompressed`` can be used:: + + PublicKey("xxxxx").unCompressed() + + """ + def __init__(self, *args, **kwargs): + kwargs["prefix"] = "BTS" # make prefix BTS + super(PublicKey, self).__init__(*args, **kwargs) + + +class PrivateKey(GPHPrivateKey): + """ Derives the compressed and uncompressed public keys and + constructs two instances of ``PublicKey``: + + :param str wif: Base58check-encoded wif key + :param str prefix: Network prefix (defaults to ``BTS``) + + Example::: + + PrivateKey("5HqUkGuo62BfcJU5vNhTXKJRXuUi9QSE6jp8C3uBJ2BVHtB8WSd") + + Compressed vs. Uncompressed: + + * ``PrivateKey("w-i-f").pubkey``: + Instance of ``PublicKey`` using compressed key. + * ``PrivateKey("w-i-f").pubkey.address``: + Instance of ``Address`` using compressed key. + * ``PrivateKey("w-i-f").uncompressed``: + Instance of ``PublicKey`` using uncompressed key. + * ``PrivateKey("w-i-f").uncompressed.address``: + Instance of ``Address`` using uncompressed key. + + """ + def __init__(self, *args, **kwargs): + kwargs["prefix"] = "BTS" # make prefix BTS + super(PrivateKey, self).__init__(*args, **kwargs) diff --git a/bitsharesbase/bip38.py b/bitsharesbase/bip38.py new file mode 100644 index 0000000000000000000000000000000000000000..a7967b0879b6b529d44caf5765683dae6eb9bf90 --- /dev/null +++ b/bitsharesbase/bip38.py @@ -0,0 +1,30 @@ +from graphenebase.bip38 import ( + encrypt as GPHencrypt, + decrypt as GPHdecrypt +) + + +def encrypt(privkey, passphrase): + """ BIP0038 non-ec-multiply encryption. Returns BIP0038 encrypted privkey. + + :param privkey: Private key + :type privkey: Base58 + :param str passphrase: UTF-8 encoded passphrase for encryption + :return: BIP0038 non-ec-multiply encrypted wif key + :rtype: Base58 + + """ + return GPHencrypt(privkey, passphrase) + + +def decrypt(encrypted_privkey, passphrase): + """BIP0038 non-ec-multiply decryption. Returns WIF privkey. + + :param Base58 encrypted_privkey: Private key + :param str passphrase: UTF-8 encoded passphrase for decryption + :return: BIP0038 non-ec-multiply decrypted key + :rtype: Base58 + :raises SaltException: if checksum verification failed (e.g. 
wrong password) + + """ + return GPHdecrypt(encrypted_privkey, passphrase) diff --git a/bitsharesbase/chains.py b/bitsharesbase/chains.py new file mode 100644 index 0000000000000000000000000000000000000000..a17fe58fefbf3c091258592a632c579926d4ec5d --- /dev/null +++ b/bitsharesbase/chains.py @@ -0,0 +1,11 @@ +default_prefix = "BTS" +known_chains = {"BTS": {"chain_id": "4018d7844c78f6a6c41c6a552b898022310fc5dec06da467ee7905a8dad512c8", + "core_symbol": "BTS", + "prefix": "BTS"}, + "GPH": {"chain_id": "b8d1603965b3eb1acba27e62ff59f74efa3154d43a4188d381088ac7cdf35539", + "core_symbol": "CORE", + "prefix": "GPH"}, + "TEST": {"chain_id": "39f5e2ede1f8bc1a3a54a7914414e3779e33193f1f5693510e73cb7a87617447", + "core_symbol": "TEST", + "prefix": "TEST"} + } diff --git a/bitsharesbase/memo.py b/bitsharesbase/memo.py new file mode 100644 index 0000000000000000000000000000000000000000..a0714bbcbc88912f894d432f57be96812305130b --- /dev/null +++ b/bitsharesbase/memo.py @@ -0,0 +1,34 @@ +from graphenebase.memo import ( + encode_memo as GPHencode_memo, + decode_memo as GPHdecode_memo +) + + +def encode_memo(priv, pub, nonce, message): + """ Encode a message with a shared secret between Alice and Bob + + :param PrivateKey priv: Private Key (of Alice) + :param PublicKey pub: Public Key (of Bob) + :param int nonce: Random nonce + :param str message: Memo message + :return: Encrypted message + :rtype: hex + + """ + return GPHencode_memo(priv, pub, nonce, message) + + +def decode_memo(priv, pub, nonce, message): + """ Decode a message with a shared secret between Alice and Bob + + :param PrivateKey priv: Private Key (of Bob) + :param PublicKey pub: Public Key (of Alice) + :param int nonce: Nonce used for Encryption + :param bytes message: Encrypted Memo message + :return: Decrypted message + :rtype: str + :raise ValueError: if message cannot be decoded as valid UTF-8 + string + + """ + return GPHdecode_memo(priv, pub, nonce, message) diff --git a/bitsharesbase/objects.py b/bitsharesbase/objects.py new file mode 100644 index 0000000000000000000000000000000000000000..7ea19ec32c67c8a7155ae2d2644ab1a34d4f4c58 --- /dev/null +++ b/bitsharesbase/objects.py @@ -0,0 +1,210 @@ +import json +from collections import OrderedDict +from graphenebase.types import ( + Uint8, Int16, Uint16, Uint32, Uint64, + Varint32, Int64, String, Bytes, Void, + Array, PointInTime, Signature, Bool, + Set, Fixed_array, Optional, Static_variant, + Map, Id, VoteId, + ObjectId as GPHObjectId +) +from graphenebase.objects import GrapheneObject, isArgsThisClass +from .chains import known_chains +from .objecttypes import object_type +from .account import PublicKey +from .chains import default_prefix +from graphenebase.objects import Operation as GPHOperation +from .operationids import operations + + +class ObjectId(GPHObjectId): + """ Encodes object/protocol ids + """ + def __init__(self, object_str, type_verify=None): + if len(object_str.split(".")) == 3: + space, type, id = object_str.split(".") + self.space = int(space) + self.type = int(type) + self.instance = Id(int(id)) + self.Id = object_str + if type_verify: + assert object_type[type_verify] == int(type),\ + "Object id does not match object type! 
" +\ + "Excpected %d, got %d" %\ + (object_type[type_verify], int(type)) + else: + raise Exception("Object id is invalid") + + +class Operation(GPHOperation): + def __init__(self, *args, **kwargs): + super(Operation, self).__init__(*args, **kwargs) + + def _getklass(self, name): + module = __import__("bitsharesbase.operations", fromlist=["operations"]) + class_ = getattr(module, name) + return class_ + + def operations(self): + return operations + + def getOperationNameForId(self, i): + """ Convert an operation id into the corresponding string + """ + for key in operations: + if int(operations[key]) is int(i): + return key + return "Unknown Operation ID %d" % i + + +class Asset(GrapheneObject): + def __init__(self, *args, **kwargs): + if isArgsThisClass(self, args): + self.data = args[0].data + else: + if len(args) == 1 and len(kwargs) == 0: + kwargs = args[0] + super().__init__(OrderedDict([ + ('amount', Int64(kwargs["amount"])), + ('asset_id', ObjectId(kwargs["asset_id"], "asset")) + ])) + + +class Memo(GrapheneObject): + def __init__(self, *args, **kwargs): + if isArgsThisClass(self, args): + self.data = args[0].data + else: + if len(args) == 1 and len(kwargs) == 0: + kwargs = args[0] + if "message" in kwargs and kwargs["message"]: + if "chain" not in kwargs: + chain = "BTS" + else: + chain = kwargs["chain"] + if isinstance(chain, str) and chain in known_chains: + chain_params = known_chains[chain] + elif isinstance(chain, dict): + chain_params = chain + else: + raise Exception("Memo() only takes a string or a dict as chain!") + if "prefix" not in chain_params: + raise Exception("Memo() needs a 'prefix' in chain params!") + prefix = chain_params["prefix"] + super().__init__(OrderedDict([ + ('from', PublicKey(kwargs["from"], prefix=prefix)), + ('to', PublicKey(kwargs["to"], prefix=prefix)), + ('nonce', Uint64(int(kwargs["nonce"]))), + ('message', Bytes(kwargs["message"])) + ])) + else: + super().__init__(None) + + +class Price(GrapheneObject): + def __init__(self, *args, **kwargs): + if isArgsThisClass(self, args): + self.data = args[0].data + else: + if len(args) == 1 and len(kwargs) == 0: + kwargs = args[0] + super().__init__(OrderedDict([ + ('base', Asset(kwargs["base"])), + ('quote', Asset(kwargs["quote"])) + ])) + + +class PriceFeed(GrapheneObject): + def __init__(self, *args, **kwargs): + if isArgsThisClass(self, args): + self.data = args[0].data + else: + if len(args) == 1 and len(kwargs) == 0: + kwargs = args[0] + super().__init__(OrderedDict([ + ('settlement_price', Price(kwargs["settlement_price"])), + ('maintenance_collateral_ratio', Uint16(kwargs["maintenance_collateral_ratio"])), + ('maximum_short_squeeze_ratio', Uint16(kwargs["maximum_short_squeeze_ratio"])), + ('core_exchange_rate', Price(kwargs["core_exchange_rate"])), + ])) + + +class Permission(GrapheneObject): + def __init__(self, *args, **kwargs): + # Allow for overwrite of prefix + prefix = kwargs.pop("prefix", default_prefix) + + if isArgsThisClass(self, args): + self.data = args[0].data + else: + if len(args) == 1 and len(kwargs) == 0: + kwargs = args[0] + + # Sort keys (FIXME: ideally, the sorting is part of Public + # Key and not located here) + kwargs["key_auths"] = sorted( + kwargs["key_auths"], + key=lambda x: repr(PublicKey(x[0], prefix=prefix).address), + reverse=False, + ) + accountAuths = Map([ + [String(e[0]), Uint16(e[1])] + for e in kwargs["account_auths"] + ]) + keyAuths = Map([ + [PublicKey(e[0], prefix=prefix), Uint16(e[1])] + for e in kwargs["key_auths"] + ]) + super().__init__(OrderedDict([ + 
('weight_threshold', Uint32(int(kwargs["weight_threshold"]))), + ('account_auths', accountAuths), + ('key_auths', keyAuths), + ('extensions', Set([])), + ])) + + +class AccountOptions(GrapheneObject): + def __init__(self, *args, **kwargs): + # Allow for overwrite of prefix + prefix = kwargs.pop("prefix", default_prefix) + + if isArgsThisClass(self, args): + self.data = args[0].data + else: + if len(args) == 1 and len(kwargs) == 0: + kwargs = args[0] + super().__init__(OrderedDict([ + ('memo_key', PublicKey(kwargs["memo_key"], prefix=prefix)), + ('voting_account', ObjectId(kwargs["voting_account"], "account")), + ('num_witness', Uint16(kwargs["num_witness"])), + ('num_committee', Uint16(kwargs["num_committee"])), + ('votes', Array([VoteId(o) for o in kwargs["votes"]])), + ('extensions', Set([])), + ])) + + +class AssetOptions(GrapheneObject): + def __init__(self, *args, **kwargs): + if isArgsThisClass(self, args): + self.data = args[0].data + else: + if len(args) == 1 and len(kwargs) == 0: + kwargs = args[0] + super().__init__(OrderedDict([ + ('max_supply', Uint64(kwargs["max_supply"])), + ('market_fee_percent', Uint16(kwargs["market_fee_percent"])), + ('max_market_fee', Uint64(kwargs["max_market_fee"])), + ('issuer_permissions', Uint16(kwargs["issuer_permissions"])), + ('flags', Uint16(kwargs["flags"])), + ('core_exchange_rate', Price(kwargs["core_exchange_rate"])), + ('whitelist_authorities', + Array([ObjectId(o, "account") for o in kwargs["whitelist_authorities"]])), + ('blacklist_authorities', + Array([ObjectId(o, "account") for o in kwargs["blacklist_authorities"]])), + ('whitelist_markets', + Array([ObjectId(o, "asset") for o in kwargs["whitelist_markets"]])), + ('blacklist_markets', + Array([ObjectId(o, "asset") for o in kwargs["blacklist_markets"]])), + ('description', String(kwargs["description"])), + ('extensions', Set([])), + ])) diff --git a/bitsharesbase/objecttypes.py b/bitsharesbase/objecttypes.py new file mode 100644 index 0000000000000000000000000000000000000000..cf6e63dd8967650cdfda47e68aa00c7f75218abc --- /dev/null +++ b/bitsharesbase/objecttypes.py @@ -0,0 +1,19 @@ +#: Object types for object ids +object_type = {} +object_type["null"] = 0 +object_type["base"] = 1 +object_type["account"] = 2 +object_type["asset"] = 3 +object_type["force_settlement"] = 4 +object_type["committee_member"] = 5 +object_type["witness"] = 6 +object_type["limit_order"] = 7 +object_type["call_order"] = 8 +object_type["custom"] = 9 +object_type["proposal"] = 10 +object_type["operation_history"] = 11 +object_type["withdraw_permission"] = 12 +object_type["vesting_balance"] = 13 +object_type["worker"] = 14 +object_type["balance"] = 15 +object_type["OBJECT_TYPE_COUNT"] = 16 diff --git a/bitsharesbase/operationids.py b/bitsharesbase/operationids.py new file mode 100644 index 0000000000000000000000000000000000000000..8c044438157d4575932679302847a290661548ac --- /dev/null +++ b/bitsharesbase/operationids.py @@ -0,0 +1,46 @@ +#: Operation ids +operations = {} +operations["transfer"] = 0 +operations["limit_order_create"] = 1 +operations["limit_order_cancel"] = 2 +operations["call_order_update"] = 3 +operations["fill_order"] = 4 +operations["account_create"] = 5 +operations["account_update"] = 6 +operations["account_whitelist"] = 7 +operations["account_upgrade"] = 8 +operations["account_transfer"] = 9 +operations["asset_create"] = 10 +operations["asset_update"] = 11 +operations["asset_update_bitasset"] = 12 +operations["asset_update_feed_producers"] = 13 +operations["asset_issue"] = 14 
+operations["asset_reserve"] = 15 +operations["asset_fund_fee_pool"] = 16 +operations["asset_settle"] = 17 +operations["asset_global_settle"] = 18 +operations["asset_publish_feed"] = 19 +operations["witness_create"] = 20 +operations["witness_update"] = 21 +operations["proposal_create"] = 22 +operations["proposal_update"] = 23 +operations["proposal_delete"] = 24 +operations["withdraw_permission_create"] = 25 +operations["withdraw_permission_update"] = 26 +operations["withdraw_permission_claim"] = 27 +operations["withdraw_permission_delete"] = 28 +operations["committee_member_create"] = 29 +operations["committee_member_update"] = 30 +operations["committee_member_update_global_parameters"] = 31 +operations["vesting_balance_create"] = 32 +operations["vesting_balance_withdraw"] = 33 +operations["worker_create"] = 34 +operations["custom"] = 35 +operations["assert"] = 36 +operations["balance_claim"] = 37 +operations["override_transfer"] = 38 +operations["transfer_to_blind"] = 39 +operations["blind_transfer"] = 40 +operations["transfer_from_blind"] = 41 +operations["asset_settle_cancel"] = 42 +operations["asset_claim_fees"] = 43 diff --git a/bitsharesbase/operations.py b/bitsharesbase/operations.py new file mode 100644 index 0000000000000000000000000000000000000000..2b20d57944b77d1310c26898094d93dac63df202 --- /dev/null +++ b/bitsharesbase/operations.py @@ -0,0 +1,263 @@ +from collections import OrderedDict +import json +from graphenebase.types import ( + Uint8, Int16, Uint16, Uint32, Uint64, + Varint32, Int64, String, Bytes, Void, + Array, PointInTime, Signature, Bool, + Set, Fixed_array, Optional, Static_variant, + Map, Id, VoteId +) +from .objects import GrapheneObject, isArgsThisClass +from .account import PublicKey +from .chains import default_prefix +from .operationids import operations +from .objects import ( + Operation, + Asset, + Memo, + Price, + PriceFeed, + Permission, + AccountOptions, + AssetOptions, + ObjectId +) + + +class Transfer(GrapheneObject): + def __init__(self, *args, **kwargs): + if isArgsThisClass(self, args): + self.data = args[0].data + else: + if len(args) == 1 and len(kwargs) == 0: + kwargs = args[0] + if "memo" in kwargs: + memo = Optional(Memo(kwargs["memo"])) + else: + memo = Optional(None) + super().__init__(OrderedDict([ + ('fee', Asset(kwargs["fee"])), + ('from', ObjectId(kwargs["from"], "account")), + ('to', ObjectId(kwargs["to"], "account")), + ('amount', Asset(kwargs["amount"])), + ('memo', memo), + ('extensions', Set([])), + ])) + + +class Asset_publish_feed(GrapheneObject): + def __init__(self, *args, **kwargs): + if isArgsThisClass(self, args): + self.data = args[0].data + else: + if len(args) == 1 and len(kwargs) == 0: + kwargs = args[0] + super().__init__(OrderedDict([ + ('fee', Asset(kwargs["fee"])), + ('publisher', ObjectId(kwargs["publisher"], "account")), + ('asset_id', ObjectId(kwargs["asset_id"], "asset")), + ('feed', PriceFeed(kwargs["feed"])), + ('extensions', Set([])), + ])) + + +class Asset_update(GrapheneObject): + def __init__(self, *args, **kwargs): + if isArgsThisClass(self, args): + self.data = args[0].data + else: + if len(args) == 1 and len(kwargs) == 0: + kwargs = args[0] + if "new_issuer" in kwargs: + new_issuer = Optional(ObjectId(kwargs["new_issuer"], "account")) + else: + new_issuer = Optional(None) + super().__init__(OrderedDict([ + ('fee', Asset(kwargs["fee"])), + ('issuer', ObjectId(kwargs["issuer"], "account")), + ('asset_to_update', ObjectId(kwargs["asset_to_update"], "asset")), + ('new_issuer', new_issuer), + ('new_options', 
AssetOptions(kwargs["new_options"])), + ('extensions', Set([])), + ])) + + +class Op_wrapper(GrapheneObject): + def __init__(self, *args, **kwargs): + if isArgsThisClass(self, args): + self.data = args[0].data + else: + if len(args) == 1 and len(kwargs) == 0: + kwargs = args[0] + super().__init__(OrderedDict([ + ('op', Operation(kwargs["op"])), + ])) + + +class Proposal_create(GrapheneObject): + def __init__(self, *args, **kwargs): + if isArgsThisClass(self, args): + self.data = args[0].data + else: + if len(args) == 1 and len(kwargs) == 0: + kwargs = args[0] + if "review_period_seconds" in kwargs: + review = Optional(Uint32(kwargs["review_period_seconds"])) + else: + review = Optional(None) + super().__init__(OrderedDict([ + ('fee', Asset(kwargs["fee"])), + ('fee_paying_account', ObjectId(kwargs["fee_paying_account"], "account")), + ('expiration_time', PointInTime(kwargs["expiration_time"])), + ('proposed_ops', + Array([Op_wrapper(o) for o in kwargs["proposed_ops"]])), + ('review_period_seconds', review), + ('extensions', Set([])), + ])) + + +class Proposal_update(GrapheneObject): + def __init__(self, *args, **kwargs): + if isArgsThisClass(self, args): + self.data = args[0].data + else: + if len(args) == 1 and len(kwargs) == 0: + kwargs = args[0] + + for o in ['active_approvals_to_add', + 'active_approvals_to_remove', + 'owner_approvals_to_add', + 'owner_approvals_to_remove', + 'key_approvals_to_add', + 'key_approvals_to_remove']: + if o not in kwargs: + kwargs[o] = [] + + super().__init__(OrderedDict([ + ('fee', Asset(kwargs["fee"])), + ('fee_paying_account', ObjectId(kwargs["fee_paying_account"], "account")), + ('proposal', ObjectId(kwargs["proposal"], "proposal")), + ('active_approvals_to_add', + Array([ObjectId(o, "account") for o in kwargs["active_approvals_to_add"]])), + ('active_approvals_to_remove', + Array([ObjectId(o, "account") for o in kwargs["active_approvals_to_remove"]])), + ('owner_approvals_to_add', + Array([ObjectId(o, "account") for o in kwargs["owner_approvals_to_add"]])), + ('owner_approvals_to_remove', + Array([ObjectId(o, "account") for o in kwargs["owner_approvals_to_remove"]])), + ('key_approvals_to_add', + Array([PublicKey(o) for o in kwargs["key_approvals_to_add"]])), + ('key_approvals_to_remove', + Array([PublicKey(o) for o in kwargs["key_approvals_to_remove"]])), + ('extensions', Set([])), + ])) + + +class Limit_order_create(GrapheneObject): + def __init__(self, *args, **kwargs): + if isArgsThisClass(self, args): + self.data = args[0].data + else: + if len(args) == 1 and len(kwargs) == 0: + kwargs = args[0] + super().__init__(OrderedDict([ + ('fee', Asset(kwargs["fee"])), + ('seller', ObjectId(kwargs["seller"], "account")), + ('amount_to_sell', Asset(kwargs["amount_to_sell"])), + ('min_to_receive', Asset(kwargs["min_to_receive"])), + ('expiration', PointInTime(kwargs["expiration"])), + ('fill_or_kill', Bool(kwargs["fill_or_kill"])), + ('extensions', Set([])), + ])) + + +class Limit_order_cancel(GrapheneObject): + def __init__(self, *args, **kwargs): + if isArgsThisClass(self, args): + self.data = args[0].data + else: + if len(args) == 1 and len(kwargs) == 0: + kwargs = args[0] + super().__init__(OrderedDict([ + ('fee', Asset(kwargs["fee"])), + ('fee_paying_account', ObjectId(kwargs["fee_paying_account"], "account")), + ('order', ObjectId(kwargs["order"], "limit_order")), + ('extensions', Set([])), + ])) + + +class Call_order_update(GrapheneObject): + def __init__(self, *args, **kwargs): + if isArgsThisClass(self, args): + self.data = args[0].data + else: + if 
len(args) == 1 and len(kwargs) == 0: + kwargs = args[0] + super().__init__(OrderedDict([ + ('fee', Asset(kwargs["fee"])), + ('funding_account', ObjectId(kwargs["funding_account"], "account")), + ('delta_collateral', Asset(kwargs["delta_collateral"])), + ('delta_debt', Asset(kwargs["delta_debt"])), + ('extensions', Set([])), + ])) + + +class Asset_fund_fee_pool(GrapheneObject): + def __init__(self, *args, **kwargs): + if isArgsThisClass(self, args): + self.data = args[0].data + else: + if len(args) == 1 and len(kwargs) == 0: + kwargs = args[0] + super().__init__(OrderedDict([ + ('fee', Asset(kwargs["fee"])), + ('from_account', ObjectId(kwargs["from_account"], "account")), + ('asset_id', ObjectId(kwargs["asset_id"], "asset")), + ('amount', Int64(kwargs["amount"])), + ('extensions', Set([])), + ])) + + +class Override_transfer(GrapheneObject): + def __init__(self, *args, **kwargs): + if isArgsThisClass(self, args): + self.data = args[0].data + else: + if len(args) == 1 and len(kwargs) == 0: + kwargs = args[0] + if "memo" in kwargs: + memo = Optional(Memo(kwargs["memo"])) + else: + memo = Optional(None) + super().__init__(OrderedDict([ + ('fee', Asset(kwargs["fee"])), + ('issuer', ObjectId(kwargs["issuer"], "account")), + ('from', ObjectId(kwargs["from"], "account")), + ('to', ObjectId(kwargs["to"], "account")), + ('amount', Asset(kwargs["amount"])), + ('memo', memo), + ('extensions', Set([])), + ])) + + +class Account_create(GrapheneObject): + def __init__(self, *args, **kwargs): + # Allow for overwrite of prefix + prefix = kwargs.pop("prefix", default_prefix) + + if isArgsThisClass(self, args): + self.data = args[0].data + else: + if len(args) == 1 and len(kwargs) == 0: + kwargs = args[0] + super().__init__(OrderedDict([ + ('fee', Asset(kwargs["fee"])), + ('registrar', ObjectId(kwargs["registrar"], "account")), + ('referrer', ObjectId(kwargs["referrer"], "account")), + ('referrer_percent', Uint16(kwargs["referrer_percent"])), + ('name', String(kwargs["name"])), + ('owner', Permission(kwargs["owner"], prefix=prefix)), + ('active', Permission(kwargs["active"], prefix=prefix)), + ('options', AccountOptions(kwargs["options"], prefix=prefix)), + ('extensions', Set([])), + ])) diff --git a/bitsharesbase/signedtransactions.py b/bitsharesbase/signedtransactions.py new file mode 100644 index 0000000000000000000000000000000000000000..f56575018199cb77334d254c8e1f4091456bb5b5 --- /dev/null +++ b/bitsharesbase/signedtransactions.py @@ -0,0 +1,30 @@ +from graphenebase.signedtransactions import Signed_Transaction as GrapheneSigned_Transaction +from .operations import Operation +from .chains import known_chains +import logging +log = logging.getLogger(__name__) + + +class Signed_Transaction(GrapheneSigned_Transaction): + """ Create a signed transaction and offer method to create the + signature + + :param num refNum: parameter ref_block_num (see ``getBlockParams``) + :param num refPrefix: parameter ref_block_prefix (see ``getBlockParams``) + :param str expiration: expiration date + :param Array operations: array of operations + """ + def __init__(self, *args, **kwargs): + super(Signed_Transaction, self).__init__(*args, **kwargs) + + def sign(self, wifkeys, chain="BTS"): + return super(Signed_Transaction, self).sign(wifkeys, chain) + + def verify(self, pubkeys=[], chain="BTS"): + return super(Signed_Transaction, self).verify(pubkeys, chain) + + def getOperationKlass(self): + return Operation + + def getKnownChains(self): + return known_chains diff --git a/bitsharesbase/transactions.py 
b/bitsharesbase/transactions.py new file mode 100644 index 0000000000000000000000000000000000000000..5c68c2ddfab4e054457870b24624a0c986b4933b --- /dev/null +++ b/bitsharesbase/transactions.py @@ -0,0 +1,32 @@ +from .account import PublicKey +from .chains import known_chains +from .signedtransactions import Signed_Transaction +from .operations import ( + Transfer, + Asset_publish_feed, + Asset_update, + Op_wrapper, + Proposal_create, + Proposal_update, + Limit_order_create, + Limit_order_cancel, + Call_order_update, + Asset_fund_fee_pool, + Override_transfer, + Account_create, +) +from .objects import Asset +from graphenebase.transactions import getBlockParams, formatTimeFromNow + + +def addRequiredFees(ws, ops, asset_id): + """ Auxiliary method to obtain the required fees for a set of + operations. Requires a websocket connection to a witness node! + """ + fees = ws.get_required_fees([i.json() for i in ops], asset_id) + for i, d in enumerate(ops): + ops[i].op.data["fee"] = Asset( + amount=fees[i]["amount"], + asset_id=fees[i]["asset_id"] + ) + return ops diff --git a/bitsharesdeprecated/client.py b/bitsharesdeprecated/client.py new file mode 100644 index 0000000000000000000000000000000000000000..749d0a97e8885b9b800adf74d77f43f15f70c7e0 --- /dev/null +++ b/bitsharesdeprecated/client.py @@ -0,0 +1,572 @@ +from bitsharesapi.api import BitSharesAPI +from bitsharesapi.websocket import BitSharesWebsocket +from collections import OrderedDict + +import logging +log = logging.getLogger(__name__) + + +class ExampleConfig(): + """ The behavior of your program (e.g. reactions on messages) can be + defined in a separated class (here called ``Config()``. It contains + the wallet and witness connection parameters: + + The config class is used to define several attributes *and* + methods that will be used during API communication. This is + particularily useful when dealing with event-driven websocket + notifications. + + **RPC-Only Connections**: + + The simples setup for this class is a simply RPC: + + .. code-block:: python + + class Config(): + wallet_host = "localhost" + wallet_port = 8092 + wallet_user = "" + wallet_password = "" + + and allows the use of rpc commands similar to the + ``BitSharesAPI`` class: + + .. code-block:: python + + graphene = GrapheneClient(Config) + print(graphene.rpc.info()) + print(graphene.rpc.get_account("init0")) + print(graphene.rpc.get_asset("USD")) + + All methods within ``graphene.rpc`` are mapped to the + corresponding RPC call of the wallet and the parameters are + handed over directly. + + **Additional Websocket Connections**: + + .. code-block:: python + + class Config(GrapheneWebsocketProtocol): ## Note the dependency + wallet_host = "localhost" + wallet_port = 8092 + wallet_user = "" + wallet_password = "" + witness_url = "ws://localhost:8090/" + witness_user = "" + witness_password = "" + + Some methods will be called automatically from the underlying websocket + protocol. They all start with ``onXXX`` and are described below. + + .. note:: ``data`` will be the notification from the websocket protocol that + caused the call. It will have an object id ``data["id"]`` to identify + it! + """ + + #: Wallet connection parameters + wallet_host = "localhost" + wallet_port = 8092 + wallet_user = "" + wallet_password = "" + + #: Witness connection parameter + witness_url = "ws://localhost:8090/" + witness_user = "" + witness_password = "" + + #: Accounts to watch. 
Changes on these will result in the method + #: ``onAccountUpdate()`` to be called + watch_accounts = ["fabian", "nathan"] + + #: Assets you want to watch. Changes will be used to call + #: ``onAssetUpdate()``. + watch_assets = ["USD"] + + #: Markets to watch. Changes to these will result in the method + #: ``onMarketUpdate()`` to be called + watch_markets = ["USD:CORE"] + + def onAccountUpdate(self, data): + """ Account updates will be triggered if attribute + ``watch_accounts`` is defined and either the corresponding + object ``1.2.x`` **or** ``2.6.x`` is updated. + + :param json data: notification that triggered the call (see + below) + + **Example notifications:** + + .. code-block:: json + + { + "most_recent_op": "2.9.252", + "pending_fees": 0, + "total_core_in_orders": 90000000, + "id": "2.6.17", + "owner": "1.2.17", + "lifetime_fees_paid": "26442269333", + "pending_vested_fees": 500000 + } + + .. code-block:: json + + { + "options": { + "extensions": [], + "memo_key": "", + "voting_account": "1.2.5", + "num_committee": 1, + "votes": [ + "0:11" + ], + "num_witness": 0 + }, + "referrer": "1.2.17", + "lifetime_referrer": "1.2.17", + "blacklisting_accounts": [], + "registrar": "1.2.17", + "membership_expiration_date": "1969-12-31T23:59:59", + "network_fee_percentage": 2000, + "cashback_vb": "1.13.0", + "id": "1.2.17", + "active": { + "weight_threshold": 1, + "account_auths": [], + "address_auths": [], + "key_auths": [ + [ + "GPH6MRyAjQq8ud7hVNYcfnVPJqcVpscN5So8BhtHuGYqET5GDW5CV", + 1 + ] + ] + }, + "name": "nathan", + "referrer_rewards_percentage": 0, + "whitelisting_accounts": [], + "owner": { + "weight_threshold": 1, + "account_auths": [], + "address_auths": [], + "key_auths": [ + [ + "GPH6MRyAjQq8ud7hVNYcfnVPJqcVpscN5So8BhtHuGYqET5GDW5CV", + 1 + ] + ] + }, + "statistics": "2.6.17", + "blacklisted_accounts": [], + "lifetime_referrer_fee_percentage": 8000 + } + """ + pass + + def onAssetUpdate(self, data): + """ This method is called when any of the assets in watch_assets + changes. The changes of the following objects are monitored: + + * Asset object (``1.3.x``) + * Dynamic Asset data (``2.3.x``) + * Bitasset data (``2.4.x``) + + Hence, this method needs to distinguish these three + objects! + + """ + pass + + def onMarketUpdate(self, data): + """ This method will be called if a subscribed market sees an + event (registered to through ``watch_markets``). + + :param json data: notification that caused the call + + Example notification: + + .. code-block:: json + + { + "seller": "1.2.17", + "id": "1.7.0", + "for_sale": 88109000, + "deferred_fee": 0, + "expiration": "2020-12-23T11:13:42", + "sell_price": { + "base": { + "asset_id": "1.3.1", + "amount": 100000000 + }, + "quote": { + "asset_id": "1.3.0", + "amount": 1000000000 + } + } + } + """ + pass + + def onBlock(self, data): + """ Will be triggered on every new block (e.g. change of object + `2.0.0`) + + :param json data: notification that caused the call + + Example notification: + + .. 
code-block:: python + + data = { + "id": "2.1.0", + "dynamic_flags": 0, + "current_witness": "1.6.7", + "next_maintenance_time": "2015-12-31T00:00:00", + "recently_missed_count": 1079135, + "current_aslot": 345685, + "head_block_id": "00002f5410b2991a7ed64994b6fe08353603a702", + "witness_budget": 0, + "last_irreversible_block_num": 12107, + "head_block_number": 12116, + "time": "2015-12-30T10:10:30", + "accounts_registered_this_interval": 0, + "recent_slots_filled": "340282366920938463463374607431768211455", + "last_budget_time": "2015-12-30T09:28:15" + } + """ + pass + + def onPropertiesChange(self, data): + """ Will be triggered every time a parameter of the blockchain + (e.g. fees or times) changes. + + :param json data: notification that caused the call + + Example notification: + + .. code-block:: python + + {"id":"2.0.0","parameters":{"current_fees":{"parameters":[[0,{"fee":3000000,"price_per_kbyte":2000000}], + [1,{"fee":1000000}],[2,{"fee":0}],[3,{"fee":100000}],[4,{}],[5,{"basic_fee":9500000,"premium_fee":400000000, + "price_per_kbyte":200000}],[6,{"fee":100000,"price_per_kbyte":20}],[7,{"fee":600000}],[8,{"membership_annual_fee":400000000, + "membership_lifetime_fee":2000000000}],[9,{"fee":100000000}],[10,{"symbol3":"100000000000","symbol4":"26000000000", + "long_symbol":500000000,"price_per_kbyte":20}],[11,{"fee":2000000,"price_per_kbyte":200000}],[12,{"fee":100000000}], + [13,{"fee":100000000}],[14,{"fee":4000000,"price_per_kbyte":200000}],[15,{"fee":4000000}],[16,{"fee":200000}], + [17,{"fee":20000000}],[18,{"fee":100000000}],[19,{"fee":10000}],[20,{"fee":1000000000}],[21,{"fee":4000000}], + [22,{"fee":4000000,"price_per_kbyte":20}],[23,{"fee":200000,"price_per_kbyte":20}],[24,{"fee":200000}],[25,{"fee":200000}], + [26,{"fee":4000000}],[27,{"fee":0,"price_per_kbyte":20}],[28,{"fee":1000000000}],[29,{"fee":200000}],[30,{"fee":200000}], + [31,{"fee":4000000}],[32,{"fee":1000000000}],[33,{"fee":200000}],[34,{"fee":200000}],[35,{"fee":200000,"price_per_kbyte":20}], + [36,{"fee":4000000}],[37,{}],[38,{"fee":1000000,"price_per_kbyte":20}],[39,{"fee":2000000,"price_per_output":2000000}], + [41,{"fee":2000000}]],"scale":10000},"block_interval":3,"maintenance_interval":3600,"maintenance_skip_slots":3, + "committee_proposal_review_period":3600,"maximum_transaction_size":98304,"maximum_block_size":2097152, + "maximum_time_until_expiration":86400,"maximum_proposal_lifetime":2419200,"maximum_asset_whitelist_authorities":10, + "maximum_asset_feed_publishers":10,"maximum_witness_count":1001,"maximum_committee_count":1001,"maximum_authority_membership":10, + "reserve_percent_of_fee":2000,"network_percent_of_fee":2000,"lifetime_referrer_percent_of_fee":3000, + "cashback_vesting_period_seconds":7776000,"cashback_vesting_threshold":10000000,"count_non_member_votes":true, + "allow_non_member_whitelists":false,"witness_pay_per_block":150000,"worker_budget_per_day":"50000000000", + "max_predicate_opcode":1,"fee_liquidation_threshold":10000000,"accounts_per_fee_scale":1000,"account_fee_scale_bitshifts":0, + "max_authority_depth":2,"extensions":[]},"next_available_vote_id":141,"active_committee_members":["1.5.11","1.5.20","1.5.19", + "1.5.14","1.5.4","1.5.7","1.5.8","1.5.9","1.5.10","1.5.12","1.5.15"],"active_witnesses":["1.6.12","1.6.13","1.6.14","1.6.15", + "1.6.16","1.6.17","1.6.18","1.6.19","1.6.20","1.6.21","1.6.22","1.6.24","1.6.25","1.6.26","1.6.27","1.6.28","1.6.30","1.6.34", + "1.6.35","1.6.37","1.6.38","1.6.42","1.6.43","1.6.45","1.6.49"]} + + """ + pass + + def onRegisterHistory(self): + 
""" Will be triggered once the websocket subsystem successfully + subscribed to the `history` API. + """ + pass + + def onRegisterDatabase(self): + """ Will be triggered once the websocket subsystem successfully + subscribed to the `database` API. + """ + pass + + def onRegisterNetworkNode(self): + """ Will be triggered once the websocket subsystem successfully + subscribed to the `network_node` API. + """ + pass + + def onRegisterNetworkBroadcast(self): + """ Will be triggered once the websocket subsystem successfully + subscribed to the `network_broadcast` API. + """ + pass + + +class BitSharesClient(): + """ The ``GrapheneClient`` class is an abstraction layer that makes the use of the + RPC and the websocket interface easier to use. A part of this + abstraction layer is to simplyfy the usage of objects and have + an internal objects map updated to reduce unecessary queries + (for enabled websocket connections). Advanced developers are of + course free to use the underlying API classes instead as well. + + :param class config: the configuration class + + If a websocket connection is configured, the websocket subsystem + can be run by: + + .. code-block:: python + + graphene = GrapheneClient(config) + graphene.run() + + """ + wallet_host = None + wallet_port = None + wallet_user = None + wallet_password = None + witness_url = None + witness_user = None + witness_password = None + prefix = None + + #: RPC connection to the cli-wallet + rpc = None + + #: Websocket connection to the witness/full node + ws = None + + def __init__(self, config): + """ Initialize configuration + """ + available_features = dir(config) + + if ("wallet_host" in available_features and + "wallet_port" in available_features): + self.wallet_host = config.wallet_host + self.wallet_port = config.wallet_port + + if ("wallet_user" in available_features and + "wallet_password" in available_features): + self.wallet_user = config.wallet_user + self.wallet_password = config.wallet_password + + self.rpc = BitSharesAPI(self.wallet_host, + self.wallet_port, + self.wallet_user, + self.wallet_password) + BitSharesAPI.__init__(self, + self.wallet_host, + self.wallet_port, + self.wallet_user, + self.wallet_password) + + self.core_asset = self.rpc.get_object("1.3.0")[0] + + # Connect to Witness Node + if "witness_url" in available_features: + self.witness_url = config.witness_url + + if ("witness_user" in available_features and + "witness_password" in available_features): + self.witness_user = config.witness_user + self.witness_password = config.witness_password + + self.ws = BitSharesWebsocket(self.witness_url, + self.witness_user, + self.witness_password, + proto=config) + + # Register Call available backs + if "onPropertiesChange" in available_features: + self.setObjectCallbacks({"2.0.0": config.onPropertiesChange}) + if "onBlock" in available_features: + self.setObjectCallbacks({"2.1.0": config.onBlock}) + if ("watch_accounts" in available_features and + "onAccountUpdate" in available_features): + account_ids = [] + for a in config.watch_accounts: + account = self.ws.get_account(a) + if "id" in account: + account_ids.append(account["id"]) + else: + log.warn("Account %s could not be found" % a) + self.setAccountsDispatcher(account_ids, config.onAccountUpdate) + if "market_separator" in available_features: + self.market_separator = config.market_separator + else: + self.market_separator = ":" + if ("watch_markets" in available_features): + self.markets = {} + for market in config.watch_markets: + try: + [quote_symbol, base_symbol] 
= market.split(self.market_separator) + except: + raise ValueError("An error has occured trying to " + + "parse the markets! Please " + + "check your values") + try: + quote = self.ws.get_asset(quote_symbol) + base = self.ws.get_asset(base_symbol) + except: + raise Exception("Couldn't load assets for market %s" + % market) + if not quote or not base: + raise Exception("Couldn't load assets for market %s" + % market) + + if "id" in quote and "id" in base: + if "onMarketUpdate" in available_features: + self.markets.update({ + market: {"quote": quote["id"], + "base": base["id"], + "base_symbol": base["symbol"], + "quote_symbol": quote["symbol"], + "callback": config.onMarketUpdate}}) + else: # No callbacks + self.markets.update({ + market: {"quote": quote["id"], + "base": base["id"], + "base_symbol": base["symbol"], + "quote_symbol": quote["symbol"]}}) + else: + log.warn("Market assets could not be found: %s" + % market) + self.setMarketCallBack(self.markets) + + if ("watch_assets" in available_features): + assets = [] + for asset in config.watch_assets: + a = self.ws.get_asset(asset) + if not a: + log.warning("The asset %s does not exist!" % a) + + if ("onAssetUpdate" in available_features): + a["callback"] = config.onAssetUpdate + assets.append(a) + self.setAssetDispatcher(assets) + + if "onRegisterHistory" in available_features: + self.setEventCallbacks( + {"registered-history": config.onRegisterHistory}) + if "onRegisterDatabase" in available_features: + self.setEventCallbacks( + {"registered-database": config.onRegisterDatabase}) + if "onRegisterNetworkNode" in available_features: + self.setEventCallbacks( + {"registered-network-node": config.onRegisterNetworkNode}) + if "onRegisterNetworkBroadcast" in available_features: + self.setEventCallbacks( + {"registered-network-broadcast": + config.onRegisterNetworkBroadcast}) + + self.core_asset = self.ws.get_object("1.3.0") + + if not self.core_asset: + raise Exception("Neither WS nor RPC propery configured!") + + if self.core_asset["symbol"] == "BTS": + self.prefix = "BTS" + elif self.core_asset["symbol"] == "MUSE": + self.prefix = "MUSE" + elif self.core_asset["symbol"] == "TEST": + self.prefix = "TEST" + elif self.core_asset["symbol"] == "CORE": + self.prefix = "GPH" + + """ Get network configuration + """ + def getChainInfo(self): + """ Returns some information about the connected chain. + + :return: Blockchain data + :rtype: json + + .. warning:: Note, this does not verify if the cli-wallet is + on the same network as the witness node! + + Example: + + .. code-block:: s + + {'chain_id': 'b8d1603965b3eb1acba27e62ff59f74efa3154d43a4188d381088ac7cdf35539', + 'core_symbol': 'CORE', + 'prefix': 'GPH'} + + """ + if self.ws: + core_asset = self.ws.get_object("1.3.0") + chain_id = self.ws.get_chain_id() + elif self.rpc: + core_asset = self.rpc.get_object("1.3.0") + chain_id = self.rpc.info()["chain_id"] + else: + raise Exception("Neither either ws or rpc connection!") + return {"prefix": self.prefix, + "core_symbol": core_asset["symbol"], + "chain_id": chain_id} + + def getObject(self, oid): + """ Get an Object either from Websocket store (if available) or + from RPC connection. 
+ """ + if self.ws: + [_instance, _type, _id] = oid.split(".") + if (not (oid in self.ws.objectMap) or + _instance == "1" and _type == "7"): # force refresh orders + data = self.ws.get_object(oid) + self.ws.objectMap[oid] = data + else: + data = self.ws.objectMap[oid] + if len(data) == 1: + return data[0] + else: + return data + else: + return self.rpc.get_object(oid)[0] + + def get_object(self, oid): + """ Identical to ``getObject`` + """ + return self.getObject(oid) + + """ Forward these calls to Websocket API + """ + def setEventCallbacks(self, callbacks): + """ Internally used to register subsystem events, such as + `register-database` to callbacks + """ + self.ws.setEventCallbacks(callbacks) + + def setAccountsDispatcher(self, accounts, callback): + """ Internally used to register a account notification dispatcher + """ + self.ws.setAccountsDispatcher(accounts, callback) + + def setObjectCallbacks(self, callbacks): + """ Internally used to register object notification callbacks + """ + self.ws.setObjectCallbacks(callbacks) + + def setMarketCallBack(self, markets): + """ Internally used to register Market update callbacks + """ + self.ws.setMarketCallBack(markets) + + def setAssetDispatcher(self, markets): + """ Internally used to register Market update callbacks + """ + self.ws.setAssetDispatcher(markets) + + """ Connect to Websocket and run asynchronously + """ + def connect(self): + """ Only *connect* to the websocket server. Does **not** run the + subsystem. + """ + self.ws.connect() + + def run_forever(self): + """ Only **run** the subsystem. Requires to run ``connect()`` + first. + """ + self.ws.run_forever() + + def run(self): + """ Connect to Websocket server **and** run the subsystem """ + self.connect() + self.run_forever() diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..1a3197b3586211e1ebc0d521024f916b69d5b270 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,192 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = _build + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) +$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) +endif + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
+ +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext + +help: + @echo "Please use \`make <target>' where <target> is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " applehelp to make an Apple Help Book" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + @echo " coverage to run coverage check of the documentation (if enabled)" + +clean: + rm -rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/python-graphenelib.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/python-graphenelib.qhc" + +applehelp: + $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp + @echo + @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." + @echo "N.B. You won't be able to view it unless you put it in" \ + "~/Library/Documentation/Help or install it in your application" \ + "bundle." + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." 
+ @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/python-graphenelib" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/python-graphenelib" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +coverage: + $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage + @echo "Testing of coverage in the sources finished, look at the " \ + "results in $(BUILDDIR)/coverage/python.txt." + +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +pseudoxml: + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/docs/account.rst b/docs/account.rst new file mode 100644 index 0000000000000000000000000000000000000000..e63cf596eed182bf952c48c29add963667074abe --- /dev/null +++ b/docs/account.rst @@ -0,0 +1,66 @@ +************** +Account Module +************** + +Address Class +############# + +.. autoclass:: graphenebase.account.Address + :members: __repr__, __str__, __format__, __bytes__ + +PublicKey Class +############### + +.. 
autoclass:: graphenebase.account.PublicKey + :members: __repr__, __str__, __format__, __bytes__ + +PrivateKey Class +################ + +.. autoclass:: graphenebase.account.PrivateKey + :members: + +Brainkey +######## + +.. autoclass:: graphenebase.account.BrainKey + :members: + +Remarks +####### + +Format vs. Repr +*************** + +.. code-block:: python + + print("Private Key : " + format(private_key,"WIF")) + print("Secret Exponent (hex) : " + repr(private_key)) + print("BTS PubKey (hex) : " + repr(private_key.pubkey)) + print("BTS PubKey : " + format(private_key.pubkey, "BTS")) + print("BTS Address : " + format(private_key.address,"BTS")) + +Output:: + + Private Key : 5Jdv8JHh4r2tUPtmLq8hp8DkW5vCp9y4UGgj6udjJQjG747FCMc + Secret Exponent (hex) : 6c2662a6ac41bd9132a9f846847761ab4f80c82d519cdf92f40dfcd5e97ec5b5 + BTS PubKey (hex) : 021760b78d93878af16f8c11d22f0784c54782a12a88bbd36be847ab0c8b2994de + BTS PubKey : BTS54nWRnewkASXXTwpn3q4q8noadzXmw4y1KpED3grup7VrDDRmx + BTS Address : BTSCmUwH8G1t3VSZRH5kwxx31tiYDNrzWvyW + +Compressed vs. Uncompressed +*************************** + +.. code-block:: python + + print("BTC uncomp. Pubkey (hex): " + repr(private_key.uncompressed.pubkey)) + print("BTC Address (uncompr) : " + format(private_key.uncompressed.address,"BTC")) + print("BTC comp. Pubkey (hex) : " + repr(private_key.pubkey)) + print("BTC Address (compr) : " + format(private_key.address,"BTC")) + +Output:: + + BTC uncomp. Pubkey (hex): 041760b78d93878af16f8c11d22f0784c54782a12a88bbd36be847ab0c8b2994de4d5abd46cabab34222023cd9034e1e6c0377fac5579a9c01e46b9498529aaf46 + BTC Address (uncompr) : 1JidAV2npbyLn77jGYQtkpJDjx6Yt5eJSh + BTC comp. Pubkey (hex) : 021760b78d93878af16f8c11d22f0784c54782a12a88bbd36be847ab0c8b2994de + BTC Address (compr) : 1GZ1JCW3kdL4LoCWbzHK4oV6V8JcUGG8HF diff --git a/docs/base58.rst b/docs/base58.rst new file mode 100644 index 0000000000000000000000000000000000000000..b8bfeb20e7297fcc70b0fcbeaabdbefcfa1834f3 --- /dev/null +++ b/docs/base58.rst @@ -0,0 +1,25 @@ +************ +Base58 Class +************ + +This class serves as an abstraction layer to deal with base58 encoded strings +and their corresponding hex and binary representation throughout the library. + +Examples: +######### + +.. code-block:: python + + format(Base58("02b52e04a0acfe611a4b6963462aca94b6ae02b24e321eda86507661901adb49"),"wif") + repr(Base58("5HqUkGuo62BfcJU5vNhTXKJRXuUi9QSE6jp8C3uBJ2BVHtB8WSd")) + +Output::: + + "5HqUkGuo62BfcJU5vNhTXKJRXuUi9QSE6jp8C3uBJ2BVHtB8WSd" + "02b52e04a0acfe611a4b6963462aca94b6ae02b24e321eda86507661901adb49" + +Definitions +########### + +.. autoclass:: graphenebase.base58.Base58 + :members: diff --git a/docs/bip38.rst b/docs/bip38.rst new file mode 100644 index 0000000000000000000000000000000000000000..04a77360bf620c7f1cbffbbf2a670e5fa8f99c31 --- /dev/null +++ b/docs/bip38.rst @@ -0,0 +1,32 @@ +**************************** +Bip38 Encrypted Private Keys +**************************** + +BIP 38 allows to encrypt and decrypt private keys in the WIF format. + +Examples +######## + +.. code-block:: python + + from graphenebase import PrivateKey + from graphenebase.bip38 import encrypt + + format(encrypt(PrivateKey("5HqUkGuo62BfcJU5vNhTXKJRXuUi9QSE6jp8C3uBJ2BVHtB8WSd"),"SecretPassPhrase"), "encwif") + + >> "6PRN5mjUTtud6fUXbJXezfn6oABoSr6GSLjMbrGXRZxSUcxThxsUW8epQi", + +.. 
code-block:: python + + from graphenebase import PrivateKey + from graphenebase.bip38 import decrypt + + format(decrypt("6PRN5mjUTtud6fUXbJXezfn6oABoSr6GSLjMbrGXRZxSUcxThxsUW8epQi","SecretPassPhrase"),"wif"), + + >> "5HqUkGuo62BfcJU5vNhTXKJRXuUi9QSE6jp8C3uBJ2BVHtB8WSd", + +Definitions +########### + +.. automodule:: graphenebase.bip38 + :members: encrypt, decrypt diff --git a/docs/classes.rst b/docs/classes.rst new file mode 100644 index 0000000000000000000000000000000000000000..fe70dbc0f4507ce9a3c9831fe02944079b0f5384 --- /dev/null +++ b/docs/classes.rst @@ -0,0 +1,44 @@ +******* +Classes +******* + +The library comes with a set of classes that are separated in different +packages: + +Exchange Package +################ + +.. toctree:: + :maxdepth: 1 + + exchange + +API Package +########### + +.. toctree:: + :maxdepth: 1 + + client + rpc + websocket + websocketrpc + +Base Package +############ + +.. toctree:: + :maxdepth: 2 + + base58 + account + memo + bip38 + +Extra Package +############# + +.. toctree:: + :maxdepth: 1 + + proposal diff --git a/docs/client.rst b/docs/client.rst new file mode 100644 index 0000000000000000000000000000000000000000..2cb4bcf2a4219f6b53640a1e18a1ddd3f5a3bb55 --- /dev/null +++ b/docs/client.rst @@ -0,0 +1,15 @@ +*************** +Graphene Client +*************** + +Configuration +############# + +.. autoclass:: grapheneapi.grapheneclient.ExampleConfig + :members: + +GrapheneClient +############## + +.. autoclass:: grapheneapi.grapheneclient.GrapheneClient + :members: diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000000000000000000000000000000000000..23dcf5d0b81ce9edaa4d6e7873b758f4884a52d3 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,286 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# +# python-graphenelib documentation build configuration file, created by +# sphinx-quickstart on Fri Jun 5 14:06:38 2015. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath('..')) +sys.path.insert(0, os.path.abspath('../scripts/')) + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = ["sphinx.ext.autodoc"] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. 
+project = 'python-graphenelib' +copyright = '2015, Fabian Schuh' +author = 'Fabian Schuh' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = '0.1' +# The full version, including alpha/beta/rc tags. +release = '0.1' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'pyramid' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# "<project> v<release> documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. 
+#html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr' +#html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +#html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +#html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'python-graphenelibdoc' + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { +# The paper size ('letterpaper' or 'a4paper'). +#'papersize': 'letterpaper', + +# The font size ('10pt', '11pt' or '12pt'). +#'pointsize': '10pt', + +# Additional stuff for the LaTeX preamble. +#'preamble': '', + +# Latex figure (float) alignment +#'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'python-graphenelib.tex', 'python-graphenelib Documentation', + 'Fabian Schuh', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). 
+man_pages = [ + (master_doc, 'python-graphenelib', 'python-graphenelib Documentation', + [author], 1) +] + +# If true, show URL addresses after external links. +#man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'python-graphenelib', 'python-graphenelib Documentation', + author, 'python-graphenelib', 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False diff --git a/docs/exchange.rst b/docs/exchange.rst new file mode 100644 index 0000000000000000000000000000000000000000..ad3c2b1a5dcecc57af19f2d904b74632824f98b4 --- /dev/null +++ b/docs/exchange.rst @@ -0,0 +1,119 @@ +***************** +Graphene Exchange +***************** + +This module simplyfies the development of trading bots by adding an +abstraction layer to deal with blockchain specific APIs and offers a +simpliefied API that is commonly used by many exchanges (e.g. Poloniex). + +The exchange module knows two modes: + +* **RPC to cli-wallet** + + This mode performs transaction construction and + signing in using the cli_wallet. It connects to it using Remote + Procedure Calls (RPC) and requires the **active** private key of the + trading account to be installed in the cli_wallet with ``import_key + <account> <wif>`` + +* **Transaction Signing**: + + This mode performs everything in python and + does **not** depend on a cli_wallet connection. It requires the the + **active** private key is provided in the configuration. + +Usage 1 +####### + +In this example, we call some of the calls available to the exchange +module. This example can be run **safely** by any one since the Exchange +module is instanciated with ``safe_mode=True``. +Note that you will get an error because the private key does not give +access to the provided example account. + +.. code-block:: python + + from grapheneexchange.exchange import GrapheneExchange + from pprint import pprint + + + class Config(): + witness_url = "wss://bitshares.openledger.info/ws" + witness_user = "" + witness_password = "" + + watch_markets = ["USD_BTS", "GOLD_BTS"] + market_separator = "_" + account = "xeroc" + wif = "BTS5Bfhb7aRCWUTh1CkrSXUyRJsSqqzD1rQCVkxmrHGfCF8HuktfR" + + if __name__ == '__main__': + dex = GrapheneExchange(Config, safe_mode=True) + pprint((dex.returnTradeHistory("USD_BTS"))) + pprint((dex.returnTicker())) + pprint((dex.return24Volume())) + pprint((dex.returnOrderBook("USD_BTS"))) + pprint((dex.returnBalances())) + pprint((dex.returnOpenOrders("all"))) + pprint(dex.buy("USD_BTS", 0.001, 10)) + pprint(dex.sell("USD_BTS", 0.001, 10)) + pprint(dex.close_debt_position("USD")) + pprint(dex.adjust_debt(10, "USD", 3.0)) + pprint(dex.borrow(10, "USD", 3.0)) + pprint(dex.cancel("1.7.1111")) + +Usage 2 +####### + +A simple example for a bot can be found in +`scripts/exchange-bridge-market-maker/` and works like this: + +.. 
code-block:: python + + from grapheneexchange import GrapheneExchange + import config + dex = GrapheneExchange(config, safe_mode=False) + #: Close all orders that have been put in those markets previously + orders = dex.returnOpenOrders() + for m in orders: + for o in orders[m]: + print(" - %s" % o["orderNumber"]) + dex.cancel(o["orderNumber"]) + #: Buy and Sell Prices + buy_price = 1 - config.bridge_spread_percent / 200 + sell_price = 1 + config.bridge_spread_percent / 200 + #: Amount of Funds available for trading (per asset) + balances = dex.returnBalances() + asset_ids = [] + amounts = {} + for market in config.watch_markets : + quote, base = market.split(config.market_separator) + asset_ids.append(base) + asset_ids.append(quote) + assets_unique = list(set(asset_ids)) + for a in assets_unique: + if a in balances : + amounts[a] = balances[a] * config.bridge_amount_percent / 100 / asset_ids.count(a) + for m in config.watch_markets: + quote, base = m.split(config.market_separator) + if quote in amounts : + print(" - Selling %f %s for %s @%f" % (amounts[quote], quote, base, sell_price)) + dex.sell(m, sell_price, amounts[quote]) + if base in amounts : + print(" - Buying %f %s with %s @%f" % (amounts[base], base, quote, buy_price)) + dex.buy(m, buy_price, amounts[base] * buy_price) + +Specifications +############## + +GrapheneExchange +**************** + +.. autoclass:: grapheneexchange.exchange.GrapheneExchange + :members: + +Configuration +************* + +.. autoclass:: grapheneexchange.exchange.ExampleConfig + :members: diff --git a/docs/graphene-api.rst b/docs/graphene-api.rst new file mode 100644 index 0000000000000000000000000000000000000000..088480e5a98c0d857cc7b716bc62c8948a3d0987 --- /dev/null +++ b/docs/graphene-api.rst @@ -0,0 +1,48 @@ +Remote Procedure Calls +====================== + +We provide several different API's. Each API has its own ID. When running +`witness_node`, initially two API's are available: + +* API 0: provides read-only access to the database +* API 1 is used to login and gain access to additional, restricted API's. + +Unrestricted Calls +------------------ + +Since API 0 is state-less it is accessible via regular JSON-RPC calls like::: + + $ curl --data '{"jsonrpc": "2.0", "method": "get_accounts", "params": [["1.2.0"]], "id": 1}' http://127.0.0.1:8090/rpc + +We can do the same thing using an HTTP client such as curl for API's which do +not require login or other session state::: + + $ curl --data '{"jsonrpc": "2.0", "method": "call", "params": [0, "get_accounts", [["1.2.0"]]], "id": 1}' http://127.0.0.1:8090/rpc + {"id":1,"result":[{"id":"1.2.0","annotations":[],"membership_expiration_date":"1969-12-31T23:59:59","registrar":"1.2.0","referrer":"1.2.0","lifetime_referrer":"1.2.0","network_fee_percentage":2000,"lifetime_referrer_fee_percentage":8000,"referrer_rewards_percentage":0,"name":"committee-account","owner":{"weight_threshold":1,"account_auths":[],"key_auths":[],"address_auths":[]},"active":{"weight_threshold":6,"account_auths":[["1.2.5",1],["1.2.6",1],["1.2.7",1],["1.2.8",1],["1.2.9",1],["1.2.10",1],["1.2.11",1],["1.2.12",1],["1.2.13",1],["1.2.14",1]],"key_auths":[],"address_auths":[]},"options":{"memo_key":"GPH1111111111111111111111111111111114T1Anm","voting_account":"1.2.0","num_witness":0,"num_committee":0,"votes":[],"extensions":[]},"statistics":"2.7.0","whitelisting_accounts":[],"blacklisting_accounts":[]}]} + +Restricted Calls +---------------- + +However, the restricted APIs require login and are **only** accessible over the +websocket RPC. 
Here is an example using wscat package from npm for websockets::: + + $ npm install -g wscat + $ wscat -c ws://127.0.0.1:8090 + > {"id":1, "method":"call", "params":[0,"get_accounts",[["1.2.0"]]]} + < {"id":1,"result":[{"id":"1.2.0","annotations":[],"membership_expiration_date":"1969-12-31T23:59:59","registrar":"1.2.0","referrer":"1.2.0","lifetime_referrer":"1.2.0","network_fee_percentage":2000,"lifetime_referrer_fee_percentage":8000,"referrer_rewards_percentage":0,"name":"committee-account","owner":{"weight_threshold":1,"account_auths":[],"key_auths":[],"address_auths":[]},"active":{"weight_threshold":6,"account_auths":[["1.2.5",1],["1.2.6",1],["1.2.7",1],["1.2.8",1],["1.2.9",1],["1.2.10",1],["1.2.11",1],["1.2.12",1],["1.2.13",1],["1.2.14",1]],"key_auths":[],"address_auths":[]},"options":{"memo_key":"GPH1111111111111111111111111111111114T1Anm","voting_account":"1.2.0","num_witness":0,"num_committee":0,"votes":[],"extensions":[]},"statistics":"2.7.0","whitelisting_accounts":[],"blacklisting_accounts":[]}]} + +APIs +_______ + +The graphene witness node distinguishes sever different APIs for security +reasons: + +* `database_api` +* `history_api` +* `network_broadcast_api` +* `network_node_api` +* `login_api` + +The most important api for polling blockchain data is the `database_api`. A +list of all available calls can be found in +`graphene/libraries/app/include/graphene/app/database_api.hpp`. diff --git a/docs/graphene-objects.rst b/docs/graphene-objects.rst new file mode 100644 index 0000000000000000000000000000000000000000..20bf1b9ef334b77dccc6572b833edad7747e48c0 --- /dev/null +++ b/docs/graphene-objects.rst @@ -0,0 +1,44 @@ +Graphene Blockchain Objects +=========================== + +In contrast to most cryptocurrency wallets, the BitShares 2.0 has a different +model to represent the blockchain, its transactions and accounts. This chapter +wants to given an introduction to the concepts of *objects* as they are used by +the BitShares 2.0 client. Furthermore, we will briefly introduce the API and +show how to subscribe to object changes (such as new blocks or incoming +deposits). Afterwards, we will show how exchange may monitor their accounts and +credit incoming funds to their corresponding users. + +Objects +------- + +On the BitShares blockchains there are no addresses, but objects identified by a +unique *id*, an *type* and a *space* in the form::: + + space.type.id + +Some examples::: + + 1.2.15 # protocol space / account / id: 15 + 1.6.105 # protocol space / witness / id: 105 + 1.14.7 # protocol space / worker / id: 7 + + 2.1.0 # implementation space / dynamic global properties + 2.3.8 # implementation space / asset . id: 8 + +A programmatic description of all fields can be found in the +[sources](https://github.com/cryptonomex/graphene/blob/master/libraries/chain/include/graphene/chain/protocol/types.hpp). + +Accounts +-------- + +The BitShares blockchain users are requires to register each account with a +unique username and a public key on the blockchain. The blockchain assigns an +incremental user *id* and offers to resolve the name-to-id pair. 
For instance
+`1.2.15`::
+
+    2.6.80 # implementation space / account-balance / id: 80
+    2.7.80 # implementation space / account-statistics / id: 80
+    2.10.80 # implementation space / account-transactions / id: 80
+    2.8.80 # implementation space / transactions / id: 80
+    2.9.80 # implementation space / block-summary / id: 80
diff --git a/docs/graphene-ws.rst b/docs/graphene-ws.rst
new file mode 100644
index 0000000000000000000000000000000000000000..52aeb25e5ccef3976b9862a2c80cf1b53f82249e
--- /dev/null
+++ b/docs/graphene-ws.rst
@@ -0,0 +1,58 @@
+Websocket subscriptions
+=======================
+
+Before we can subscribe to any changes, we first need to ask for access to the
+`database`-api with::
+
+    > {"id":2,"method":"call","params":[0,"database",[]]}
+    < {"id":2,"result":1}
+
+The `result` will be our `DATABASE_API_ID`!
+
+In Graphene, we have the following subscriptions available:
+
+* `set_subscribe_callback( cb, bool clear_filter )`:
+  To simplify development, a global subscription callback can be registered.
+* `set_pending_transaction_callback( cb )`:
+  Notifications for incoming *unconfirmed* transactions.
+* `set_block_applied_callback( blockid )`:
+  Gives a notification whenever the block `blockid` is applied to the
+  blockchain.
+
+Let's first register a global subscription callback to distinguish our
+notifications from regular RPC calls::
+
+    > {"id":4,"method":"call","params":[DATABASE_API_ID,"set_subscribe_callback",[SUBSCRIPTION_ID]]}
+
+The call above registers `SUBSCRIPTION_ID` as the id for notifications.
+
+Now, whenever you get an object from the witness (e.g. via `get_objects`) you
+will automatically subscribe to any future changes of that object.
+
+After calling `set_subscribe_callback` the witness will start to send notices
+every time the object changes::
+
+    < {
+        "method": "notice",
+        "params": [
+            SUBSCRIPTION_ID,
+            [[
+                { "id": "2.1.0", ... },
+                { "id": ... },
+                { "id": ... },
+                { "id": ... }
+            ]]
+        ],
+    }
+
+Here is an example of a full session::
+
+    > {"id":1,"method":"call","params":[0,"login",["",""]]}
+    < {"id":1,"result":true}
+    > {"id":2,"method":"call","params":[0,"database",[]]}
+    < {"id":2,"result":1}
+    > {"id":3,"method":"call","params":[1,"set_subscribe_callback",[200]]}
+    < {"id":3,"result":true}
+    < {"method":"notice","params":[200,[[{"id":"2.1.0","random":"2033120557c36e278db2eaad818494f791ff4d7b0418858a7ab9b5a8","head_block_number":5,"head_block_id":"00000005171f82f1b6bd948e7d58d95e572001fd","time":"2015-05-01T13:05:50","current_witness":"1.7.5","next_maintenance_time":"2015-05-02T00:00:00"}]]]}
+    < {"method":"notice","params":[200,[[{"id":"2.1.0","random":"9d5ff7e453db4815005eb42ddd040e3afb459950f75f4440deb3dec0","head_block_number":6,"head_block_id":"000000060e3369d6feaf330ea9114cd855c93aab","time":"2015-05-01T13:05:55","current_witness":"1.7.3","next_maintenance_time":"2015-05-02T00:00:00"}]]]}
+    < {"method":"notice","params":[200,[[{"id":"2.1.0","random":"cb8686582c40634a0c0834d0f2c4ad19f8ca80598cc3eee2b93c124d","head_block_number":7,"head_block_id":"000000071d0bc8db55d7da75d1d880818d1930fd","time":"2015-05-01T13:06:00","current_witness":"1.7.0","next_maintenance_time":"2015-05-02T00:00:00"}]]]}
diff --git a/docs/howto-exchanges-detailed.rst b/docs/howto-exchanges-detailed.rst
new file mode 100644
index 0000000000000000000000000000000000000000..b55f6dec39999c09f83733cb87af32f7467246e1
--- /dev/null
+++ b/docs/howto-exchanges-detailed.rst
@@ -0,0 +1,456 @@
+Howto Interface your Exchange with Graphene (Detailed)
+======================================================
+
+This howto serves as an introduction for exchanges that want to interface with
+BitShares to allow trading of assets from the BitShares network. It is
+recommended that the developer reads and understands the content of the
+following articles:
+
+.. toctree::
+    :maxdepth: 1
+
+    graphene-objects
+    graphene-api
+    graphene-ws
+    wallet
+    rpc
+
+.. note:: This tutorial explains the inner workings of the monitoring script
+          provided as `monitor.py` in the `scripts/monitor-despoits` directory.
+
+Network and Client Configuration
+--------------------------------
+
+Introduction
+____________________
+
+Similar to other cryptocurrencies, it is recommended to wait for several
+confirmations of a transaction. Even though the consensus scheme of Graphene is
+a lot more secure than regular proof-of-work or other proof-of-stake schemes, we
+still support exchanges that require more confirmations for deposits.
+
+We provide a so-called *delayed* full node which accepts two additional
+parameters for the configuration besides those already available with the
+standard daemon.
+
+* `trusted-node` RPC endpoint of a trusted validating node (required)
+* `delay-block-count` Number of blocks to delay before advancing chain state (required)
+
+The trusted-node is a regular full node directly connected to the P2P
+network that works as a proxy. The `delay-block-count` gives the number of
+blocks that the delayed full node will be behind the real blockchain.
+
+Overview of the Setup
+-------------------------------
+
+In the following, we will set up and use the following network::
+
+    P2P network <-> Trusted Full Node <-> Delayed Full Node <-> API
+
+* P2P network:
+  The BitShares client uses a peer-to-peer network to connect and broadcast
+  transactions there. A block-producing full node will eventually pick up your
+  transaction and validate it by including it in a new block.
+* Trusted Full Node:
+  We will use a full node to connect to the network directly. We call it
+  *trusted* since it is supposed to be under our control.
+* Delayed Full Node:
+  The delayed full node will provide us with a delayed and several times
+  confirmed and verified blockchain. Even though DPOS is more resistant against
+  forks than most other blockchain consensus schemes, we delay the blockchain
+  here to reduce the risk of forks even more. In the end, the delayed full
+  node is supposed to never enter an invalid fork.
+* API:
+  Since we have a delayed full node that we can fully trust, we will interface
+  with this node to query the blockchain and receive notifications from it on
+  balance changes.
+
+The delayed full node should be in the same *local* network as the trusted full
+node; only the trusted full node needs access to the internet. Hence we will
+work with the following IPs:
+
+* Trusted Full Node:
+
+  * extern: *internet access*
+  * intern: `192.168.0.100`
+
+* Delayed Full Node:
+
+  * extern: *no* internet access required
+  * intern: `192.168.0.101`
+
+Let's go into more detail on how to set these up.
+
+Trusted Full Node
+_________________
+
+For the trusted full node, the default settings can be used. For later, we
+will need to open the RPC port and listen on an IP address to which the
+delayed full node can connect::
+
+    ./programs/witness_node/witness_node --rpc-endpoint="192.168.0.100:8090"
+
+.. note:: A *witness* node is identical to a full node if no authorized
+          block-signing private key is provided.
+
+Delayed Full Node
+_________________
+
+The delayed full node will need the IP address and port of the RPC endpoint
+of the trusted full node and the number of blocks that should be delayed. We
+also need to open the RPC/Websocket port (to the local network!) so that we can
+interface using RPC-JSON calls.
+
+For our example, with a delay of 10 blocks (i.e. 30 seconds at 3-second block
+intervals), we need::
+
+    ./programs/delayed_node/delayed_node --trusted-node="192.168.0.100:8090" --delay-block-count=10 --rpc-endpoint="192.168.0.101:8090"
+
+We can now connect via RPC:
+
+* `192.168.0.100:8090` : The trusted full node exposed to the internet
+* `192.168.0.101:8090` : The delayed full node not exposed to the internet
+
+.. note:: For security reasons, an exchange should only interface with the delayed
+          full node.
+
+For obvious reasons, the trusted full node should be running before
+attempting to start the delayed full node.
+
+Interfacing via RPC and Websockets
+----------------------------------
+
+Overview
+________
+
+In order to access the unrestricted API-0, we can make use of the usual
+*stateless* RPC calls. To access the restricted API-1 we are required to use
+the websocket connection with callbacks:
+
+* API-0: `api.info()`, `api.get_*()`, ...
+* API-1: `api.ws_exec([api_identifier, call], callback)`
+
+Accessing API-0
+_______________
+
+Now that we have our delayed full node running at `192.168.0.101:8090`, we can
+interface with it using websockets. In order to access the websocket
+functionality, we can make use of `GrapheneWebsocket` which is provided by the
+`python-graphene` libraries (:doc:`installation`).
+
+.. code-block:: python
+
+    import json
+    from grapheneapi import GrapheneWebsocket, GrapheneWebsocketProtocol
+    if __name__ == '__main__':
+        api = GrapheneWebsocket("192.168.0.101", 8090, "", "")
+        # get configuration
+        chain_config = api.get_config()
+        # get dynamic properties (e.g. block head num)
+        dynam_proper = api.get_dynamic_global_properties()
+        # dump data
+        print(json.dumps(chain_config,indent=4))
+        print(json.dumps(dynam_proper,indent=4))
+
+This example opens up a websocket connection to our delayed full node with an
+empty username and password. As can be seen, the `api` object takes any method
+and translates it into a proper API call (see :doc:`graphene-api`).
+
+Accessing API-1
+_______________
+
+Even though most basic interaction can be performed using API-0, we sometimes
+need to hook into a running websocket connection. The difference
+between API-0 and API-1 is that API-1 can be authorized against the full node
+and be granted additional permissions. For instance, calling the transaction
+history of an account requires access to `history_api` and needs an `api_id` to
+access its associated calls.
+
+.. note:: The `GrapheneWebsocketProtocol` automatically obtains access to the
+          `database_api` as well as the `history_api`.
+
+To be able to interact with `history_api`, we extend the default
+`GrapheneWebsocketProtocol` protocol with:
+
+.. code-block:: python
+
+    class GrapheneMonitor(GrapheneWebsocketProtocol) :
+        def __init__(self) :
+            super().__init__()
+        def do_something(self, data) :
+            pass
+        def dump_api_ids(self) :
+            print(self.api_ids)
+            pass
+        def call_history_api(self, method, params, callback) :
+            self.wsexec([self.api_ids["history"], method, params], callback)
+            pass
+
+The variable `self.api_ids` is initialized automatically when the websocket
+connection is opened.
+
+.. note:: Since the websocket connection has a state and works asynchronously,
+          we need to hand over a callback function that will be executed when
+          the full node answers our request.
+
+We make use of this class when connecting to the full node:
+
+.. code-block:: python
+
+    protocol = GrapheneMonitor
+    api = GrapheneWebsocket(host, port, user, password, protocol)
+
+We can now either use API-0 by issuing calls via `api.*method*(*params*)` or
+asynchronously interact with restricted APIs via the class `GrapheneMonitor`.
+
+Subscribing to Object Changes
+_____________________________
+
+Besides polling for data, the full node is capable of sending notifications.
+Let's subscribe to changes and have them printed out on screen. To do so, we
+need to know the object id (see :doc:`graphene-objects`) we are interested in.
+For instance, if we want to get notified on changes of an account object, we
+subscribe to `1.2.*` with `*` being the account identification number.
+Alternatively, we can subscribe to changes of the corresponding balance (e.g.
+modification of most recent operations that change the balance) by subscribing
+to `2.6.*`.
+
+We subscribe by issuing `setObjectCallbacks()` and handing over a structure of
+the form `id: callback`. Hence, each object can be assigned only one callback.
+
+In the following example, we print out modifications of the object only:
+
+.. code-block:: python
+
+    api.setObjectCallbacks({ "2.6.69491" : print })
+    api.connect()
+    api.run_forever()
+
+The example subscribes to modifications of the object "2.6.69491" and will call
+`print` with the notification as parameter.
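If you need more than a plain `print`, the same call accepts any function that takes the notification as its only argument. The following is a minimal sketch of such a custom callback; the connection parameters are placeholders borrowed from the API-0 example above, and the object id is the same example id:

.. code-block:: python

    import json
    from grapheneapi import GrapheneWebsocket

    # Placeholder connection parameters -- point this at your (delayed) full node
    api = GrapheneWebsocket("192.168.0.101", 8090, "", "")

    def on_balance_change(notification):
        # The callback receives the changed object, just like ``print`` in the example above
        print("balance object changed:")
        print(json.dumps(notification, indent=4))

    # One callback per object id (a structure of the form ``id: callback``)
    api.setObjectCallbacks({"2.6.69491": on_balance_change})
    api.connect()
    api.run_forever()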
+ +A notification will be sent, whenever a value in the object `2.6.69491` +changes::: + + { + "id": "2.6.69491", + "total_core_in_orders": 0, + "most_recent_op": "2.9.0", + "pending_fees": 0, + "pending_vested_fees": 0, + "owner": "1.2.69491", + "lifetime_fees_paid": 0 + } + +To monitor balance changes we are mostly interested in `most_recent_op`-changes +which will be described in the following. + +To monitor accounts, we recommend to either use the `get_full_accounts` call or +to enable notifications manually in order to fetch the current state of an +account and *automatically* subscribe to future account updates including +balance update. + +.. note:: Please distinguish transactions from operations: Since a single + transaction may contain several (independent) operations, monitoring + an account may only require to investigate *operations* that change + the account. + +Decoding the Memo +_________________ + +In Graphene, memos are usually encrypted using a distinct memo key. That way, +exposing the memo private key will only expose transaction memos (for that key) +and not compromise any funds. It is thus safe to store the memo private key in +3rd party services and scripts. The memo public key can be obtained from the +account settings or via command line::: + + get_account myaccount + +in the cli wallet. The corresponding private key can be obtain from::: + + dump_private_keys + +Note that the latter command exposes all private keys in clear-text wif. + +The encrypted memo can be decoded with: + +.. code-block:: python + + from graphenebase import Memo, PrivateKey, PublicKey + memo_wif_key = "<wif-key>" + """ PubKey Prefix + Productive network: BTS + Testnetwork: GPH """ + #prefix = "GPH" + prefix = "BTS" + + memo = {...} # take from the transfer operation + privkey = PrivateKey(memo_wif_key) + pubkey = PublicKey(memo["from"], prefix=prefix) + memomsg = Memo.decode_memo(privkey, pubkey, memo["nonce"], memo["message"]) + +Monitoring Example Script +-------------------------- + +As an example, we can have notifications for all incoming transactions for any +account. Let's discuss this example script in more details: + +1) We first prepare our variables and import all required modules + + Define the `accountID` and the `memo_wif_key`. + The accountID can be obtained from the GUI wallet, or by issuing::: + + get_account <accountname> + + If the script exists abnormally, you can continue operations by setting + `last_op` to the last operation id that you have captured before the + abnormal exit. + +.. note:: The current implementation has a maxium history size of 100 + transaction. If you have missed more than 100 transaction with the + current implementation, manual fixing is required. + +.. code-block:: python + + import sys + import json + from grapheneapi import GrapheneWebsocket, GrapheneWebsocketProtocol + from graphenebase import Memo, PrivateKey, PublicKey + + """ Account id to monitor """ + accountID = "2.6.69585" + + """ Memo Key of the receiving account """ + memo_wif_key = "<wif-key>" + + """ Last operation ID that you have registered in your backend """ + last_op = "1.11.0" + + """ PubKey Prefix + Productive network: BTS + Testnetwork: GPH """ + prefix = "GPH" + #prefix = "BTS" + +2) We then overwrite the basis class so that we can access the restricted API-1: + +.. code-block:: python + + """ Callback on event + This function will be triggered on a notification of the witness. 
+ If you subsribe (see below) to 2.6.*, the witness node will notify you of + any chances regarding your account_balance """ + class GrapheneMonitor(GrapheneWebsocketProtocol) : + last_op = "1.11.0" + account_id = "1" + def __init__(self) : + super().__init__() + + def printJson(self,d) : print(json.dumps(d,indent=4)) + +3) We define an entry point for our notifications. Here notifications for +account balance changes will call `onAccountUpdate`. We will only get the +history of the account since the last operation (`last_op`) and call +`process_operations`. + +.. code-block:: python + + def onAccountUpdate(self, data) : + # Get Operation ID that modifies our balance + opID = api.getObject(data["most_recent_op"])["operation_id"] + self.wsexec([self.api_ids["history"], "get_account_history", [self.account_id, self.last_op, 100, "1.11.0"]], self.process_operations) + self.last_op = opID + +4) The history returns an array of operations and we process each of them + individually. To do so, we query the api to get more details about + + * the sender + * the receiver + * transfer amount and asset + * fee amount and asset + * the memo + +.. code-block:: python + + def process_operations(self, operations) : + for operation in operations[::-1] : + opID = operation["id"] + block = operation["block_num"] + op = operation["op"][1] + + if operation["op"][0] != 0 : continue ## skip non-transfer operations + + # Get assets involved in Fee and Transfer + fee_asset = api.getObject(op["fee"]["asset_id"]) + amount_asset = api.getObject(op["amount"]["asset_id"]) + + # Amounts for fee and transfer + fee_amount = op["fee"]["amount"] / float(10**int(fee_asset["precision"])) + amount_amount= op["amount"]["amount"] / float(10**int(amount_asset["precision"])) + + # Get accounts involved + from_account = api.getObject(op["from"]) + to_account = api.getObject(op["to"]) + + # Decode the memo + memo = op["memo"] + try : # if possible + privkey = PrivateKey(memo_wif_key) + pubkey = PublicKey(memo["from"], prefix=prefix) + memomsg = Memo.decode_memo(privkey, pubkey, memo["nonce"], memo["message"]) + except Exception as e: # if not possible + memomsg = "--cannot decode-- (%s)" % str(e) + +5) We then dump all of these information onto the screen. At this point an + exchange may want to forward the transaction as well as the memo to some + internal post processing and increase the customers balance. + + We here dump all of these information onto the screen. + +.. code-block:: python + + # Print out + print("last_op: %s | block:%s | from %s -> to: %s | fee: %f %s | amount: %f %s | memo: %s" % ( + opID, block, + from_account["name"], to_account["name"], + fee_amount, fee_asset["symbol"], + amount_amount, amount_asset["symbol"], + memomsg)) + +Now that we have extended our `GrapheneWebsocketProtocol` we make use of it as +follows. We first define our RPC connection settings and define our protocol to +be `GrapheneMonitor`. + +.. code-block:: python + + if __name__ == '__main__': + + ## RPC connections + host = "localhost" + port = 8090 + user = "" + password = "" + + ## Monitor definitions + protocol = GrapheneMonitor + +Then we define some initial parameters for our monitor. + +.. note:: The account id is derived from the given parameter whereas the first + part is replace to access the account object to obtain its history later. + +.. 
code-block:: python
+
+    protocol.last_op = last_op                                 ## last operation logged
+    protocol.account_id = "1.2.%s" % accountID.split(".")[2]   ## account to monitor
+
+We connect to the websocket protocol, define our subscription callback and let
+the script run indefinitely to listen for websocket notifications:
+
+.. code-block:: python
+
+    ## Open Up Graphene Websocket API
+    api = GrapheneWebsocket(host, port, user, password, protocol)
+    ## Set Callback for object changes
+    api.setObjectCallbacks({accountID : protocol.onAccountUpdate})
+    ## Run the Websocket connection continuously
+    api.connect()
+    api.run_forever()
diff --git a/docs/howto-exchanges.rst b/docs/howto-exchanges.rst
new file mode 100644
index 0000000000000000000000000000000000000000..faa02fe0367ec6b1aca43642dd923684521efc03
--- /dev/null
+++ b/docs/howto-exchanges.rst
@@ -0,0 +1,172 @@
+Howto Interface your Exchange with Graphene (Quick-Guide)
+=========================================================
+
+.. note:: This tutorial gives a very quick introduction on how to interface
+    your exchange with graphene. For a more detailed explanation please see
+    :doc:`howto-exchanges-detailed`
+
+
+Network and Client Configuration
+--------------------------------
+
+Overview of the Setup
+---------------------
+
+In the following, we will set up and use the following network::
+
+    P2P network <-> Trusted Full Node <-> Delayed Full Node <-> API
+
+* Trusted Full Node:
+  We will use a Full node to connect to the network directly. We call it
+  *trusted* since it is supposed to be under our control.
+* Delayed Full Node:
+  The delayed full node will provide us with a delayed and several times
+  confirmed and verified blockchain.
+
+Trusted Full Node
+_________________
+
+For the trusted full node, the default settings can be used. For later, we
+will need to open the RPC port and listen on an IP address that the delayed
+full node can connect to::
+
+    ./programs/witness_node/witness_node --rpc-endpoint="<internal-trusted-node-ip>:8090"
+
+.. note:: A *witness* node is identical to a full node if no authorized
+    block-signing private key is provided.
+
+Delayed Full Node
+_________________
+
+Set up a delayed node with `10` blocks delay (number of confirmations) and
+connect it to the trusted node::
+
+    ./programs/delayed_node/delayed_node --trusted-node="<internal-trusted-node-ip>:8090" --delay-block-count=10 --rpc-endpoint="<local-ip>:8090"
+
+Hence,
+
+* `<internal-trusted-node-ip>:8090` : the trusted full node exposed to the internet
+* `<local-ip>:8090` : the delayed full node not exposed to the internet
+
+.. note:: For security reasons, an exchange should only interface with the delayed
+    full node.
+
+Monitoring Example Script
+-------------------------
+
+As an example, we can receive notifications for all incoming transactions to
+any account. The monitoring script located in `examples/monitor.py` is
+discussed in more detail in :doc:`howto-exchanges-detailed`.
+
+All we need to define are the `accountID` and the `memo_wif_key`. The
+accountID can be obtained from the GUI wallet, or by issuing::
+
+    get_account <accountname>
+
+This command also exposes the memo *public key*. The corresponding *private key*
+can be extracted with::
+
+    dump_private_keys
+
+If the monitoring script exits abnormally, you can continue operations by
+setting `last_op` to the last operation id that you have captured before the
+abnormal exit.
+
+.. note:: The current implementation has a maximum history size of 100
+    transactions.
If you have missed more than 100 transaction with the + current implementation, manual fixing is required. + +.. code-block:: python + + import sys + import json + from grapheneapi import GrapheneWebsocket, GrapheneWebsocketProtocol + from graphenebase import Memo, PrivateKey, PublicKey + + """ RPC connection settings """ + host = "localhost" + port = 8090 + user = "" + password = "" + + """ Account id to monitor """ + accountID = "2.6.69585" + + """ Memo Key of the receiving account """ + memo_wif_key = "<wif-key>" + + """ Last operation ID that you have registered in your backend """ + last_op = "1.11.0" + + """ PubKey Prefix + Productive network: BTS + Testnetwork: GPH """ + #prefix = "GPH" + prefix = "BTS" + + """ Callback on event + This function will be triggered on a notification of the witness. + If you subsribe (see below) to 2.6.*, the witness node will notify you of + any chances regarding your account_balance """ + class GrapheneMonitor(GrapheneWebsocketProtocol) : + last_op = "1.11.0" + account_id = "1" + def __init__(self) : + super().__init__() + + def printJson(self,d) : print(json.dumps(d,indent=4)) + + def onAccountUpdate(self, data) : + # Get Operation ID that modifies our balance + opID = api.getObject(data["most_recent_op"])["operation_id"] + self.wsexec([self.api_ids["history"], "get_account_history", [self.account_id, self.last_op, 100, "1.11.0"]], self.process_operations) + self.last_op = opID + + def process_operations(self, operations) : + for operation in operations[::-1] : + opID = operation["id"] + block = operation["block_num"] + op = operation["op"][1] + + if operation["op"][0] != 0 : continue ## skip non-transfer operations + + # Get assets involved in Fee and Transfer + fee_asset = api.getObject(op["fee"]["asset_id"]) + amount_asset = api.getObject(op["amount"]["asset_id"]) + + # Amounts for fee and transfer + fee_amount = op["fee"]["amount"] / float(10**int(fee_asset["precision"])) + amount_amount= op["amount"]["amount"] / float(10**int(amount_asset["precision"])) + + # Get accounts involved + from_account = api.getObject(op["from"]) + to_account = api.getObject(op["to"]) + + # Decode the memo + memo = op["memo"] + try : # if possible + privkey = PrivateKey(memo_wif_key) + pubkey = PublicKey(memo["from"], prefix=prefix) + memomsg = Memo.decode_memo(privkey, pubkey, memo["nonce"], memo["message"]) + except Exception as e: # if not possible + memomsg = "--cannot decode-- (%s)" % str(e) + + # Print out + print("last_op: %s | block:%s | from %s -> to: %s | fee: %f %s | amount: %f %s | memo: %s" % ( + opID, block, + from_account["name"], to_account["name"], + fee_amount, fee_asset["symbol"], + amount_amount, amount_asset["symbol"], + memomsg)) + + if __name__ == '__main__': + ## Monitor definitions + protocol = GrapheneMonitor + protocol.last_op = last_op ## last operation logged + protocol.account_id = "1.2.%s" % accountID.split(".")[2] ## account to monitor + ## Open Up Graphene Websocket API + api = GrapheneWebsocket(host, port, user, password, protocol) + ## Set Callback for object changes + api.setObjectCallbacks({accountID : protocol.onAccountUpdate}) + ## Run the Websocket connection continuously + api.connect() + api.run_forever() diff --git a/docs/howto-monitor-operations.rst b/docs/howto-monitor-operations.rst new file mode 100644 index 0000000000000000000000000000000000000000..b8db4ad600323d3e42c032f516b9a1c515177a9c --- /dev/null +++ b/docs/howto-monitor-operations.rst @@ -0,0 +1,31 @@ +*************************************************** +Howto 
Monitor the blockchain for certain operations
+***************************************************
+
+Operations in blocks can be monitored relatively easily by using the
+`block_stream` (for entire blocks) or `stream` (for specific
+operations) generators.
+
+The following example will only show ``transfer`` operations on the
+blockchain:
+
+.. code-block:: python
+
+    from grapheneapi.grapheneclient import GrapheneClient
+    from pprint import pprint
+
+    class Config():
+        witness_url = "ws://testnet.bitshares.eu/ws"
+
+    if __name__ == '__main__':
+        client = GrapheneClient(Config)
+        for b in client.ws.stream("transfer"):
+            pprint(b)
+
+Note that you can define a starting block and, instead of waiting for
+sufficient confirmations (irreversible blocks), you can also consider
+the real *head* block with:
+
+.. code-block:: python
+
+    for b in client.ws.stream("transfer", start=199924, mode="head"):
+        pprint(b)
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..a2146a9afbda8b8a5aaf87b93b4d5461bbdead03
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,74 @@
+.. python-graphenelib documentation master file, created by
+   sphinx-quickstart on Fri Jun 5 14:06:38 2015.
+   You can adapt this file completely to your liking, but it should at least
+   contain the root `toctree` directive.
+
+.. http://sphinx-doc.org/rest.html
+   http://sphinx-doc.org/markup/index.html
+   http://sphinx-doc.org/markup/para.html
+   http://openalea.gforge.inria.fr/doc/openalea/doc/_build/html/source/sphinx/rest_syntax.html
+   http://rest-sphinx-memo.readthedocs.org/en/latest/ReST.html
+
+Welcome to python-graphenelib's documentation!
+===============================================
+
+Graphene is a blockchain technology (i.e. software) that allows for fast
+transactions and decentralized trading of assets as well as customized on-chain
+smart contracts.
+
+In practice, Graphene is only a concept implementation and does not directly
+have its own public blockchain.
+
+The first public blockchain to use the Graphene technology is *BitShares 2.0*.
+However, this library should be able to interface with any other Graphene-based
+blockchain, too.
+
+Python-Graphene Libraries
+-------------------------
+.. toctree::
+    :maxdepth: 3
+
+    installation
+    classes
+
+Decentralized Exchange (DEX)
+----------------------------
+.. toctree::
+    :maxdepth: 1
+
+    exchange
+
+Tutorials
+---------
+.. toctree::
+    :maxdepth: 1
+
+    howto-monitor-operations
+    howto-exchanges
+    howto-exchanges-detailed
+
+Scripts
+--------
+.. toctree::
+    :maxdepth: 1
+
+    scripts-monitor
+    scripts-pricefeed
+
+Graphene API
+------------
+.. 
toctree:: + :maxdepth: 1 + + graphene-objects + graphene-api + graphene-ws + witness + wallet + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/installation.rst b/docs/installation.rst new file mode 100644 index 0000000000000000000000000000000000000000..166fdbe3a837eca7fbae1649871167eb86bb4e86 --- /dev/null +++ b/docs/installation.rst @@ -0,0 +1,34 @@ +************ +Installation +************ + +Dependencies +############ + +:: + $ sudo apt-get install libffi-dev libssl-dev python-dev + + +Install Library +############### + +Install with `pip`: + +:: + + $ pip3 install graphenelib + +Manual installation: + +:: + + $ git clone https://github.com/xeroc/python-graphenlib/ + $ cd python-graphenlib + $ python3 setup.py install --user + +Upgrade +####### + +:: + + $ pip install --user --upgrade graphenelib diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000000000000000000000000000000000000..f801af9f96b61a424235e281024c8f6cbfb2b326 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,263 @@ +@ECHO OFF + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set BUILDDIR=_build +set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . +set I18NSPHINXOPTS=%SPHINXOPTS% . +if NOT "%PAPER%" == "" ( + set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% + set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% +) + +if "%1" == "" goto help + +if "%1" == "help" ( + :help + echo.Please use `make ^<target^>` where ^<target^> is one of + echo. html to make standalone HTML files + echo. dirhtml to make HTML files named index.html in directories + echo. singlehtml to make a single large HTML file + echo. pickle to make pickle files + echo. json to make JSON files + echo. htmlhelp to make HTML files and a HTML help project + echo. qthelp to make HTML files and a qthelp project + echo. devhelp to make HTML files and a Devhelp project + echo. epub to make an epub + echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter + echo. text to make text files + echo. man to make manual pages + echo. texinfo to make Texinfo files + echo. gettext to make PO message catalogs + echo. changes to make an overview over all changed/added/deprecated items + echo. xml to make Docutils-native XML files + echo. pseudoxml to make pseudoxml-XML files for display purposes + echo. linkcheck to check all external links for integrity + echo. doctest to run all doctests embedded in the documentation if enabled + echo. coverage to run coverage check of the documentation if enabled + goto end +) + +if "%1" == "clean" ( + for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i + del /q /s %BUILDDIR%\* + goto end +) + + +REM Check if sphinx-build is available and fallback to Python version if any +%SPHINXBUILD% 2> nul +if errorlevel 9009 goto sphinx_python +goto sphinx_ok + +:sphinx_python + +set SPHINXBUILD=python -m sphinx.__init__ +%SPHINXBUILD% 2> nul +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +:sphinx_ok + + +if "%1" == "html" ( + %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html + if errorlevel 1 exit /b 1 + echo. 
+ echo.Build finished. The HTML pages are in %BUILDDIR%/html. + goto end +) + +if "%1" == "dirhtml" ( + %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. + goto end +) + +if "%1" == "singlehtml" ( + %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. + goto end +) + +if "%1" == "pickle" ( + %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the pickle files. + goto end +) + +if "%1" == "json" ( + %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the JSON files. + goto end +) + +if "%1" == "htmlhelp" ( + %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run HTML Help Workshop with the ^ +.hhp project file in %BUILDDIR%/htmlhelp. + goto end +) + +if "%1" == "qthelp" ( + %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run "qcollectiongenerator" with the ^ +.qhcp project file in %BUILDDIR%/qthelp, like this: + echo.^> qcollectiongenerator %BUILDDIR%\qthelp\python-graphenelib.qhcp + echo.To view the help file: + echo.^> assistant -collectionFile %BUILDDIR%\qthelp\python-graphenelib.ghc + goto end +) + +if "%1" == "devhelp" ( + %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. + goto end +) + +if "%1" == "epub" ( + %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The epub file is in %BUILDDIR%/epub. + goto end +) + +if "%1" == "latex" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdf" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf + cd %~dp0 + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdfja" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf-ja + cd %~dp0 + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "text" ( + %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The text files are in %BUILDDIR%/text. + goto end +) + +if "%1" == "man" ( + %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The manual pages are in %BUILDDIR%/man. + goto end +) + +if "%1" == "texinfo" ( + %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. + goto end +) + +if "%1" == "gettext" ( + %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The message catalogs are in %BUILDDIR%/locale. + goto end +) + +if "%1" == "changes" ( + %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes + if errorlevel 1 exit /b 1 + echo. + echo.The overview file is in %BUILDDIR%/changes. 
+ goto end +) + +if "%1" == "linkcheck" ( + %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck + if errorlevel 1 exit /b 1 + echo. + echo.Link check complete; look for any errors in the above output ^ +or in %BUILDDIR%/linkcheck/output.txt. + goto end +) + +if "%1" == "doctest" ( + %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest + if errorlevel 1 exit /b 1 + echo. + echo.Testing of doctests in the sources finished, look at the ^ +results in %BUILDDIR%/doctest/output.txt. + goto end +) + +if "%1" == "coverage" ( + %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage + if errorlevel 1 exit /b 1 + echo. + echo.Testing of coverage in the sources finished, look at the ^ +results in %BUILDDIR%/coverage/python.txt. + goto end +) + +if "%1" == "xml" ( + %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The XML files are in %BUILDDIR%/xml. + goto end +) + +if "%1" == "pseudoxml" ( + %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. + goto end +) + +:end diff --git a/docs/memo.rst b/docs/memo.rst new file mode 100644 index 0000000000000000000000000000000000000000..e4214eb65db7671cc6c1fd6e33f4b1d4bdc8181e --- /dev/null +++ b/docs/memo.rst @@ -0,0 +1,75 @@ +**** +Memo +**** + +Memo Keys +######### + +In Graphene, memos are AES-256 encrypted with a shared secret between sender and +receiver. It is derived from the memo private key of the sender and the memo +publick key of the receiver. + +In order for the receiver to decode the memo, the shared secret has to be +derived from the receiver's private key and the senders public key. + +The memo public key is part of the account and can be retreived with the +`get_account` call: + +.. code-block:: js + + get_account <accountname> + { + [...] + "options": { + "memo_key": "GPH5TPTziKkLexhVKsQKtSpo4bAv5RnB8oXcG4sMHEwCcTf3r7dqE", + [...] + }, + [...] + } + +while the memo private key can be dumped with `dump_private_keys` + +Memo Message +############ + +The take the following form: + +.. code-block:: js + + { + "from": "GPH5mgup8evDqMnT86L7scVebRYDC2fwAWmygPEUL43LjstQegYCC", + "to": "GPH5Ar4j53kFWuEZQ9XhxbAja4YXMPJ2EnUg5QcrdeMFYUNMMNJbe", + "nonce": "13043867485137706821", + "message": "d55524c37320920844ca83bb20c8d008" + } + +The fields `from` and `to` contain the memo public key of sender and receiver. +The `nonce` is a random integer that is used for the seed of the AES encryption +of the message. + +Example +####### + +.. code-block:: python + + from graphenebase import Memo, PrivateKey, PublicKey + + wifkey = "5....<wif>" + memo = { + "from": "GPH5mgup8evDqMnT86L7scVebRYDC2fwAWmygPEUL43LjstQegYCC", + "to": "GPH5Ar4j53kFWuEZQ9XhxbAja4YXMPJ2EnUg5QcrdeMFYUNMMNJbe", + "nonce": "13043867485137706821", + "message": "d55524c37320920844ca83bb20c8d008" + } + try : + privkey = PrivateKey(wifkey) + pubkey = PublicKey(memo["from"], prefix=prefix) + memomsg = Memo.decode_memo(privkey, pubkey, memo["nonce"], memo["message"]) + except Exception as e: + memomsg = "--cannot decode-- %s" % str(e) + +Definitions +########### + +.. automodule:: graphenebase.memo + :members: diff --git a/docs/proposal.rst b/docs/proposal.rst new file mode 100644 index 0000000000000000000000000000000000000000..57ff842807045c73e7e97eeb26f2e2f06cba129d --- /dev/null +++ b/docs/proposal.rst @@ -0,0 +1,6 @@ +********* +Proposals +********* + +.. 
autoclass:: grapheneextra.proposal.Proposal + :members: diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..ac599fc8331f58be5b3dff251f1065edce2d0184 --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,3 @@ +autobahn>=0.14 +pycrypto==2.6.1 +graphenelib diff --git a/docs/rpc.rst b/docs/rpc.rst new file mode 100644 index 0000000000000000000000000000000000000000..a928c59fd232814ef872e6cd0da30985dfddd815 --- /dev/null +++ b/docs/rpc.rst @@ -0,0 +1,24 @@ +************* +RPC Interface +************* + +.. note:: This is a low level class that can be used in combination with + GrapheneClient + +We now need to distinguish functionalities. If we want to only access the +blockchain and do not want to perform on-chain operations like transfers or +orders, we are fine to interface with any accessible witness node. In contrast, +if we want to perform operations that modify the current blockchain state, e.g. +construct and broadcast transactions, we are required to interface with a +cli_wallet that has the required private keys imported. We here assume: + +* port: 8090 - witness +* port: 8092 - wallet + +.. note:: The witness API has a different instruction set than the wallet! + +Definition +########## + +.. autoclass:: grapheneapi.grapheneapi.GrapheneAPI + :members: _confirm, rpcexec, __getattr__ diff --git a/docs/scripts-monitor.rst b/docs/scripts-monitor.rst new file mode 100644 index 0000000000000000000000000000000000000000..162ce6bf1c3fd4ed21d6658983734af597a39a06 --- /dev/null +++ b/docs/scripts-monitor.rst @@ -0,0 +1,92 @@ +******************* +Monitoring Accounts +******************* + +To monitor accounts, we recommend to either use the `get_full_accounts` call or +to enable notifications manually in order to fetch the current state of an +account and *automatically* subscribe to future account updates including +balance update. + +Example Notifications +##################### + +A notification after a transaction would take the form::: + + [[ + { + "owner": "1.2.3184", + "balance": 1699918247, + "id": "2.5.3", + "asset_type": "1.3.0" + }, + { + "most_recent_op": "2.9.74", + "pending_vested_fees": 6269529, + "total_core_in_orders": 0, + "pending_fees": 0, + "owner": "1.2.3184", + "id": "2.6.3184", + "lifetime_fees_paid": 50156232 + } + ]] + +Please distinguish transactions from operations: Since a single transaction may +contain several (independent) operations, monitoring an account may only +require to investigate *operations* that change the account. + +Implementation Details +###################### + +In order to access the websocket functionalities, we can extend the +`GrapheneWebsocketProtocol` class. + +.. code-block:: python + + import json + from grapheneapi import GrapheneWebsocket, GrapheneWebsocketProtocol + + class GrapheneMonitor(GrapheneWebsocketProtocol) : + def __init__(self) : + super().__init__() + + def printJson(self,d) : print(json.dumps(d,indent=4)) + + def getTxFromOp(self, op) : + # print the most recent operation for our account! + self.getObject(op[0]["operation_id"], self.printJson) + + def onAccountUpdate(self, data) : + # most recent operation and callback getTxFromOp + self.getObject(data["most_recent_op"], self.getTxFromOp) + +We now have a set of 3 routines, `printJson` only dumps the available data. +The method `onAccountUpdate` will be trigged by the notification and will be +passed the notification's content. The type of the notification will be similar +to the object subscribed. 
Hence, if you subscribe to an object "2.6.12", you +will be notified about changes of "2.6.12" and the notification will carry the +id "2.6.12". In our case, 2.6.* represent operations that modify our account +balance and we get the id of the most recent operation that caused it. + +The call `getObject` tries to resolve the id and hand out the corresponding +data from memory if available, or retrieve the object from the witness. +The `getObject` call accepts a callback as second parameter which will be +passed the ouptut of the query. In our case `self.getTxFromOp` performs another +object lookup before dumping the operations details in json format. + +To register a notification and listen to the witness, we run: + +.. code-block:: python + + if __name__ == '__main__': + protocol = GrapheneMonitor + monitor = GrapheneWebsocket("localhost", 8090, "", "", protocol) + monitor.setObjectCallbacks({ + "2.6.69491" : protocol.onAccountUpdate, + }) + monitor.connect() + monitor.run_forever() + +The protocol `GrapheneMonitor` has been defined above, the api connection is +established with `GrapheneWebsocket` and the callbacks are registered with +`monitor.setObjectCallbacks`. The websocket connection is initiated an listened +to with the last two lines. diff --git a/docs/scripts-pricefeed.rst b/docs/scripts-pricefeed.rst new file mode 100644 index 0000000000000000000000000000000000000000..5c4c3ff6c171e9eaeeb7823aa7eae9c307512200 --- /dev/null +++ b/docs/scripts-pricefeed.rst @@ -0,0 +1,83 @@ +********** +Price Feed +********** +(advanced users only) +(active witnesses only) + +Requirements +############ + +We first need to install numpy and prettytable (besides autobahn and requests +for the library). + +.. code-block:: bash + + apt-get install python3 python3-pip python-autobahn + pip3 install requests --upgrade + pip3 install numpy prettytable autobahn crypto + +Cli Wallet +########## + +Make sure to launch your ``cli_wallet`` with the ``-H`` or +``--rpc-http-endpoint`` followed by ``127.0.0.1:8092`` (or any other ip:port). + +.. note:: Do not expose the cli_wallet API connection to the internet as this + may lead to loss of your funds! + +Since the cli_wallet has no P2P connection capabilities, you need to connect it +to either your own witness node or a publicly accessible node: + +.. code-block:: bash + + programs/cli_wallet/cli_wallet --rpc-http-endpoint="127.0.0.1:8092" -s "<ip-of-full/witness-node:port>" + +Hence, the overall network setting would look similar to::: + + P2P network <-> Full/Witness Node <-> Wallet <- Feed script + +.. note:: Do not interface with the witness/full node directly. This will not + work! + +Configuration +############# + +This (rather basic) price feed script is located in ``scripts/pricefeeds`` +together with an example configuration file: + +.. code-block:: bash + + cd scripts/pricefeeds/ + cp config-example.py config.py + # edit config.py + +Editing the ``config.py`` and be sure to set the RPC-client connection settings: + +* the host to ``"127.0.0.1"`` (with quotes) +* and the port to ``8092`` +* you can either put your unlock password into ``unlock`` or manually unlock + your wallet before starting the script +* unless you are an expert there is no need to put user/pw info +* change your name of your witness in delegate_list + +Running +####### + +1. unlock your wallet +2. ``python3 pricefeeds.py`` +3. You will be asked to provide confirmation of the prices! + +.. Cronjon + ####### + +.. 
Since the script fetches its data from other exchanges that may throttle your + polling frequency, and you may want to run the feed script regularily, we + recommend to setup your ``cron``-job as follows: + +.. .. code-block:: cron + +.. */2 * * * * /home/<user>/<path>/scripts/pricefeed/pricefeeds.py >> /home/<user>/feed-update.log + +.. This will execute the script twice per hour and append the log into + ``feed-update.log`` in your home directory. + diff --git a/docs/wallet.rst b/docs/wallet.rst new file mode 100644 index 0000000000000000000000000000000000000000..0cdcdc9f3f367c1f3c4418ef1b675f14506153c7 --- /dev/null +++ b/docs/wallet.rst @@ -0,0 +1,33 @@ +Console Wallet +============== + +The following will explain how to use the console wallet (not GUI). + +Launching +--------- + +The `cli_wallet` creates a local `wallet.json` file that contains the encrypted +private keys required to access the funds in your account. It requires a +running witness node (not necessarily locally) and can be launched with + +.. code-block:: bash + + programs/cli_wallet/cli_wallet -s ws://127.0.0.1:8090 + +Depending on the actual chain that you want to connect to your may need to +specifiy `--chain-id`. + +Enabling Remote Procedure Calls (RPC) +------------------------------------- + +In order to allow RPC calls for wallet operations (spend, buy, sell, ...) you +can choose between pure RPC or RPC-HTTP requests. In this tutorial, the latter +is prefered since well established libraries make use of the RPC-HTTP protocol. +To enable RPC-HTTP in your wallet you need to run + +.. code-block:: bash + + programs/cli_wallet/cli_wallet --rpc-http-endpoint="127.0.0.1:8092" + +This will open the port 8092 for local queries only. It is not recommended to +publicly expose your wallet! diff --git a/docs/websocket.rst b/docs/websocket.rst new file mode 100644 index 0000000000000000000000000000000000000000..b6cbc6882e3bf2d45a0c46e1cbd073ac76e980bb --- /dev/null +++ b/docs/websocket.rst @@ -0,0 +1,48 @@ +********* +Websocket +********* + +.. note:: This is a low level class that can be used in combination with + GrapheneClient + +Example +####### + +For more examples see the provided scripts. + +Run method on every new block +***************************** + +In order to access the websocket functionalities, we need to extend the +``GrapheneWebsocketProtocol`` class: + +.. code-block:: python + + from grapheneapi import GrapheneWebsocket, GrapheneWebsocketProtocol + + class GrapheneMonitor(GrapheneWebsocketProtocol) : + def __init__(self) : + super().__init__() + + def onBlock(self, data) : + print(data) + + if __name__ == '__main__': + protocol = GrapheneMonitor + api = GrapheneWebsocket(config.url, config.user, config.password, protocol) + + ## Set Callback for object changes + api.setObjectCallbacks({"2.0.0" : protocol.onBlock}) + + ## Run the Websocket connection continuously + api.connect() + api.run_forever() + +Definitions +########### + +.. autoclass:: grapheneapi.graphenews.GrapheneWebsocket + :members: + +.. autoclass:: grapheneapi.graphenewsprotocol.GrapheneWebsocketProtocol + :members: diff --git a/docs/websocketrpc.rst b/docs/websocketrpc.rst new file mode 100644 index 0000000000000000000000000000000000000000..503c6b1a5ad1c197c4830f36efb32f294a8aebad --- /dev/null +++ b/docs/websocketrpc.rst @@ -0,0 +1,13 @@ +************ +WebsocketRPC +************ + +.. 
note:: This is a low level class that can be used in combination with + GrapheneClient + +This class allows to call API methods exposed by the witness node via +websockets. It does **not** support notifications and is not run +asynchronously. + +.. autoclass:: grapheneapi.graphenewsrpc.GrapheneWebsocketRPC + :members: diff --git a/docs/witness.rst b/docs/witness.rst new file mode 100644 index 0000000000000000000000000000000000000000..25a3045709b5a3306f674f44908f228a14dfd03b --- /dev/null +++ b/docs/witness.rst @@ -0,0 +1,101 @@ +Witness +======= + +A witness node represents a full node in the network that verifies all +transactions and blocks against its local state. Hence, we recommend all +service providers to run an maintain their own witness nodes for reliability +and security reasons. + +It takes a `--data-dir` parameter to define a working and data directory to +store the configuration, blockchain and local databases. Those will be +automatically created with default settings if they don't exist locally set. + +Launching a witness node +------------------------ + +The witness is launched according to: + +.. code-block:: bash + + ./programs/witness_node/witness_node --data-dir="mydata" + +Configuration +------------- + +The configuration file `config.ini` in `mydata` is commented and contains the +following essential settings: + +.. code-block:: ini + + # Endpoint for P2P node to listen on + # p2p-endpoint = + + # P2P nodes to connect to on startup (may specify multiple times) + # seed-node = + + # Pairs of [BLOCK_NUM,BLOCK_ID] that should be enforced as checkpoints. + # checkpoint = + + # Endpoint for websocket RPC to listen on + # rpc-endpoint = 0.0.0.0:8090 + + # Endpoint for TLS websocket RPC to listen on + # rpc-tls-endpoint = + + # The TLS certificate file for this server + # server-pem = + + # Password for this certificate + # server-pem-password = + + # File to read Genesis State from + # genesis-json = sep-18-testnet-genesis.json + + # JSON file specifying API permissions + # api-access = apiaccess.json + + # Enable block production, even if the chain is stale. + enable-stale-production = false + + # Percent of witnesses (0-99) that must be participating in order to produce blocks + required-participation = false + + # Allow block production, even if the last block was produced by the same witness. + allow-consecutive = false + + # ID of witness controlled by this node (e.g. "1.6.5", quotes are required, may specify multiple times) + # witness-id = + + # Tuple of [PublicKey, WIF private key] (may specify multiple times) + # private-key = ["pubkey","wif-key"] + + # Account ID to track history for (may specify multiple times) + # track-account = + + # Track market history by grouping orders into buckets of equal size measured in seconds specified as a JSON array of numbers + # bucket-size = [15,60,300,3600,86400] + + # How far back in time to track history for each bucket size, measured in the number of buckets (default: 1000) + # history-per-size = 1000 + +Enabling Remote Procedure Calls (RPC) +------------------------------------- + +In order to allow RPC calls for blockchain operations you need to modify the +following entry in the configuration file: + +.. code-block:: bash + + rpc-endpoint = 0.0.0.0:8090 + +This will open the port 8090 for global queries only. Since the witness node +only maintains the blockchain and (unless you are an actively block producing +witness) no private keys are involved, it is safe to expose your witness to the +internet. 
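+
+A quick way to verify that the endpoint is reachable is to query it from the
+library. This is only a minimal sketch (host and port are placeholders for
+wherever your witness node listens); it reuses the calls shown in the other
+tutorials:
+
+.. code-block:: python
+
+    import json
+    from grapheneapi import GrapheneWebsocket
+
+    if __name__ == '__main__':
+        # connect to the witness node's websocket RPC endpoint opened above
+        api = GrapheneWebsocket("127.0.0.1", 8090, "", "")
+        # unrestricted API-0 calls, e.g. chain parameters and head block data
+        print(json.dumps(api.get_config(), indent=4))
+        print(json.dumps(api.get_dynamic_global_properties(), indent=4))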
+ +Restarting the witness node +--------------------------- + +When restarting the witness node, it may be required to append the +`--replay-blockchain` parameter to regenerate the local (in-memory) blockchain +state. diff --git a/setup.py b/setup.py index 98f59f133abfca54d64a88bf127e760a81543340..a737ebc0d434c15849d69154b9b279222df61cf5 100755 --- a/setup.py +++ b/setup.py @@ -25,7 +25,12 @@ setup(name='bitshares', maintainer_email='<Fabian@BitShares.eu>', url='http://www.github.com/xeroc/python-bitshares', keywords=['bitshares', 'library', 'api', 'rpc'], - packages=["bitshares", "bitsharesapi", "bitsharesbase"], + packages=[ + "bitshares", + "bitsharesapi", + "bitsharesbase", + "bitsharesdeprecated" + ], classifiers=['License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3', diff --git a/test/test_transactions.py b/test/test_transactions.py new file mode 100644 index 0000000000000000000000000000000000000000..ecea84b6560a269bf70c69476e02e2914e14f719 --- /dev/null +++ b/test/test_transactions.py @@ -0,0 +1,518 @@ +from bitsharesbase import ( + transactions, + memo, + account, + operations, + objects +) +from bitsharesbase.objects import Operation +from bitsharesbase.signedtransactions import Signed_Transaction +from bitsharesbase.account import PrivateKey +import random +import unittest +from pprint import pprint +from binascii import hexlify + +prefix = "BTS" +wif = "5KQwrPbwdL6PhXujxW37FSSQZ1JiwsST4cqQzDeyXtP79zkvFD3" +ref_block_num = 34294 +ref_block_prefix = 3707022213 +expiration = "2016-04-06T08:29:27" + + +class Testcases(unittest.TestCase): + + def test_call_update(self): + op = operations.Call_order_update(**{ + 'fee': {'amount': 100, + 'asset_id': '1.3.0'}, + 'delta_debt': {'amount': 10000, + 'asset_id': '1.3.22'}, + 'delta_collateral': {'amount': 100000000, + 'asset_id': '1.3.0'}, + 'funding_account': '1.2.29', + 'extensions': [] + }) + ops = [Operation(op)] + tx = Signed_Transaction(ref_block_num=ref_block_num, + ref_block_prefix=ref_block_prefix, + expiration=expiration, + operations=ops) + tx = tx.sign([wif], chain=prefix) + tx.verify([PrivateKey(wif).pubkey], "BTS") + txWire = hexlify(bytes(tx)).decode("ascii") + compare = ("f68585abf4dce7c8045701036400000000000000001d00e1f" + "50500000000001027000000000000160000011f2627efb5c5" + "144440e06ff567f1a09928d699ac6f5122653cd7173362a1a" + "e20205952c874ed14ccec050be1c86c1a300811763ef3b481" + "e562e0933c09b40e31fb") + self.assertEqual(compare[:-130], txWire[:-130]) + + def test_limit_order_create(self): + op = operations.Limit_order_create(**{ + "fee": {"amount": 100, + "asset_id": "1.3.0" + }, + "seller": "1.2.29", + "amount_to_sell": {"amount": 100000, + "asset_id": "1.3.0" + }, + "min_to_receive": {"amount": 10000, + "asset_id": "1.3.105" + }, + "expiration": "2016-05-18T09:22:05", + "fill_or_kill": False, + "extensions": [] + }) + ops = [Operation(op)] + tx = Signed_Transaction(ref_block_num=ref_block_num, + ref_block_prefix=ref_block_prefix, + expiration=expiration, + operations=ops) + tx = tx.sign([wif], chain=prefix) + tx.verify([PrivateKey(wif).pubkey], "BTS") + txWire = hexlify(bytes(tx)).decode("ascii") + compare = ("f68585abf4dce7c8045701016400000000000000001da08601000" + "0000000001027000000000000693d343c57000000011f75cbfd49" + "ae8d9b04af76cc0a7de8b6e30b71167db7fe8e2197ef9d858df18" + "77043493bc24ffdaaffe592357831c978fd8a296b913979f106de" + "be940d60d77b50") + self.assertEqual(compare[:-130], txWire[:-130]) + + def test_limit_order_cancel(self): + op = 
operations.Limit_order_cancel(**{ + "fee": {"amount": 0, + "asset_id": "1.3.0" + }, + "fee_paying_account": "1.2.104", + "order": "1.7.51840", + "extensions": [] + }) + ops = [Operation(op)] + tx = Signed_Transaction(ref_block_num=ref_block_num, + ref_block_prefix=ref_block_prefix, + expiration=expiration, + operations=ops) + tx = tx.sign([wif], chain=prefix) + tx.verify([PrivateKey(wif).pubkey], "BTS") + txWire = hexlify(bytes(tx)).decode("ascii") + compare = ("f68585abf4dce7c8045701020000000000000000006880950300000" + "11f3fb754814f3910c1a8845486b86057d2b4588ae559b4c3810828" + "c0d4cbec0e5b23517937cd7e0cc5ee8999d0777af7fe56d3c4b2e58" + "7421bfb7400d4efdae97a") + self.assertEqual(compare[:-130], txWire[:-130]) + + def test_proposal_update(self): + op = operations.Proposal_update(**{ + 'fee_paying_account': "1.2.1", + 'proposal': "1.10.90", + 'active_approvals_to_add': ["1.2.5"], + "fee": objects.Asset(amount=12512, asset_id="1.3.0"), + }) + ops = [Operation(op)] + tx = Signed_Transaction(ref_block_num=ref_block_num, + ref_block_prefix=ref_block_prefix, + expiration=expiration, + operations=ops) + tx = tx.sign([wif], chain=prefix) + tx.verify([PrivateKey(wif).pubkey], "BTS") + txWire = hexlify(bytes(tx)).decode("ascii") + compare = ("f68585abf4dce7c804570117e03000000000000000015a01050000000" + "000000001203255378db6dc19443e74421c954ad7fdcf23f4ea45fe4f" + "e5a1b078a0f94fb529594819c9799d68efa5cfb5b271a9333a2f516ca" + "4fb5093226275f48a42d9e8cf") + self.assertEqual(compare[:-130], txWire[:-130]) + + def test_Transfer(self): + pub = format(account.PrivateKey(wif).pubkey, prefix) + from_account_id = "1.2.0" + to_account_id = "1.2.1" + amount = 1000000 + asset_id = "1.3.4" + message = "abcdefgABCDEFG0123456789" + nonce = "5862723643998573708" + + fee = objects.Asset(amount=0, asset_id="1.3.0") + amount = objects.Asset(amount=int(amount), asset_id=asset_id) + encrypted_memo = memo.encode_memo( + account.PrivateKey(wif), + account.PublicKey(pub, prefix=prefix), + nonce, + message + ) + memoStruct = { + "from": pub, + "to": pub, + "nonce": nonce, + "message": encrypted_memo, + "chain": prefix + } + memoObj = objects.Memo(**memoStruct) + op = operations.Transfer(**{ + "fee": fee, + "from": from_account_id, + "to": to_account_id, + "amount": amount, + "memo": memoObj + }) + ops = [Operation(op)] + tx = Signed_Transaction(ref_block_num=ref_block_num, + ref_block_prefix=ref_block_prefix, + expiration=expiration, + operations=ops) + tx = tx.sign([wif], chain=prefix) + tx.verify([PrivateKey(wif).pubkey], "BTS") + txWire = hexlify(bytes(tx)).decode("ascii") + + compare = ("f68585abf4dce7c804570100000000000000000000000140420" + "f0000000000040102c0ded2bc1f1305fb0faac5e6c03ee3a192" + "4234985427b6167ca569d13df435cf02c0ded2bc1f1305fb0fa" + "ac5e6c03ee3a1924234985427b6167ca569d13df435cf8c94d1" + "9817945c5120fa5b6e83079a878e499e2e52a76a7739e9de409" + "86a8e3bd8a68ce316cee50b210000011f39e3fa7071b795491e" + "3b6851d61e7c959be92cc7deb5d8491cf1c3c8c99a1eb44553c" + "348fb8f5001a78b18233ac66727e32fc776d48e92d9639d64f6" + "8e641948") + self.assertEqual(compare[:-130], txWire[:-130]) + + def test_pricefeed(self): + feed = objects.PriceFeed(**{ + "settlement_price": objects.Price( + base=objects.Asset(amount=214211, asset_id="1.3.0"), + quote=objects.Asset(amount=1241, asset_id="1.3.14"), + ), + "core_exchange_rate": objects.Price( + base=objects.Asset(amount=1241, asset_id="1.3.0"), + quote=objects.Asset(amount=6231, asset_id="1.3.14"), + ), + "maximum_short_squeeze_ratio": 1100, + "maintenance_collateral_ratio": 
1750, + }) + + op = operations.Asset_publish_feed( + fee=objects.Asset(amount=100, asset_id="1.3.0"), + publisher="1.2.0", + asset_id="1.3.3", + feed=feed + ) + ops = [Operation(op)] + tx = Signed_Transaction(ref_block_num=ref_block_num, + ref_block_prefix=ref_block_prefix, + expiration=expiration, + operations=ops) + tx = tx.sign([wif], chain=prefix) + tx.verify([PrivateKey(wif).pubkey], "BTS") + txWire = hexlify(bytes(tx)).decode("ascii") + + compare = ("f68585abf4dce7c8045701136400000000000000000003c344030" + "00000000000d9040000000000000ed6064c04d904000000000000" + "0057180000000000000e0000012009e13f9066fedc3c8c1eb2ac3" + "3b15dc67ecebf708890d0f8ab62ec8283d1636002315a189f1f5a" + "a8497b41b8e6bb7c4dc66044510fae25d8f6aebb02c7cdef10") + self.assertEqual(compare[:-130], txWire[:-130]) + + def test_jsonLoading(self): + data1 = {"expiration": expiration, + "ref_block_num": ref_block_num, + "ref_block_prefix": ref_block_prefix, + "extensions": [], + "operations": [[0, + {"amount": {"amount": 1000000, "asset_id": "1.3.4"}, + "extensions": [], + "fee": {"amount": 0, "asset_id": "1.3.0"}, + "from": "1.2.0", + "memo": {"from": "BTS6MRyAjQq8ud7hVNYcfnVPJqcVpscN5So8BhtHuGYqET5GDW5CV", + "message": "fa5b6e83079a878e499e2e52a76a7739e9de40986a8e3bd8a68ce316cee50b21", + "nonce": 5862723643998573708, + "to": "BTS6MRyAjQq8ud7hVNYcfnVPJqcVpscN5So8BhtHuGYqET5GDW5CV"}, + "to": "1.2.1"}]], + "signatures": ["1f6c1e8df5faf18c3b057ce713ec92f9" + + "b487c1ba58138daabc0038741b402c93" + + "0d63d8d63861740215b4f65eb8ac9185" + + "a3987f8239b962181237f47189e21102" + + "af"]} + a = Signed_Transaction(data1.copy()) + data2 = a.json() + + check1 = data1 + check2 = data2 + for key in ["expiration", "extensions", "ref_block_num", "ref_block_prefix", "signatures"]: + self.assertEqual(check1[key], check2[key]) + + check1 = data1["operations"][0][1] + check2 = data2["operations"][0][1] + for key in ["from", "to"]: + self.assertEqual(check1[key], check2[key]) + + check1 = data1["operations"][0][1]["memo"] + check2 = data2["operations"][0][1]["memo"] + for key in check1: + self.assertEqual(check1[key], check2[key]) + + def test_fee_pool(self): + s = {"fee": {"amount": 10001, + "asset_id": "1.3.0" + }, + "from_account": "1.2.282", + "asset_id": "1.3.32", + "amount": 15557238, + "extensions": [] + } + op = operations.Asset_fund_fee_pool(**s) + ops = [Operation(op)] + tx = Signed_Transaction(ref_block_num=ref_block_num, + ref_block_prefix=ref_block_prefix, + expiration=expiration, + operations=ops) + tx = tx.sign([wif], chain=prefix) + tx.verify([PrivateKey(wif).pubkey], "BTS") + txWire = hexlify(bytes(tx)).decode("ascii") + compare = ("f68585abf4dce7c8045701101127000000000000009a02207662" + "ed00000000000000011f39f7dc7745076c9c7e612d40c68ee92d" + "3f4b2696b1838037ce2a35ac259883ba6c6c49d91ad05a7e78d8" + "0bb83482c273dbbc911587487bf468b85fb4f537da3d") + self.assertEqual(compare[:-130], txWire[:-130]) + + def test_override_transfer(self): + s = {"fee": {"amount": 0, + "asset_id": "1.3.0"}, + "issuer": "1.2.29", + "from": "1.2.104", + "to": "1.2.29", + "amount": {"amount": 100000, + "asset_id": "1.3.105"}, + "extensions": [] + } + op = operations.Override_transfer(**s) + ops = [Operation(op)] + tx = Signed_Transaction(ref_block_num=ref_block_num, + ref_block_prefix=ref_block_prefix, + expiration=expiration, + operations=ops) + tx = tx.sign([wif], chain=prefix) + tx.verify([PrivateKey(wif).pubkey], "BTS") + txWire = hexlify(bytes(tx)).decode("ascii") + compare = ("f68585abf4dce7c8045701260000000000000000001d681da086" + 
"01000000000069000000012030cc81722c3e67442d2f59deba18" + "8f6079c8ba2d8318a642e6a70a125655515f20e2bd3adb2ea886" + "cdbc7f6590c7f8c80818d9176d9085c176c736686ab6c9fd") + self.assertEqual(compare[:-130], txWire[:-130]) + + def test_create_account(self): + s = {"fee": {"amount": 1467634, + "asset_id": "1.3.0" + }, + "registrar": "1.2.33", + "referrer": "1.2.27", + "referrer_percent": 3, + "name": "foobar-f124", + "owner": {"weight_threshold": 1, + "account_auths": [], + 'key_auths': [['BTS6pbVDAjRFiw6fkiKYCrkz7PFeL7XNAfefrsREwg8MKpJ9VYV9x', + 1], [ + 'BTS6zLNtyFVToBsBZDsgMhgjpwysYVbsQD6YhP3kRkQhANUB4w7Qp', + 1]], + "address_auths": [] + }, + "active": {"weight_threshold": 1, + "account_auths": [], + 'key_auths': [['BTS6pbVDAjRFiw6fkiKYCrkz7PFeL7XNAfefrsREwg8MKpJ9VYV9x', + 1], [ + 'BTS6zLNtyFVToBsBZDsgMhgjpwysYVbsQD6YhP3kRkQhANUB4w7Qp', + 1], [ + 'BTS8CemMDjdUWSV5wKotEimhK6c4dY7p2PdzC2qM1HpAP8aLtZfE7', + 1 + ]], + "address_auths": [] + }, + "options": {"memo_key": "BTS5TPTziKkLexhVKsQKtSpo4bAv5RnB8oXcG4sMHEwCcTf3r7dqE", + "voting_account": "1.2.5", + "num_witness": 0, + "num_committee": 0, + "votes": [], + "extensions": [] + }, + "extensions": {} + } + op = operations.Account_create(**s) + ops = [Operation(op)] + tx = Signed_Transaction(ref_block_num=ref_block_num, + ref_block_prefix=ref_block_prefix, + expiration=expiration, + operations=ops) + tx = tx.sign([wif], chain=prefix) + tx.verify([PrivateKey(wif).pubkey], "BTS") + txWire = hexlify(bytes(tx)).decode("ascii") + compare = ("f68585abf4dce7c804570105f26416000000000000211b03000b666f" + "6f6261722d6631323401000000000202fe8cc11cc8251de6977636b5" + "5c1ab8a9d12b0b26154ac78e56e7c4257d8bcf6901000314aa202c91" + "58990b3ec51a1aa49b2ab5d300c97b391df3beb34bb74f3c62699e01" + "000001000000000303b453f46013fdbccb90b09ba169c388c34d8445" + "4a3b9fbec68d5a7819a734fca0010002fe8cc11cc8251de6977636b5" + "5c1ab8a9d12b0b26154ac78e56e7c4257d8bcf6901000314aa202c91" + "58990b3ec51a1aa49b2ab5d300c97b391df3beb34bb74f3c62699e01" + "0000024ab336b4b14ba6d881675d1c782912783c43dbbe31693aa710" + "ac1896bd7c3d61050000000000000000011f61ad276120bc3f189296" + "2bfff7db5e8ce04d5adec9309c80529e3a978a4fa1073225a6d56929" + "e34c9d2a563e67a8f4a227e4fadb4a3bb6ec91bfdf4e57b80efd") + self.assertEqual(compare[:-130], txWire[:-130]) + + def test_create_proposal(self): + op = operations.Proposal_create(**{ + "fee": {"amount": 0, + "asset_id": "1.3.0" + }, + "fee_paying_account": "1.2.0", + "expiration_time": "1970-01-01T00:00:00", + "proposed_ops": [{ + "op": [ + 0, {"fee": {"amount": 0, + "asset_id": "1.3.0" + }, + "from": "1.2.0", + "to": "1.2.0", + "amount": {"amount": 0, + "asset_id": "1.3.0" + }, + "extensions": []}]}], + "extensions": [] + }) + ops = [Operation(op)] + tx = Signed_Transaction(ref_block_num=ref_block_num, + ref_block_prefix=ref_block_prefix, + expiration=expiration, + operations=ops) + tx = tx.sign([wif], chain=prefix) + tx.verify([PrivateKey(wif).pubkey], "BTS") + txWire = hexlify(bytes(tx)).decode("ascii") + compare = ("f68585abf4dce7c80457011600000000000000000000000000" + "00010000000000000000000000000000000000000000000000" + "00000001204baf7f11a7ff12337fc097ac6e82e7b68f82f02c" + "c7e24231637c88a91ae5716674acec8a1a305073165c65e520" + "a64769f5f62c0301ce21ab4f7c67a6801b4266") + self.assertEqual(compare[:-130], txWire[:-130]) + + def test_asset_update(self): + op = operations.Asset_update(**{ + "fee": {"amount": 0, + "asset_id": "1.3.0"}, + "issuer": "1.2.0", + "asset_to_update": "1.3.0", + "new_options": { + "max_supply": "1000000000000000", + 
"market_fee_percent": 0, + "max_market_fee": "1000000000000000", + "issuer_permissions": 79, + "flags": 0, + "core_exchange_rate": { + "base": {"amount": 0, + "asset_id": "1.3.0"}, + "quote": {"amount": 0, + "asset_id": "1.3.0"} + }, + "whitelist_authorities": ["1.2.12", "1.2.13"], + "blacklist_authorities": ["1.2.10", "1.2.11"], + "whitelist_markets": ["1.3.10", "1.3.11"], + "blacklist_markets": ["1.3.12", "1.3.13"], + "description": "Foobar", + "extensions": [] + }, + "extensions": [] + }) + ops = [Operation(op)] + tx = Signed_Transaction(ref_block_num=ref_block_num, + ref_block_prefix=ref_block_prefix, + expiration=expiration, + operations=ops) + tx = tx.sign([wif], chain=prefix) + tx.verify([PrivateKey(wif).pubkey], "BTS") + txWire = hexlify(bytes(tx)).decode("ascii") + compare = ("f68585abf4dce7c80457010b00000000000000000000000000" + "80c6a47e8d030000000080c6a47e8d03004f00000000000000" + "0000000000000000000000000000020c0d020a0b020a0b020c" + "0d06466f6f626172000000011f5bd6a206d210d1d78eb423e0" + "c2362013aa80830a8e61e5df2570eac05f1c57a4165c99099f" + "c2e97ecbf2b46014c96a6f99cff8d20f55a6042929136055e5" + "ad10") + self.assertEqual(compare[:-130], txWire[:-130]) + + def compareConstructedTX(self): + # def test_online(self): + # self.maxDiff = None + op = operations.Asset_update(**{ + "fee": {"amount": 0, + "asset_id": "1.3.0"}, + "issuer": "1.2.0", + "asset_to_update": "1.3.0", + "new_options": { + "max_supply": "1000000000000000", + "market_fee_percent": 0, + "max_market_fee": "1000000000000000", + "issuer_permissions": 79, + "flags": 0, + "core_exchange_rate": { + "base": {"amount": 0, + "asset_id": "1.3.0"}, + "quote": {"amount": 0, + "asset_id": "1.3.0"} + }, + "whitelist_authorities": ["1.2.12", "1.2.13"], + "blacklist_authorities": ["1.2.10", "1.2.11"], + "whitelist_markets": ["1.3.10", "1.3.11"], + "blacklist_markets": ["1.3.12", "1.3.13"], + "description": "Foobar", + "extensions": [] + }, + "extensions": [] + }) + ops = [Operation(op)] + tx = Signed_Transaction( + ref_block_num=ref_block_num, + ref_block_prefix=ref_block_prefix, + expiration=expiration, + operations=ops + ) + tx = tx.sign([wif], chain=prefix) + tx.verify([PrivateKey(wif).pubkey], "BTS") + txWire = hexlify(bytes(tx)).decode("ascii") + print("=" * 80) + pprint(tx.json()) + print("=" * 80) + + from grapheneapi.grapheneapi import GrapheneAPI + rpc = GrapheneAPI("localhost", 8092) + compare = rpc.serialize_transaction(tx.json()) + print(compare[:-130]) + print(txWire[:-130]) + print(txWire[:-130] == compare[:-130]) + self.assertEqual(compare[:-130], txWire[:-130]) + + def compareNewWire(self): + # def test_online(self): + # self.maxDiff = None + + from grapheneapi.grapheneapi import GrapheneAPI + rpc = GrapheneAPI("localhost", 8092) + tx = rpc.create_account("xeroc", "fsafaasf", "", False) + pprint(tx) + compare = rpc.serialize_transaction(tx) + ref_block_num = tx["ref_block_num"] + ref_block_prefix = tx["ref_block_prefix"] + expiration = tx["expiration"] + + ops = [Operation(operations.Account_create(**tx["operations"][0][1]))] + tx = Signed_Transaction(ref_block_num=ref_block_num, + ref_block_prefix=ref_block_prefix, + expiration=expiration, + operations=ops) + tx = tx.sign([wif], chain=prefix) + tx.verify([PrivateKey(wif).pubkey], "BTS") + txWire = hexlify(bytes(tx)).decode("ascii") + print("\n") + print(txWire[:-130]) + print(compare[:-130]) + # self.assertEqual(compare[:-130], txWire[:-130]) + +if __name__ == '__main__': + t = Testcases() + t.compareConstructedTX()