def get_pairs_with_orders(addresses=None, max_pairs=12):
    """Return asset pairs that currently have open orders, busiest first.

    :param addresses: optional list of source addresses to restrict the
        search to; None or empty means "all addresses".
    :param max_pairs: maximum number of pairs to return.
    :return: list of dicts with 'base_asset', 'quote_asset' and
        'my_order_count'; the WDC/XBJ pair, when present, is always first.
    """
    # None default instead of a mutable list shared across calls.
    if addresses is None:
        addresses = []
    pairs_with_orders = []
    # Only add the source filter when addresses were actually given; an
    # empty "AND source IN ()" clause would match no rows and hide all pairs.
    if addresses:
        sources = '''AND source IN ({})'''.format(','.join(['?'] * len(addresses)))
    else:
        sources = ''
    sql = '''SELECT (MIN(give_asset, get_asset) || '/' || MAX(give_asset, get_asset)) AS pair, COUNT(*) AS order_count FROM orders WHERE give_asset != get_asset AND status = ? {} GROUP BY pair ORDER BY order_count DESC LIMIT ?'''.format(sources)
    bindings = ['open'] + addresses + [max_pairs]
    my_pairs = util.call_jsonrpc_api('sql', {'query': sql, 'bindings': bindings})['result']
    for my_pair in my_pairs:
        base_asset, quote_asset = util.assets_to_asset_pair(*tuple(my_pair['pair'].split("/")))
        top_pair = {
            'base_asset': base_asset,
            'quote_asset': quote_asset,
            'my_order_count': my_pair['order_count']
        }
        if my_pair['pair'] == 'WDC/XBJ': # XBJ/WDC always in first
            pairs_with_orders.insert(0, top_pair)
        else:
            pairs_with_orders.append(top_pair)
    return pairs_with_orders
def get_pairs_with_orders(addresses=None, max_pairs=12):
    """Return asset pairs that currently have open orders, busiest first.

    addresses: optional list of source addresses to restrict the search to;
        None or empty means "all addresses".
    max_pairs: maximum number of pairs to return.
    Returns a list of {'base_asset', 'quote_asset', 'my_order_count'} dicts;
    the DOGE/XDP pair, when present, is always placed first.
    """
    # None default instead of a mutable list shared across calls.
    if addresses is None:
        addresses = []
    pairs_with_orders = []
    # Only add the source filter when addresses were actually given; an
    # empty "AND source IN ()" clause would match no rows and hide all pairs.
    if addresses:
        sources = """AND source IN ({})""".format(",".join(["?"] * len(addresses)))
    else:
        sources = ""
    sql = """SELECT (MIN(give_asset, get_asset) || '/' || MAX(give_asset, get_asset)) AS pair, COUNT(*) AS order_count FROM orders WHERE give_asset != get_asset AND status = ? {} GROUP BY pair ORDER BY order_count DESC LIMIT ?""".format(
        sources
    )
    bindings = ["open"] + addresses + [max_pairs]
    my_pairs = util.call_jsonrpc_api("sql", {"query": sql, "bindings": bindings})["result"]
    for my_pair in my_pairs:
        base_asset, quote_asset = util.assets_to_asset_pair(*tuple(my_pair["pair"].split("/")))
        top_pair = {"base_asset": base_asset, "quote_asset": quote_asset, "my_order_count": my_pair["order_count"]}
        if my_pair["pair"] == "DOGE/XDP":  # XDP/DOGE always in first
            pairs_with_orders.insert(0, top_pair)
        else:
            pairs_with_orders.append(top_pair)
    return pairs_with_orders
def get_pairs_with_orders(addresses=None, max_pairs=12):
    """Return asset pairs that currently have open orders, busiest first.

    :param addresses: optional list of source addresses to restrict the
        search to; None or empty means "all addresses".
    :param max_pairs: maximum number of pairs to return.
    :return: list of dicts with 'base_asset', 'quote_asset' and
        'my_order_count'; the DOGE/XDP pair, when present, is always first.
    """
    # None default instead of a mutable list shared across calls.
    if addresses is None:
        addresses = []
    pairs_with_orders = []
    # Only add the source filter when addresses were actually given; an
    # empty "AND source IN ()" clause would match no rows and hide all pairs.
    if addresses:
        sources = '''AND source IN ({})'''.format(','.join(['?'] * len(addresses)))
    else:
        sources = ''
    sql = '''SELECT (MIN(give_asset, get_asset) || '/' || MAX(give_asset, get_asset)) AS pair, COUNT(*) AS order_count FROM orders WHERE give_asset != get_asset AND status = ? {} GROUP BY pair ORDER BY order_count DESC LIMIT ?'''.format(sources)
    bindings = ['open'] + addresses + [max_pairs]
    my_pairs = util.call_jsonrpc_api('sql', {'query': sql, 'bindings': bindings})['result']
    for my_pair in my_pairs:
        base_asset, quote_asset = util.assets_to_asset_pair(*tuple(my_pair['pair'].split("/")))
        top_pair = {
            'base_asset': base_asset,
            'quote_asset': quote_asset,
            'my_order_count': my_pair['order_count']
        }
        if my_pair['pair'] == 'DOGE/XDP': # XDP/DOGE always in first
            pairs_with_orders.insert(0, top_pair)
        else:
            pairs_with_orders.append(top_pair)
    return pairs_with_orders
def get_market_orders(asset1, asset2, addresses=[], supplies=None, min_fee_provided=0.95, max_fee_required=0.95):
    # NOTE(review): this copy of the function is truncated in this chunk --
    # the body below stops inside the XTO fee checks; the remainder (price
    # computation, aggregation, return) is not visible here.
    """Collect open orders for the asset1/asset2 market (XTO variant).

    Queries counterpartyd over JSON-RPC for open orders on the pair,
    optionally restricted to the given source addresses, and screens
    XTO-side orders by their provided/required fee ratios.
    """
    base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
    if not supplies:
        supplies = get_assets_supply([asset1, asset2])
    market_orders = []
    sql = '''SELECT orders.*, blocks.block_time FROM orders INNER JOIN blocks ON orders.block_index=blocks.block_index WHERE status = ? '''
    bindings = ['open']
    if len(addresses) > 0:
        # one "?" placeholder per address
        sql += '''AND source IN ({}) '''.format(','.join(
            ['?' for e in range(0, len(addresses))]))
        bindings += addresses
    sql += '''AND give_remaining > 0 AND give_asset IN (?, ?) AND get_asset IN (?, ?) ORDER BY tx_index DESC'''
    bindings += [asset1, asset2, asset1, asset2]
    orders = util.call_jsonrpc_api('sql', {
        'query': sql,
        'bindings': bindings
    })['result']
    for order in orders:
        user_order = {}
        exclude = False
        if order['give_asset'] == 'XTO':
            try:
                # fee expressed as a percentage of the quantity given
                fee_provided = order['fee_provided'] / (
                    order['give_quantity'] / 100)
                user_order['fee_provided'] = format(
                    D(order['fee_provided']) /
                    (D(order['give_quantity']) / D(100)), '.2f')
            except Exception, e:
                # on any arithmetic error (e.g. division by zero), force exclusion
                fee_provided = min_fee_provided - 1  # exclude
            exclude = fee_provided < min_fee_provided
        elif order['get_asset'] == 'XTO':
            try:
                fee_required = order['fee_required'] / (order['get_quantity'] / 100)
                user_order['fee_required'] = format(
                    D(order['fee_required']) /
                    (D(order['get_quantity']) / D(100)), '.2f')
            except Exception, e:
                fee_required = max_fee_required + 1  # exclude
def get_market_details(asset1, asset2, min_fee_provided=0.95, max_fee_required=0.95, mongo_db=None):
    """Build a full market summary for the asset1/asset2 pair.

    :param asset1, asset2: the two assets of the pair (any order; they are
        normalized into base/quote).
    :param min_fee_provided: minimum fee ratio for orders to be listed.
    :param max_fee_required: maximum fee ratio for orders to be listed.
    :param mongo_db: optional Mongo handle used to fetch extended asset info.
    :return: dict with price/trend data, order book and last trades.
    """
    base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
    supplies = get_assets_supply([base_asset, quote_asset])
    price, trend, price24h, progression = get_price_movement(base_asset, quote_asset, supplies=supplies)

    buy_orders = []
    sell_orders = []
    market_orders = get_market_orders(base_asset, quote_asset, supplies=supplies,
                                      min_fee_provided=min_fee_provided,
                                      max_fee_required=max_fee_required)
    for order in market_orders:
        if order['type'] == 'SELL':
            sell_orders.append(order)
        elif order['type'] == 'BUY':
            buy_orders.append(order)

    last_trades = get_market_trades(base_asset, quote_asset, supplies=supplies)

    ext_info = False
    if mongo_db:
        ext_info = mongo_db.asset_extended_info.find_one({'asset': base_asset}, {'_id': 0})
        if ext_info and 'info_data' in ext_info:
            ext_info = ext_info['info_data']
        else:
            ext_info = False

    return {
        'base_asset': base_asset,
        'quote_asset': quote_asset,
        'price': format(price, ".8f"),
        'trend': trend,
        'progression': format(progression, ".2f"),
        'price_24h': format(price24h, ".8f"),
        'supply': supplies[base_asset][0],
        'base_asset_divisible': supplies[base_asset][1],
        'quote_asset_divisible': supplies[quote_asset][1],
        # 'price' is a '.8f'-formatted string; compare as Decimal so that
        # e.g. "10.00000000" does not sort before "9.00000000"
        # (lexicographic ordering bug in the original).
        'buy_orders': sorted(buy_orders, key=lambda x: D(x['price']), reverse=True),
        'sell_orders': sorted(sell_orders, key=lambda x: D(x['price'])),
        'last_trades': last_trades,
        'base_asset_infos': ext_info
    }
def get_market_price_summary(asset1, asset2, with_last_trades=0, start_dt=None, end_dt=None):
    """Gets a synthesized trading "market price" for a specified asset pair (if available), as well as additional info.

    If no suitable trade data exists, None is returned (the body returns
    None, not False, despite earlier wording).

    :param with_last_trades: how many recent trades to include in the
        result (0-30); 0 means none.
    :param start_dt / end_dt: optional datetime window; defaults to the
        last 10 days up to now (UTC).
    :raises Exception: on an invalid with_last_trades value or unknown asset.
    """
    mongo_db = config.mongo_db
    if not end_dt:
        end_dt = datetime.datetime.utcnow()
    if not start_dt:
        start_dt = end_dt - datetime.timedelta(days=10)  #default to 10 days in the past
    #look for the last max 6 trades within the past 10 day window
    base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
    base_asset_info = mongo_db.tracked_assets.find_one({'asset': base_asset})
    quote_asset_info = mongo_db.tracked_assets.find_one({'asset': quote_asset})
    if not isinstance(with_last_trades, int) or with_last_trades < 0 or with_last_trades > 30:
        raise Exception("Invalid with_last_trades")
    if not base_asset_info or not quote_asset_info:
        raise Exception("Invalid asset(s)")
    # newest trades first, capped at whichever is larger: the number of
    # points used to derive the price, or the trades the caller asked for
    last_trades = mongo_db.trades.find({
        "base_asset": base_asset,
        "quote_asset": quote_asset,
        'block_time': {
            "$gte": start_dt,
            "$lte": end_dt
        }
    }, {'_id': 0, 'block_index': 1, 'block_time': 1, 'unit_price': 1, 'base_quantity_normalized': 1, 'quote_quantity_normalized': 1}
    ).sort("block_time", pymongo.DESCENDING).limit(max(config.MARKET_PRICE_DERIVE_NUM_POINTS, with_last_trades))
    if not last_trades.count():
        return None  #no suitable trade data to form a market price (return None, NOT False here)
    last_trades = list(last_trades)
    last_trades.reverse()  #from newest to oldest
    # weight each unit price by the combined normalized volume of the trade
    market_price = get_market_price(
        [last_trades[i]['unit_price'] for i in xrange(min(len(last_trades), config.MARKET_PRICE_DERIVE_NUM_POINTS))],
        [(last_trades[i]['base_quantity_normalized'] + last_trades[i]['quote_quantity_normalized']) for i in xrange(min(len(last_trades), config.MARKET_PRICE_DERIVE_NUM_POINTS))])
    result = {
        'market_price': float(D(market_price)),
        'base_asset': base_asset,
        'quote_asset': quote_asset,
    }
    if with_last_trades:
        #[0]=block_time, [1]=unit_price, [2]=base_quantity_normalized, [3]=quote_quantity_normalized, [4]=block_index
        result['last_trades'] = [[
            t['block_time'], t['unit_price'], t['base_quantity_normalized'],
            t['quote_quantity_normalized'], t['block_index']
        ] for t in last_trades]
    else:
        result['last_trades'] = []
    return result
def get_market_orders(asset1, asset2, addresses=[], supplies=None, min_fee_provided=0.95, max_fee_required=0.95):
    # NOTE(review): this copy of the function is truncated in this chunk --
    # the body below stops inside the DOGE fee checks; the remainder (price
    # computation, aggregation, return) is not visible here.
    """Collect open orders for the asset1/asset2 market (DOGE variant).

    Queries counterpartyd over JSON-RPC for open orders on the pair,
    optionally restricted to the given source addresses, and screens
    DOGE-side orders by their provided/required fee ratios.
    """
    base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
    if not supplies:
        supplies = get_assets_supply([asset1, asset2])
    market_orders = []
    sql = """SELECT orders.*, blocks.block_time FROM orders INNER JOIN blocks ON orders.block_index=blocks.block_index WHERE status = ? """
    bindings = ["open"]
    if len(addresses) > 0:
        # one "?" placeholder per address
        sql += """AND source IN ({}) """.format(",".join(["?" for e in range(0, len(addresses))]))
        bindings += addresses
    sql += """AND give_remaining > 0 AND give_asset IN (?, ?) AND get_asset IN (?, ?) ORDER BY tx_index DESC"""
    bindings += [asset1, asset2, asset1, asset2]
    orders = util.call_jsonrpc_api("sql", {"query": sql, "bindings": bindings})["result"]
    for order in orders:
        user_order = {}
        exclude = False
        if order["give_asset"] == "DOGE":
            try:
                # fee expressed as a percentage of the quantity given
                fee_provided = order["fee_provided"] / (order["give_quantity"] / 100)
                user_order["fee_provided"] = format(
                    D(order["fee_provided"]) / (D(order["give_quantity"]) / D(100)), ".2f"
                )
            except Exception, e:
                # on any arithmetic error, force exclusion
                fee_provided = min_fee_provided - 1  # exclude
            exclude = fee_provided < min_fee_provided
        elif order["get_asset"] == "DOGE":
            try:
                fee_required = order["fee_required"] / (order["get_quantity"] / 100)
                user_order["fee_required"] = format(
                    D(order["fee_required"]) / (D(order["get_quantity"]) / D(100)), ".2f"
                )
            except Exception, e:
                fee_required = max_fee_required + 1  # exclude
def get_market_orders(asset1, asset2, addresses=[], supplies=None, min_fee_provided=0.95, max_fee_required=0.95):
    # NOTE(review): this copy of the function is truncated in this chunk --
    # the body below stops inside the WDC fee checks; the remainder (price
    # computation, aggregation, return) is not visible here. buy_orders /
    # sell_orders are initialized but their use is in the missing tail.
    """Collect open orders for the asset1/asset2 market (WDC variant).

    Queries counterpartyd over JSON-RPC for open orders on the pair,
    optionally restricted to the given source addresses, and screens
    WDC-side orders by their provided/required fee ratios.
    """
    base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
    if not supplies:
        supplies = get_assets_supply([asset1, asset2])
    market_orders = []
    buy_orders = []
    sell_orders = []
    sql = '''SELECT orders.*, blocks.block_time FROM orders INNER JOIN blocks ON orders.block_index=blocks.block_index WHERE status = ? '''
    bindings = ['open']
    if len(addresses) > 0:
        # one "?" placeholder per address
        sql += '''AND source IN ({}) '''.format(','.join(['?' for e in range(0,len(addresses))]))
        bindings += addresses
    sql += '''AND give_remaining > 0 AND give_asset IN (?, ?) AND get_asset IN (?, ?) ORDER BY tx_index DESC'''
    bindings += [asset1, asset2, asset1, asset2]
    orders = util.call_jsonrpc_api('sql', {'query': sql, 'bindings': bindings})['result']
    for order in orders:
        market_order = {}
        exclude = False
        if order['give_asset'] == 'WDC':
            try:
                # fee expressed as a percentage of the quantity given
                fee_provided = order['fee_provided'] / (order['give_quantity'] / 100)
                market_order['fee_provided'] = format(D(order['fee_provided']) / (D(order['give_quantity']) / D(100)), '.2f')
            except Exception, e:
                # on any arithmetic error, force exclusion
                fee_provided = min_fee_provided - 1 # exclude
            exclude = fee_provided < min_fee_provided
        elif order['get_asset'] == 'WDC':
            try:
                fee_required = order['fee_required'] / (order['get_quantity'] / 100)
                market_order['fee_required'] = format(D(order['fee_required']) / (D(order['get_quantity']) / D(100)), '.2f')
            except Exception, e:
                fee_required = max_fee_required + 1 # exclude
def get_market_details(asset1, asset2, min_fee_provided=0.95, max_fee_required=0.95, mongo_db=None):
    """Build a full market summary for the asset1/asset2 pair.

    asset1, asset2: the two assets of the pair (any order; normalized
        into base/quote).
    min_fee_provided / max_fee_required: fee-ratio bounds for listing orders.
    mongo_db: optional Mongo handle used to fetch extended asset info.
    Returns a dict with price/trend data, order book and last trades.
    """
    base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
    supplies = get_assets_supply([base_asset, quote_asset])
    price, trend, price24h, progression = get_price_movement(base_asset, quote_asset, supplies=supplies)

    buy_orders = []
    sell_orders = []
    market_orders = get_market_orders(
        base_asset, quote_asset, supplies=supplies, min_fee_provided=min_fee_provided, max_fee_required=max_fee_required
    )
    for order in market_orders:
        if order["type"] == "SELL":
            sell_orders.append(order)
        elif order["type"] == "BUY":
            buy_orders.append(order)

    last_trades = get_market_trades(base_asset, quote_asset, supplies=supplies)

    ext_info = False
    if mongo_db:
        ext_info = mongo_db.asset_extended_info.find_one({"asset": base_asset}, {"_id": 0})
        if ext_info and "info_data" in ext_info:
            ext_info = ext_info["info_data"]
        else:
            ext_info = False

    return {
        "base_asset": base_asset,
        "quote_asset": quote_asset,
        "price": format(price, ".8f"),
        "trend": trend,
        "progression": format(progression, ".2f"),
        "price_24h": format(price24h, ".8f"),
        "supply": supplies[base_asset][0],
        "base_asset_divisible": supplies[base_asset][1],
        "quote_asset_divisible": supplies[quote_asset][1],
        # "price" is a '.8f'-formatted string; compare as Decimal so that
        # e.g. "10.00000000" does not sort before "9.00000000"
        # (lexicographic ordering bug in the original).
        "buy_orders": sorted(buy_orders, key=lambda x: D(x["price"]), reverse=True),
        "sell_orders": sorted(sell_orders, key=lambda x: D(x["price"])),
        "last_trades": last_trades,
        "base_asset_infos": ext_info,
    }
def get_market_details(asset1, asset2, min_fee_provided=0.95, max_fee_required=0.95, mongo_db=None):
    """Build a full market summary for the asset1/asset2 pair.

    :param asset1, asset2: the two assets of the pair (any order; they are
        normalized into base/quote).
    :param min_fee_provided: minimum fee ratio for orders to be listed.
    :param max_fee_required: maximum fee ratio for orders to be listed.
    :param mongo_db: optional Mongo handle used to fetch extended asset info.
    :return: dict with price/trend data, order book and last trades.
    """
    # (removed an unused "yesterday" timestamp local that was never read)
    base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
    supplies = get_assets_supply([base_asset, quote_asset])
    price, trend, price24h, progression = get_price_movement(base_asset, quote_asset, supplies=supplies)

    buy_orders = []
    sell_orders = []
    market_orders = get_market_orders(base_asset, quote_asset, supplies=supplies,
                                      min_fee_provided=min_fee_provided,
                                      max_fee_required=max_fee_required)
    for order in market_orders:
        if order['type'] == 'SELL':
            sell_orders.append(order)
        elif order['type'] == 'BUY':
            buy_orders.append(order)

    last_trades = get_market_trades(base_asset, quote_asset, supplies=supplies)

    ext_info = False
    if mongo_db:
        ext_info = mongo_db.asset_extended_info.find_one({'asset': base_asset}, {'_id': 0})
        if ext_info and 'info_data' in ext_info:
            ext_info = ext_info['info_data']
        else:
            ext_info = False

    return {
        'base_asset': base_asset,
        'quote_asset': quote_asset,
        'price': format(price, ".8f"),
        'trend': trend,
        'progression': format(progression, ".2f"),
        'price_24h': format(price24h, ".8f"),
        'supply': supplies[base_asset][0],
        'base_asset_divisible': supplies[base_asset][1],
        'quote_asset_divisible': supplies[quote_asset][1],
        # prices are formatted strings, so compare numerically via Decimal
        'buy_orders': sorted(buy_orders, key=lambda x: D(x['price']), reverse=True),
        'sell_orders': sorted(sell_orders, key=lambda x: D(x['price'])),
        'last_trades': last_trades,
        'base_asset_infos': ext_info
    }
def get_market_orders(asset1, asset2, addresses=None, supplies=None, min_fee_provided=0.95, max_fee_required=0.95):
    """Collect open orders for the asset1/asset2 market (PEPE variant).

    Queries counterpartyd over JSON-RPC for open orders on the pair,
    optionally restricted to the given source addresses, screens PEPE-side
    orders by their provided/required fee ratios, and normalizes each order
    into a BUY/SELL record with price, amount and total.

    :param addresses: optional list of source addresses; None or empty
        means "all addresses". When addresses are given, per-order
        completion and transaction details are included and no
        aggregation of same-price orders is performed.
    :param supplies: optional precomputed asset-supply map; fetched if absent.
    :param min_fee_provided: minimum fee ratio for PEPE-give orders.
    :param max_fee_required: maximum fee ratio for PEPE-get orders.
    :return: list of order dicts.
    """
    # None default instead of a mutable list shared across calls.
    if addresses is None:
        addresses = []
    base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
    if not supplies:
        supplies = get_assets_supply([asset1, asset2])
    market_orders = []

    sql = '''SELECT orders.*, blocks.block_time FROM orders INNER JOIN blocks ON orders.block_index=blocks.block_index WHERE status = ? '''
    bindings = ['open']
    if len(addresses) > 0:
        # one "?" placeholder per address
        sql += '''AND source IN ({}) '''.format(','.join(['?'] * len(addresses)))
        bindings += addresses
    sql += '''AND give_remaining > 0 AND give_asset IN (?, ?) AND get_asset IN (?, ?) ORDER BY tx_index DESC'''
    bindings += [asset1, asset2, asset1, asset2]
    orders = util.call_jsonrpc_api('sql', {
        'query': sql,
        'bindings': bindings
    })['result']

    for order in orders:
        user_order = {}
        exclude = False
        if order['give_asset'] == 'PEPE':
            try:
                # fee expressed as a percentage of the quantity given
                fee_provided = order['fee_provided'] / (
                    order['give_quantity'] / 100)
                user_order['fee_provided'] = format(
                    D(order['fee_provided']) /
                    (D(order['give_quantity']) / D(100)), '.2f')
            except Exception:
                # on any arithmetic error, force exclusion
                fee_provided = min_fee_provided - 1  # exclude
            exclude = fee_provided < min_fee_provided
        elif order['get_asset'] == 'PEPE':
            try:
                fee_required = order['fee_required'] / (order['get_quantity'] / 100)
                user_order['fee_required'] = format(
                    D(order['fee_required']) /
                    (D(order['get_quantity']) / D(100)), '.2f')
            except Exception:
                fee_required = max_fee_required + 1  # exclude
            exclude = fee_required > max_fee_required

        if not exclude:
            if order['give_asset'] == base_asset:
                price = calculate_price(order['give_quantity'], order['get_quantity'],
                                        supplies[order['give_asset']][1],
                                        supplies[order['get_asset']][1])
                user_order['type'] = 'SELL'
                user_order['amount'] = order['give_remaining']
                user_order['total'] = int(order['give_remaining'] * price)
            else:
                price = calculate_price(order['get_quantity'], order['give_quantity'],
                                        supplies[order['get_asset']][1],
                                        supplies[order['give_asset']][1])
                user_order['type'] = 'BUY'
                user_order['total'] = order['give_remaining']
                user_order['amount'] = int(order['give_remaining'] / price)
            user_order['price'] = format(price, '.8f')

            # without an address filter, merge consecutive same-type,
            # same-price orders into one aggregated book entry
            if len(addresses) == 0 and len(market_orders) > 0:
                previous_order = market_orders[-1]
                if previous_order['type'] == user_order['type'] and previous_order['price'] == user_order['price']:
                    market_orders[-1]['amount'] += user_order['amount']
                    market_orders[-1]['total'] += user_order['total']
                    exclude = True

        if len(addresses) > 0:
            # per-address view: include completion percentage and tx details
            completed = format(
                ((D(order['give_quantity']) - D(order['give_remaining'])) /
                 D(order['give_quantity'])) * D(100), '.2f')
            user_order['completion'] = "{}%".format(completed)
            user_order['tx_index'] = order['tx_index']
            user_order['tx_hash'] = order['tx_hash']
            user_order['source'] = order['source']
            user_order['block_index'] = order['block_index']
            user_order['block_time'] = order['block_time']

        if not exclude:
            market_orders.append(user_order)

    return market_orders
def process_cpd_blockfeed(zmq_publisher_eventfeed): LATEST_BLOCK_INIT = { 'block_index': config.BLOCK_FIRST, 'block_time': None, 'block_hash': None } mongo_db = config.mongo_db blocks_to_insert = [] def blow_away_db(): """boom! blow away all applicable collections in mongo""" mongo_db.processed_blocks.drop() mongo_db.tracked_assets.drop() mongo_db.trades.drop() mongo_db.balance_changes.drop() mongo_db.asset_market_info.drop() mongo_db.asset_marketcap_history.drop() mongo_db.pair_market_info.drop() mongo_db.btc_open_orders.drop() mongo_db.asset_extended_info.drop() mongo_db.transaction_stats.drop() mongo_db.feeds.drop() mongo_db.wallet_stats.drop() #create/update default app_config object mongo_db.app_config.update( {}, { 'db_version': config.DB_VERSION, #counterblockd database version 'running_testnet': config.TESTNET, 'counterpartyd_db_version_major': None, 'counterpartyd_db_version_minor': None, 'counterpartyd_running_testnet': None, 'last_block_assets_compiled': config. BLOCK_FIRST, #for asset data compilation in events.py (resets on reparse as well) }, upsert=True) app_config = mongo_db.app_config.find()[0] #DO NOT DELETE preferences and chat_handles and chat_history #create XCP and BTC assets in tracked_assets for asset in [config.XCP, config.BTC]: base_asset = { 'asset': asset, 'owner': None, 'divisible': True, 'locked': False, 'total_issued': None, '_at_block': config.BLOCK_FIRST, #the block ID this asset is current for '_history': [] #to allow for block rollbacks } mongo_db.tracked_assets.insert(base_asset) #reinitialize some internal counters config.CURRENT_BLOCK_INDEX = 0 config.LAST_MESSAGE_INDEX = -1 return app_config def prune_my_stale_blocks(max_block_index): """called if there are any records for blocks higher than this in the database? 
If so, they were impartially created and we should get rid of them NOTE: after calling this function, you should always trigger a "continue" statement to reiterate the processing loop (which will get a new last_processed_block from counterpartyd and resume as appropriate) """ logging.warn("Pruning to block %i ..." % (max_block_index)) mongo_db.processed_blocks.remove( {"block_index": { "$gt": max_block_index }}) mongo_db.balance_changes.remove( {"block_index": { "$gt": max_block_index }}) mongo_db.trades.remove({"block_index": {"$gt": max_block_index}}) mongo_db.asset_marketcap_history.remove( {"block_index": { "$gt": max_block_index }}) mongo_db.transaction_stats.remove( {"block_index": { "$gt": max_block_index }}) #to roll back the state of the tracked asset, dive into the history object for each asset that has # been updated on or after the block that we are pruning back to assets_to_prune = mongo_db.tracked_assets.find( {'_at_block': { "$gt": max_block_index }}) for asset in assets_to_prune: logging.info( "Pruning asset %s (last modified @ block %i, pruning to state at block %i)" % (asset['asset'], asset['_at_block'], max_block_index)) prev_ver = None while len(asset['_history']): prev_ver = asset['_history'].pop() if prev_ver['_at_block'] <= max_block_index: break if prev_ver: if prev_ver['_at_block'] > max_block_index: #even the first history version is newer than max_block_index. #in this case, just remove the asset tracking record itself mongo_db.tracked_assets.remove({'asset': asset['asset']}) else: #if here, we were able to find a previous version that was saved at or before max_block_index # (which should be prev_ver ... 
restore asset's values to its values prev_ver['_id'] = asset['_id'] prev_ver['_history'] = asset['_history'] mongo_db.tracked_assets.save(prev_ver) config.LAST_MESSAGE_INDEX = -1 config.CAUGHT_UP = False util.blockinfo_cache.clear() latest_block = mongo_db.processed_blocks.find_one( {"block_index": max_block_index}) or LATEST_BLOCK_INIT return latest_block def publish_mempool_tx(): """fetch new tx from mempool""" tx_hashes = [] mempool_txs = mongo_db.mempool.find(fields={'tx_hash': True}) for mempool_tx in mempool_txs: tx_hashes.append(str(mempool_tx['tx_hash'])) params = None if len(tx_hashes) > 0: params = { 'filters': [{ 'field': 'tx_hash', 'op': 'NOT IN', 'value': tx_hashes }, { 'field': 'category', 'op': 'IN', 'value': [ 'sends', 'btcpays', 'issuances', 'dividends', 'callbacks' ] }], 'filterop': 'AND' } new_txs = util.call_jsonrpc_api("get_mempool", params, abort_on_error=True) for new_tx in new_txs['result']: tx = { 'tx_hash': new_tx['tx_hash'], 'command': new_tx['command'], 'category': new_tx['category'], 'bindings': new_tx['bindings'], 'timestamp': new_tx['timestamp'], 'viewed_in_block': config.CURRENT_BLOCK_INDEX } mongo_db.mempool.insert(tx) del (tx['_id']) tx['_category'] = tx['category'] tx['_message_index'] = 'mempool' logging.debug("Spotted mempool tx: %s" % tx) zmq_publisher_eventfeed.send_json(tx) def clean_mempool_tx(): """clean mempool transactions older than MAX_REORG_NUM_BLOCKS blocks""" mongo_db.mempool.remove({ "viewed_in_block": { "$lt": config.CURRENT_BLOCK_INDEX - config.MAX_REORG_NUM_BLOCKS } }) config.CURRENT_BLOCK_INDEX = 0 #initialize (last processed block index -- i.e. 
currently active block) config.LAST_MESSAGE_INDEX = -1 #initialize (last processed message index) config.BLOCKCHAIN_SERVICE_LAST_BLOCK = 0 #simply for printing/alerting purposes config.CAUGHT_UP_STARTED_EVENTS = False #^ set after we are caught up and start up the recurring events that depend on us being caught up with the blockchain #grab our stored preferences, and rebuild the database if necessary app_config = mongo_db.app_config.find() assert app_config.count() in [0, 1] if (app_config.count() == 0 or config.REPARSE_FORCED or app_config[0]['db_version'] != config.DB_VERSION or app_config[0]['running_testnet'] != config.TESTNET): if app_config.count(): logging.warn( "counterblockd database version UPDATED (from %i to %i) or testnet setting changed (from %s to %s), or REINIT forced (%s). REBUILDING FROM SCRATCH ..." % (app_config[0]['db_version'], config.DB_VERSION, app_config[0]['running_testnet'], config.TESTNET, config.REPARSE_FORCED)) else: logging.warn( "counterblockd database app_config collection doesn't exist. BUILDING FROM SCRATCH..." 
) app_config = blow_away_db() my_latest_block = LATEST_BLOCK_INIT else: app_config = app_config[0] #get the last processed block out of mongo my_latest_block = mongo_db.processed_blocks.find_one( sort=[("block_index", pymongo.DESCENDING)]) or LATEST_BLOCK_INIT #remove any data we have for blocks higher than this (would happen if counterblockd or mongo died # or errored out while processing a block) my_latest_block = prune_my_stale_blocks(my_latest_block['block_index']) #avoid contacting counterpartyd (on reparse, to speed up) autopilot = False autopilot_runner = 0 #start polling counterpartyd for new blocks while True: if not autopilot or autopilot_runner == 0: try: running_info = util.call_jsonrpc_api("get_running_info", abort_on_error=True) if 'result' not in running_info: raise AssertionError("Could not contact counterpartyd") running_info = running_info['result'] except Exception, e: logging.warn( str(e) + " -- Waiting 3 seconds before trying again...") time.sleep(3) continue if running_info[ 'last_message_index'] == -1: #last_message_index not set yet (due to no messages in counterpartyd DB yet) logging.warn( "No last_message_index returned. Waiting until counterpartyd has messages..." ) time.sleep(10) continue #wipe our state data if necessary, if counterpartyd has moved on to a new DB version wipeState = False updatePrefs = False if app_config['counterpartyd_db_version_major'] is None \ or app_config['counterpartyd_db_version_minor'] is None \ or app_config['counterpartyd_running_testnet'] is None: updatePrefs = True elif running_info['version_major'] != app_config[ 'counterpartyd_db_version_major']: logging.warn( "counterpartyd MAJOR DB version change (we built from %s, counterpartyd is at %s). Wiping our state data." 
% (app_config['counterpartyd_db_version_major'], running_info['version_major'])) wipeState = True updatePrefs = True elif running_info['version_minor'] != app_config[ 'counterpartyd_db_version_minor']: logging.warn( "counterpartyd MINOR DB version change (we built from %s.%s, counterpartyd is at %s.%s). Wiping our state data." % (app_config['counterpartyd_db_version_major'], app_config['counterpartyd_db_version_minor'], running_info['version_major'], running_info['version_minor'])) wipeState = True updatePrefs = True elif running_info.get( 'running_testnet', False) != app_config['counterpartyd_running_testnet']: logging.warn( "counterpartyd testnet setting change (from %s to %s). Wiping our state data." % (app_config['counterpartyd_running_testnet'], running_info['running_testnet'])) wipeState = True updatePrefs = True if wipeState: app_config = blow_away_db() if updatePrefs: app_config['counterpartyd_db_version_major'] = running_info[ 'version_major'] app_config['counterpartyd_db_version_minor'] = running_info[ 'version_minor'] app_config['counterpartyd_running_testnet'] = running_info[ 'running_testnet'] mongo_db.app_config.update({}, app_config) #reset my latest block record my_latest_block = LATEST_BLOCK_INIT config.CAUGHT_UP = False #You've Come a Long Way, Baby #work up to what block counterpartyd is at last_processed_block = running_info['last_block'] if last_processed_block['block_index'] is None: logging.warn( "counterpartyd has no last processed block (probably is reparsing). Waiting 3 seconds before trying again..." 
) time.sleep(3) continue if my_latest_block['block_index'] < last_processed_block['block_index']: #need to catch up config.CAUGHT_UP = False if last_processed_block['block_index'] - my_latest_block[ 'block_index'] > 500: #we are safely far from the tip, switch to bulk-everything autopilot = True if autopilot_runner == 0: autopilot_runner = 500 autopilot_runner -= 1 else: autopilot = False cur_block_index = my_latest_block['block_index'] + 1 try: cur_block = util.get_block_info_cached( cur_block_index, min( 200, last_processed_block['block_index'] - my_latest_block['block_index'])) block_data = cur_block['_messages'] except Exception, e: logging.warn( str(e) + " Waiting 3 seconds before trying again...") time.sleep(3) continue cur_block['block_time_obj'] = datetime.datetime.utcfromtimestamp( cur_block['block_time']) cur_block['block_time_str'] = cur_block[ 'block_time_obj'].isoformat() # clean api cache if last_processed_block[ 'block_index'] - cur_block_index <= config.MAX_REORG_NUM_BLOCKS: #only when we are near the tip util.clean_block_cache(cur_block_index) #parse out response (list of txns, ordered as they appeared in the block) for msg in block_data: msg_data = json.loads(msg['bindings']) if msg['message_index'] != config.LAST_MESSAGE_INDEX + 1 and config.LAST_MESSAGE_INDEX != -1: logging.error( "BUG: MESSAGE RECEIVED NOT WHAT WE EXPECTED. EXPECTED: %s, GOT: %s: %s (ALL MSGS IN get_messages PAYLOAD: %s)..." % (config.LAST_MESSAGE_INDEX + 1, msg['message_index'], msg, [m['message_index'] for m in block_data])) # we are likely cojones deep in desync, enforcing deep reorg my_latest_block = prune_my_stale_blocks( cur_block_index - config.MAX_FORCED_REORG_NUM_BLOCKS) break #sys.exit(1) #FOR NOW #BUG: sometimes counterpartyd seems to return OLD messages out of the message feed. deal with those #TODO unreachable now, delete? if msg['message_index'] <= config.LAST_MESSAGE_INDEX: logging.warn("BUG: IGNORED old RAW message %s: %s ..." 
% (msg['message_index'], msg)) continue logging.info("Received message %s: %s ..." % (msg['message_index'], msg)) #don't process invalid messages, but do forward them along to clients status = msg_data.get('status', 'valid').lower() if status.startswith('invalid'): #(but don't forward along while we're catching up) if last_processed_block['block_index'] - my_latest_block[ 'block_index'] < config.MAX_REORG_NUM_BLOCKS: event = util.decorate_message_for_feed( msg, msg_data=msg_data) zmq_publisher_eventfeed.send_json(event) config.LAST_MESSAGE_INDEX = msg['message_index'] continue #track message types, for compiling of statistics if msg['command'] == 'insert' \ and msg['category'] not in ["debits", "credits", "order_matches", "bet_matches", "order_expirations", "bet_expirations", "order_match_expirations", "bet_match_expirations", "rps_matches", "rps_expirations", "rps_match_expirations", "bet_match_resolutions"]: mongo_db.transaction_stats.insert({ 'block_index': cur_block_index, 'block_time': cur_block['block_time_obj'], 'message_index': msg['message_index'], 'category': msg['category'] }) #HANDLE REORGS if msg['command'] == 'reorg': logging.warn("Blockchain reorginization at block %s" % msg_data['block_index']) #prune back to and including the specified message_index my_latest_block = prune_my_stale_blocks( msg_data['block_index'] - 1) config.CURRENT_BLOCK_INDEX = msg_data['block_index'] - 1 #for the current last_message_index (which could have gone down after the reorg), query counterpartyd running_info = util.call_jsonrpc_api( "get_running_info", abort_on_error=True)['result'] config.LAST_MESSAGE_INDEX = running_info[ 'last_message_index'] #send out the message to listening clients (but don't forward along while we're catching up) if last_processed_block['block_index'] - my_latest_block[ 'block_index'] < config.MAX_REORG_NUM_BLOCKS: msg_data[ '_last_message_index'] = config.LAST_MESSAGE_INDEX event = util.decorate_message_for_feed( msg, msg_data=msg_data) 
zmq_publisher_eventfeed.send_json(event) break #break out of inner loop #track assets if msg['category'] == 'issuances': assets.parse_issuance(mongo_db, msg_data, cur_block_index, cur_block) #track balance changes for each address bal_change = None if msg['category'] in [ 'credits', 'debits', ]: actionName = 'credit' if msg[ 'category'] == 'credits' else 'debit' address = msg_data['address'] asset_info = mongo_db.tracked_assets.find_one( {'asset': msg_data['asset']}) if asset_info is None: logging.warn( "Credit/debit of %s where asset ('%s') does not exist. Ignoring..." % (msg_data['quantity'], msg_data['asset'])) config.LAST_MESSAGE_INDEX = msg['message_index'] continue quantity = msg_data['quantity'] if msg[ 'category'] == 'credits' else -msg_data['quantity'] quantity_normalized = util_bitcoin.normalize_quantity( quantity, asset_info['divisible']) #look up the previous balance to go off of last_bal_change = mongo_db.balance_changes.find_one( { 'address': address, 'asset': asset_info['asset'] }, sort=[("block_index", pymongo.DESCENDING), ("_id", pymongo.DESCENDING)]) if last_bal_change \ and last_bal_change['block_index'] == cur_block_index: #modify this record, as we want at most one entry per block index for each (address, asset) pair last_bal_change['quantity'] += quantity last_bal_change[ 'quantity_normalized'] += quantity_normalized last_bal_change['new_balance'] += quantity last_bal_change[ 'new_balance_normalized'] += quantity_normalized mongo_db.balance_changes.save(last_bal_change) logging.info( "Procesed %s bal change (UPDATED) from tx %s :: %s" % (actionName, msg['message_index'], last_bal_change)) bal_change = last_bal_change else: #new balance change record for this block bal_change = { 'address': address, 'asset': asset_info['asset'], 'block_index': cur_block_index, 'block_time': cur_block['block_time_obj'], 'quantity': quantity, 'quantity_normalized': quantity_normalized, 'new_balance': last_bal_change['new_balance'] + quantity if last_bal_change 
else quantity, 'new_balance_normalized': last_bal_change['new_balance_normalized'] + quantity_normalized if last_bal_change else quantity_normalized, } mongo_db.balance_changes.insert(bal_change) logging.info( "Procesed %s bal change from tx %s :: %s" % (actionName, msg['message_index'], bal_change)) #book trades if (msg['category'] == 'order_matches' and (( msg['command'] == 'update' and msg_data['status'] == 'completed' ) #for a trade with BTC involved, but that is settled (completed) or ('forward_asset' in msg_data and msg_data['forward_asset'] != config.BTC and msg_data['backward_asset'] != config.BTC)) ): #or for a trade without BTC on either end if msg['command'] == 'update' and msg_data[ 'status'] == 'completed': #an order is being updated to a completed status (i.e. a BTCpay has completed) tx0_hash, tx1_hash = msg_data[ 'order_match_id'][:64], msg_data['order_match_id'][ 64:] #get the order_match this btcpay settles order_match = util.call_jsonrpc_api( "get_order_matches", { 'filters': [{ 'field': 'tx0_hash', 'op': '==', 'value': tx0_hash }, { 'field': 'tx1_hash', 'op': '==', 'value': tx1_hash }] }, abort_on_error=True)['result'][0] else: assert msg_data[ 'status'] == 'completed' #should not enter a pending state for non BTC matches order_match = msg_data forward_asset_info = mongo_db.tracked_assets.find_one( {'asset': order_match['forward_asset']}) backward_asset_info = mongo_db.tracked_assets.find_one( {'asset': order_match['backward_asset']}) assert forward_asset_info and backward_asset_info base_asset, quote_asset = util.assets_to_asset_pair( order_match['forward_asset'], order_match['backward_asset']) #don't create trade records from order matches with BTC that are under the dust limit if (order_match['forward_asset'] == config.BTC and order_match['forward_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF) \ or (order_match['backward_asset'] == config.BTC and order_match['backward_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF): logging.debug( "Order 
match %s ignored due to %s under dust limit." % (order_match['tx0_hash'] + order_match['tx1_hash'], config.BTC)) config.LAST_MESSAGE_INDEX = msg['message_index'] continue #take divisible trade quantities to floating point forward_quantity = util_bitcoin.normalize_quantity( order_match['forward_quantity'], forward_asset_info['divisible']) backward_quantity = util_bitcoin.normalize_quantity( order_match['backward_quantity'], backward_asset_info['divisible']) #compose trade trade = { 'block_index': cur_block_index, 'block_time': cur_block['block_time_obj'], 'message_index': msg['message_index'], #secondary temporaral ordering off of when 'order_match_id': order_match['tx0_hash'] + order_match['tx1_hash'], 'order_match_tx0_index': order_match['tx0_index'], 'order_match_tx1_index': order_match['tx1_index'], 'order_match_tx0_address': order_match['tx0_address'], 'order_match_tx1_address': order_match['tx1_address'], 'base_asset': base_asset, 'quote_asset': quote_asset, 'base_quantity': order_match['forward_quantity'] if order_match['forward_asset'] == base_asset else order_match['backward_quantity'], 'quote_quantity': order_match['backward_quantity'] if order_match['forward_asset'] == base_asset else order_match['forward_quantity'], 'base_quantity_normalized': forward_quantity if order_match['forward_asset'] == base_asset else backward_quantity, 'quote_quantity_normalized': backward_quantity if order_match['forward_asset'] == base_asset else forward_quantity, } trade['unit_price'] = float( (D(trade['quote_quantity_normalized']) / D(trade['base_quantity_normalized'])).quantize( D('.00000000'), rounding=decimal.ROUND_HALF_EVEN)) trade['unit_price_inverse'] = float( (D(trade['base_quantity_normalized']) / D(trade['quote_quantity_normalized'])).quantize( D('.00000000'), rounding=decimal.ROUND_HALF_EVEN)) mongo_db.trades.insert(trade) logging.info("Procesed Trade from tx %s :: %s" % (msg['message_index'], trade)) #broadcast if msg['category'] == 'broadcasts': 
betting.parse_broadcast(mongo_db, msg_data) #if we're catching up beyond MAX_REORG_NUM_BLOCKS blocks out, make sure not to send out any socket.io # events, as to not flood on a resync (as we may give a 525 to kick the logged in clients out, but we # can't guarantee that the socket.io connection will always be severed as well??) if last_processed_block['block_index'] - my_latest_block[ 'block_index'] < config.MAX_REORG_NUM_BLOCKS: #send out the message to listening clients event = util.decorate_message_for_feed(msg, msg_data=msg_data) zmq_publisher_eventfeed.send_json(event) #this is the last processed message index config.LAST_MESSAGE_INDEX = msg['message_index'] else: #block successfully processed, track this in our DB new_block = { 'block_index': cur_block_index, 'block_time': cur_block['block_time_obj'], 'block_hash': cur_block['block_hash'], } blocks_to_insert.append(new_block) if last_processed_block[ 'block_index'] - cur_block_index > 1000: #reparsing, do bulk inserts if len(blocks_to_insert) >= 1000: mongo_db.processed_blocks.insert(blocks_to_insert) blocks_to_insert[:] = [] else: mongo_db.processed_blocks.insert(blocks_to_insert) blocks_to_insert[:] = [] my_latest_block = new_block config.CURRENT_BLOCK_INDEX = cur_block_index #get the current blockchain service block if config.BLOCKCHAIN_SERVICE_LAST_BLOCK == 0 or config.BLOCKCHAIN_SERVICE_LAST_BLOCK - config.CURRENT_BLOCK_INDEX < config.MAX_REORG_NUM_BLOCKS: #update as CURRENT_BLOCK_INDEX catches up with BLOCKCHAIN_SERVICE_LAST_BLOCK and/or surpasses it (i.e. 
if blockchain service gets behind for some reason) try: block_height_response = blockchain.getinfo() except: block_height_response = None config.BLOCKCHAIN_SERVICE_LAST_BLOCK = block_height_response[ 'info']['blocks'] if block_height_response else 0 logging.info( "Block: %i (message_index height=%s) (blockchain latest block=%s)" % (config.CURRENT_BLOCK_INDEX, config.LAST_MESSAGE_INDEX if config.LAST_MESSAGE_INDEX != -1 else '???', config.BLOCKCHAIN_SERVICE_LAST_BLOCK if config.BLOCKCHAIN_SERVICE_LAST_BLOCK else '???')) if last_processed_block[ 'block_index'] - cur_block_index < config.MAX_REORG_NUM_BLOCKS: #only when we are near the tip clean_mempool_tx()
def get_market_trades(asset1, asset2, addresses=None, limit=100, supplies=None):
    """Return settled trades (order matches) for the asset1/asset2 market.

    Each order match can yield up to two trade records, one from the
    perspective of each side (tx0 and tx1) of the match:
    - If ``addresses`` is given, a record is emitted for each side whose
      address is in the list (so a self-trade would produce two records).
    - If ``addresses`` is empty, exactly one record per match is emitted,
      from the tx1 side's perspective.

    Args:
        asset1, asset2: the two assets of the market (in either order; they
            are normalized into base/quote via util.assets_to_asset_pair).
        addresses: optional list of addresses to filter matches by.
        limit: NOTE(review): accepted but currently never applied to the
            query or the result list -- TODO confirm intended semantics
            before enforcing it.
        supplies: optional pre-fetched asset supply info, keyed by asset
            name; fetched via get_assets_supply() when not provided.

    Returns:
        list of trade dicts with match_id, source, countersource,
        block_index, block_time, status, type ('SELL'/'BUY'), price,
        amount and total (amount/total are base/quote quantities).
    """
    #avoid the shared mutable-default-argument pitfall; accepting None keeps
    # the call signature backward-compatible
    if addresses is None:
        addresses = []
    base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
    if not supplies:
        supplies = get_assets_supply([asset1, asset2])
    market_trades = []

    #optional address filter on either side of the match
    sources = ''
    bindings = ['expired']  #exclude expired matches (status != 'expired')
    if len(addresses) > 0:
        placeholder = ','.join(['?'] * len(addresses))
        sources = '''AND (tx0_address IN ({}) OR tx1_address IN ({}))'''.format(placeholder, placeholder)
        bindings += addresses + addresses

    sql = '''SELECT order_matches.*, blocks.block_time FROM order_matches INNER JOIN blocks ON order_matches.block_index=blocks.block_index
             WHERE status != ? {} AND forward_asset IN (?, ?) AND backward_asset IN (?, ?)
             ORDER BY block_index DESC'''.format(sources)
    bindings += [asset1, asset2, asset1, asset2]

    order_matches = util.call_jsonrpc_api('sql', {'query': sql, 'bindings': bindings})['result']

    def _trade_record(order_match, source, countersource, give_asset, give_quantity, get_asset, get_quantity):
        #Build one trade record from the perspective of `source`: a SELL if
        # it gave away the base asset, a BUY otherwise. Price, amount and
        # total are always expressed base-first.
        if give_asset == base_asset:
            trade_type = 'SELL'
            b_asset, q_asset = give_asset, get_asset
            base_quantity, quote_quantity = give_quantity, get_quantity
        else:
            trade_type = 'BUY'
            b_asset, q_asset = get_asset, give_asset
            base_quantity, quote_quantity = get_quantity, give_quantity
        return {
            'match_id': order_match['id'],
            'source': source,
            'countersource': countersource,
            'block_index': order_match['block_index'],
            'block_time': order_match['block_time'],
            'status': order_match['status'],
            'type': trade_type,
            'price': format_price(base_quantity, quote_quantity,
                                  supplies[b_asset][1], supplies[q_asset][1]),
            'amount': base_quantity,
            'total': quote_quantity,
        }

    for order_match in order_matches:
        #tx0 side's perspective (only when it matches the address filter)
        if order_match['tx0_address'] in addresses:
            market_trades.append(_trade_record(
                order_match, order_match['tx0_address'], order_match['tx1_address'],
                order_match['forward_asset'], order_match['forward_quantity'],
                order_match['backward_asset'], order_match['backward_quantity']))
        #tx1 side's perspective; with no address filter, only this side is recorded
        if len(addresses) == 0 or order_match['tx1_address'] in addresses:
            market_trades.append(_trade_record(
                order_match, order_match['tx1_address'], order_match['tx0_address'],
                order_match['backward_asset'], order_match['backward_quantity'],
                order_match['forward_asset'], order_match['forward_quantity']))
    return market_trades
def compile_asset_pair_market_info():
    """Compile rolling 24h market statistics for every traded asset pair.

    For each pair it derives, from open orders: open order count, lowest ask
    and highest bid; and from completed trades in the last 24 hours: trade
    count, base/quote volume, volumes restated in XCP and BTC, and the 24h
    percent price change. Results are upserted into the
    ``asset_pair_market_info`` mongo collection, and pairs not refreshed in
    this pass are removed. Intended to be run periodically.
    """
    mongo_db = config.mongo_db
    end_dt = datetime.datetime.utcnow()
    start_dt = end_dt - datetime.timedelta(days=1)  #rolling 24 hour window
    start_block_index, end_block_index = util.get_block_indexes_for_dates(start_dt=start_dt, end_dt=end_dt)

    #fetch all open, unexpired orders that still have quantity (and fees) remaining
    open_orders = util.call_jsonrpc_api("get_orders", {
        'filters': [
            {'field': 'give_remaining', 'op': '>', 'value': 0},
            {'field': 'get_remaining', 'op': '>', 'value': 0},
            {'field': 'fee_required_remaining', 'op': '>=', 'value': 0},
            {'field': 'fee_provided_remaining', 'op': '>=', 'value': 0},
        ],
        'status': 'open',
        'show_expired': False,
    }, abort_on_error=True)['result']

    pair_data = {}
    asset_info = {}  #per-run cache of tracked_assets documents, keyed by asset name

    def get_price(base_quantity_normalized, quote_quantity_normalized):
        #quote units per one base unit
        #NOTE(review): raises ZeroDivisionError if base_quantity_normalized is 0 -- confirm upstream guarantees
        return float(D(quote_quantity_normalized / base_quantity_normalized))

    def get_asset_info(asset):
        #Cache tracked_assets lookups for the duration of this run.
        #FIX: the previous `asset_info.get(asset, find_one(...))` form
        # evaluated the find_one() default eagerly on EVERY call, so the
        # cache never actually avoided a DB round-trip.
        if asset not in asset_info:
            asset_info[asset] = mongo_db.tracked_assets.find_one({'asset': asset})
        return asset_info[asset]

    #COMPOSE order depth, lowest ask, and highest bid column data
    for o in open_orders:
        (base_asset, quote_asset) = util.assets_to_asset_pair(o['give_asset'], o['get_asset'])
        pair = '%s/%s' % (base_asset, quote_asset)
        base_asset_info = get_asset_info(base_asset)
        quote_asset_info = get_asset_info(quote_asset)

        pair_data.setdefault(pair, {'open_orders_count': 0, 'lowest_ask': None, 'highest_bid': None,
            'completed_trades_count': 0, 'vol_base': 0, 'vol_quote': 0})
        pair_data[pair]['open_orders_count'] += 1

        #express the order's quantities in base/quote terms, normalized for divisibility
        base_quantity_normalized = util_bitcoin.normalize_quantity(
            o['give_quantity'] if base_asset == o['give_asset'] else o['get_quantity'],
            base_asset_info['divisible'])
        quote_quantity_normalized = util_bitcoin.normalize_quantity(
            o['give_quantity'] if quote_asset == o['give_asset'] else o['get_quantity'],
            quote_asset_info['divisible'])
        order_price = get_price(base_quantity_normalized, quote_quantity_normalized)

        if base_asset == o['give_asset']: #selling base: candidate for lowest ask
            if pair_data[pair]['lowest_ask'] is None or order_price < pair_data[pair]['lowest_ask']:
                pair_data[pair]['lowest_ask'] = order_price
        elif base_asset == o['get_asset']: #buying base: candidate for highest bid
            if pair_data[pair]['highest_bid'] is None or order_price > pair_data[pair]['highest_bid']:
                pair_data[pair]['highest_bid'] = order_price

    #aggregate completed-trade counts and base/quote volumes per pair over the 24h window
    trades_data_by_pair = mongo_db.trades.aggregate([
        {"$match": {"block_time": {"$gte": start_dt, "$lte": end_dt}}},
        {"$project": {
            "base_asset": 1,
            "quote_asset": 1,
            "base_quantity_normalized": 1, #to derive base volume
            "quote_quantity_normalized": 1 #to derive quote volume
        }},
        {"$group": {
            "_id": {"base_asset": "$base_asset", "quote_asset": "$quote_asset"},
            "vol_base": {"$sum": "$base_quantity_normalized"},
            "vol_quote": {"$sum": "$quote_quantity_normalized"},
            "count": {"$sum": 1},
        }}
    ])
    for e in trades_data_by_pair:
        pair = '%s/%s' % (e['_id']['base_asset'], e['_id']['quote_asset'])
        pair_data.setdefault(pair, {'open_orders_count': 0, 'lowest_ask': None, 'highest_bid': None})
        #^ initialize an empty pair in the event there are no open orders for that pair, but there ARE completed trades for it
        pair_data[pair]['completed_trades_count'] = e['count']
        pair_data[pair]['vol_base'] = e['vol_base']
        pair_data[pair]['vol_quote'] = e['vol_quote']

    #restate each pair's 24h base volume in XCP and BTC terms
    mps_xcp_btc, xcp_btc_price, btc_xcp_price = get_price_primatives()
    for pair, e in pair_data.items():
        base_asset, quote_asset = pair.split('/')
        _24h_vol_in_btc = None
        _24h_vol_in_xcp = None
        if base_asset == config.XCP:
            _24h_vol_in_xcp = e['vol_base']
            _24h_vol_in_btc = util_bitcoin.round_out(e['vol_base'] * xcp_btc_price) if xcp_btc_price else 0
        elif base_asset == config.BTC:
            _24h_vol_in_xcp = util_bitcoin.round_out(e['vol_base'] * btc_xcp_price) if btc_xcp_price else 0
            _24h_vol_in_btc = e['vol_base']
        else:
            #base is neither XCP nor BTC: derive a price for the base asset first...
            price_summary_in_xcp, price_summary_in_btc, price_in_xcp, price_in_btc, aggregated_price_in_xcp, aggregated_price_in_btc = \
                get_xcp_btc_price_info(base_asset, mps_xcp_btc, xcp_btc_price, btc_xcp_price,
                                       with_last_trades=0, start_dt=start_dt, end_dt=end_dt)
            if price_in_xcp:
                _24h_vol_in_xcp = util_bitcoin.round_out(e['vol_base'] * price_in_xcp)
            if price_in_btc:
                _24h_vol_in_btc = util_bitcoin.round_out(e['vol_base'] * price_in_btc)

            if _24h_vol_in_xcp is None or _24h_vol_in_btc is None:
                #...and fall back to pricing via the quote asset (applied to quote volume)
                price_summary_in_xcp, price_summary_in_btc, price_in_xcp, price_in_btc, aggregated_price_in_xcp, aggregated_price_in_btc = \
                    get_xcp_btc_price_info(quote_asset, mps_xcp_btc, xcp_btc_price, btc_xcp_price,
                                           with_last_trades=0, start_dt=start_dt, end_dt=end_dt)
                if _24h_vol_in_xcp is None and price_in_xcp:
                    _24h_vol_in_xcp = util_bitcoin.round_out(e['vol_quote'] * price_in_xcp)
                if _24h_vol_in_btc is None and price_in_btc:
                    _24h_vol_in_btc = util_bitcoin.round_out(e['vol_quote'] * price_in_btc)
        pair_data[pair]['24h_vol_in_{}'.format(config.XCP.lower())] = _24h_vol_in_xcp #might still be None
        pair_data[pair]['24h_vol_in_{}'.format(config.BTC.lower())] = _24h_vol_in_btc #might still be None

        #get % change stats -- start by getting the first trade directly before the 24h period starts
        prev_trade = mongo_db.trades.find({
            "base_asset": base_asset,
            "quote_asset": quote_asset,
            "block_time": {'$lt': start_dt}
        }).sort('block_time', pymongo.DESCENDING).limit(1)
        latest_trade = mongo_db.trades.find({
            "base_asset": base_asset,
            "quote_asset": quote_asset
        }).sort('block_time', pymongo.DESCENDING).limit(1)
        if not prev_trade.count(): #no previous trade before this 24hr period
            pair_data[pair]['24h_pct_change'] = None
        else:
            prev_trade = prev_trade[0]
            latest_trade = latest_trade[0]
            prev_trade_price = get_price(prev_trade['base_quantity_normalized'], prev_trade['quote_quantity_normalized'])
            latest_trade_price = get_price(latest_trade['base_quantity_normalized'], latest_trade['quote_quantity_normalized'])
            pair_data[pair]['24h_pct_change'] = ((latest_trade_price - prev_trade_price) / prev_trade_price) * 100
        pair_data[pair]['last_updated'] = end_dt
        mongo_db.asset_pair_market_info.update(
            {'base_asset': base_asset, 'quote_asset': quote_asset},
            {"$set": pair_data[pair]}, upsert=True)

    #remove any old pairs that were not just updated
    mongo_db.asset_pair_market_info.remove({'last_updated': {'$lt': end_dt}})
    logging.info("Recomposed 24h trade statistics for %i asset pairs: %s"
                 % (len(pair_data), ', '.join(list(pair_data.keys()))))
def process_cpd_blockfeed(zmq_publisher_eventfeed): LATEST_BLOCK_INIT = {'block_index': config.BLOCK_FIRST, 'block_time': None, 'block_hash': None} mongo_db = config.mongo_db blocks_to_insert = [] def blow_away_db(): """boom! blow away all applicable collections in mongo""" mongo_db.processed_blocks.drop() mongo_db.tracked_assets.drop() mongo_db.trades.drop() mongo_db.balance_changes.drop() mongo_db.asset_market_info.drop() mongo_db.asset_marketcap_history.drop() mongo_db.pair_market_info.drop() mongo_db.btc_open_orders.drop() mongo_db.asset_extended_info.drop() mongo_db.transaction_stats.drop() mongo_db.feeds.drop() mongo_db.wallet_stats.drop() #create/update default app_config object mongo_db.app_config.update({}, { 'db_version': config.DB_VERSION, #counterblockd database version 'running_testnet': config.TESTNET, 'clearinghoused_db_version_major': None, 'clearinghoused_db_version_minor': None, 'clearinghoused_running_testnet': None, 'last_block_assets_compiled': config.BLOCK_FIRST, #for asset data compilation in events.py (resets on reparse as well) }, upsert=True) app_config = mongo_db.app_config.find()[0] #DO NOT DELETE preferences and chat_handles and chat_history #create XCP and BTC assets in tracked_assets for asset in [config.XCP, config.BTC]: base_asset = { 'asset': asset, 'owner': None, 'divisible': True, 'locked': False, 'total_issued': None, '_at_block': config.BLOCK_FIRST, #the block ID this asset is current for '_history': [] #to allow for block rollbacks } mongo_db.tracked_assets.insert(base_asset) #reinitialize some internal counters config.CURRENT_BLOCK_INDEX = 0 config.LAST_MESSAGE_INDEX = -1 return app_config def prune_my_stale_blocks(max_block_index): """called if there are any records for blocks higher than this in the database? 
If so, they were impartially created and we should get rid of them NOTE: after calling this function, you should always trigger a "continue" statement to reiterate the processing loop (which will get a new last_processed_block from clearinghoused and resume as appropriate) """ logging.warn("Pruning to block %i ..." % (max_block_index)) mongo_db.processed_blocks.remove({"block_index": {"$gt": max_block_index}}) mongo_db.balance_changes.remove({"block_index": {"$gt": max_block_index}}) mongo_db.trades.remove({"block_index": {"$gt": max_block_index}}) mongo_db.asset_marketcap_history.remove({"block_index": {"$gt": max_block_index}}) mongo_db.transaction_stats.remove({"block_index": {"$gt": max_block_index}}) #to roll back the state of the tracked asset, dive into the history object for each asset that has # been updated on or after the block that we are pruning back to assets_to_prune = mongo_db.tracked_assets.find({'_at_block': {"$gt": max_block_index}}) for asset in assets_to_prune: logging.info("Pruning asset %s (last modified @ block %i, pruning to state at block %i)" % ( asset['asset'], asset['_at_block'], max_block_index)) prev_ver = None while len(asset['_history']): prev_ver = asset['_history'].pop() if prev_ver['_at_block'] <= max_block_index: break if prev_ver: if prev_ver['_at_block'] > max_block_index: #even the first history version is newer than max_block_index. #in this case, just remove the asset tracking record itself mongo_db.tracked_assets.remove({'asset': asset['asset']}) else: #if here, we were able to find a previous version that was saved at or before max_block_index # (which should be prev_ver ... 
restore asset's values to its values prev_ver['_id'] = asset['_id'] prev_ver['_history'] = asset['_history'] mongo_db.tracked_assets.save(prev_ver) config.LAST_MESSAGE_INDEX = -1 config.CAUGHT_UP = False util.blockinfo_cache.clear() latest_block = mongo_db.processed_blocks.find_one({"block_index": max_block_index}) or LATEST_BLOCK_INIT return latest_block def publish_mempool_tx(): """fetch new tx from mempool""" tx_hashes = [] mempool_txs = mongo_db.mempool.find(fields={'tx_hash': True}) for mempool_tx in mempool_txs: tx_hashes.append(str(mempool_tx['tx_hash'])) params = None if len(tx_hashes) > 0: params = { 'filters': [ {'field':'tx_hash', 'op': 'NOT IN', 'value': tx_hashes}, {'field':'category', 'op': 'IN', 'value': ['sends', 'btcpays', 'issuances', 'dividends', 'callbacks']} ], 'filterop': 'AND' } new_txs = util.call_jsonrpc_api("get_mempool", params, abort_on_error=True) for new_tx in new_txs['result']: tx = { 'tx_hash': new_tx['tx_hash'], 'command': new_tx['command'], 'category': new_tx['category'], 'bindings': new_tx['bindings'], 'timestamp': new_tx['timestamp'], 'viewed_in_block': config.CURRENT_BLOCK_INDEX } mongo_db.mempool.insert(tx) del(tx['_id']) tx['_category'] = tx['category'] tx['_message_index'] = 'mempool' logging.debug("Spotted mempool tx: %s" % tx) zmq_publisher_eventfeed.send_json(tx) def clean_mempool_tx(): """clean mempool transactions older than MAX_REORG_NUM_BLOCKS blocks""" mongo_db.mempool.remove({"viewed_in_block": {"$lt": config.CURRENT_BLOCK_INDEX - config.MAX_REORG_NUM_BLOCKS}}) config.CURRENT_BLOCK_INDEX = 0 #initialize (last processed block index -- i.e. 
currently active block) config.LAST_MESSAGE_INDEX = -1 #initialize (last processed message index) config.BLOCKCHAIN_SERVICE_LAST_BLOCK = 0 #simply for printing/alerting purposes config.CAUGHT_UP_STARTED_EVENTS = False #^ set after we are caught up and start up the recurring events that depend on us being caught up with the blockchain #grab our stored preferences, and rebuild the database if necessary app_config = mongo_db.app_config.find() assert app_config.count() in [0, 1] if ( app_config.count() == 0 or config.REPARSE_FORCED or app_config[0]['db_version'] != config.DB_VERSION or app_config[0]['running_testnet'] != config.TESTNET): if app_config.count(): logging.warn("clearblockd database version UPDATED (from %i to %i) or testnet setting changed (from %s to %s), or REINIT forced (%s). REBUILDING FROM SCRATCH ..." % ( app_config[0]['db_version'], config.DB_VERSION, app_config[0]['running_testnet'], config.TESTNET, config.REPARSE_FORCED)) else: logging.warn("clearblockd database app_config collection doesn't exist. 
BUILDING FROM SCRATCH...") app_config = blow_away_db() my_latest_block = LATEST_BLOCK_INIT else: app_config = app_config[0] #get the last processed block out of mongo my_latest_block = mongo_db.processed_blocks.find_one(sort=[("block_index", pymongo.DESCENDING)]) or LATEST_BLOCK_INIT #remove any data we have for blocks higher than this (would happen if counterblockd or mongo died # or errored out while processing a block) my_latest_block = prune_my_stale_blocks(my_latest_block['block_index']) #avoid contacting clearinghoused (on reparse, to speed up) autopilot = False autopilot_runner = 0 #start polling clearinghoused for new blocks while True: if not autopilot or autopilot_runner == 0: try: running_info = util.call_jsonrpc_api("get_running_info", abort_on_error=True) if 'result' not in running_info: raise AssertionError("Could not contact clearinghoused") running_info = running_info['result'] except Exception, e: logging.warn(str(e) + " -- Waiting 3 seconds before trying again...") time.sleep(3) continue if running_info['last_message_index'] == -1: #last_message_index not set yet (due to no messages in clearinghoused DB yet) logging.warn("No last_message_index returned. Waiting until clearinghoused has messages...") time.sleep(10) continue #wipe our state data if necessary, if clearinghoused has moved on to a new DB version wipeState = False updatePrefs = False if app_config['clearinghoused_db_version_major'] is None \ or app_config['clearinghoused_db_version_minor'] is None \ or app_config['clearinghoused_running_testnet'] is None: updatePrefs = True elif running_info['version_major'] != app_config['clearinghoused_db_version_major']: logging.warn("clearinghoused MAJOR DB version change (we built from %s, clearinghoused is at %s). Wiping our state data." 
% ( app_config['clearinghoused_db_version_major'], running_info['version_major'])) wipeState = True updatePrefs = True elif running_info['version_minor'] != app_config['clearinghoused_db_version_minor']: logging.warn("clearinghoused MINOR DB version change (we built from %s.%s, clearinghoused is at %s.%s). Wiping our state data." % ( app_config['clearinghoused_db_version_major'], app_config['clearinghoused_db_version_minor'], running_info['version_major'], running_info['version_minor'])) wipeState = True updatePrefs = True elif running_info.get('running_testnet', False) != app_config['clearinghoused_running_testnet']: logging.warn("clearinghoused testnet setting change (from %s to %s). Wiping our state data." % ( app_config['clearinghoused_running_testnet'], running_info['running_testnet'])) wipeState = True updatePrefs = True if wipeState: app_config = blow_away_db() if updatePrefs: app_config['clearinghoused_db_version_major'] = running_info['version_major'] app_config['clearinghoused_db_version_minor'] = running_info['version_minor'] app_config['clearinghoused_running_testnet'] = running_info['running_testnet'] mongo_db.app_config.update({}, app_config) #reset my latest block record my_latest_block = LATEST_BLOCK_INIT config.CAUGHT_UP = False #You've Come a Long Way, Baby #work up to what block clearinghoused is at last_processed_block = running_info['last_block'] if last_processed_block['block_index'] is None: logging.warn("clearinghoused has no last processed block (probably is reparsing). 
Waiting 3 seconds before trying again...") time.sleep(3) continue if my_latest_block['block_index'] < last_processed_block['block_index']: #need to catch up config.CAUGHT_UP = False if last_processed_block['block_index'] - my_latest_block['block_index'] > 500: #we are safely far from the tip, switch to bulk-everything autopilot = True if autopilot_runner == 0: autopilot_runner = 500 autopilot_runner -= 1 else: autopilot = False cur_block_index = my_latest_block['block_index'] + 1 try: cur_block = util.get_block_info_cached(cur_block_index, min(200, last_processed_block['block_index'] - my_latest_block['block_index'])) block_data = cur_block['_messages'] except Exception, e: logging.warn(str(e) + " Waiting 3 seconds before trying again...") time.sleep(3) continue cur_block['block_time_obj'] = datetime.datetime.utcfromtimestamp(cur_block['block_time']) cur_block['block_time_str'] = cur_block['block_time_obj'].isoformat() # clean api cache if last_processed_block['block_index'] - cur_block_index <= config.MAX_REORG_NUM_BLOCKS: #only when we are near the tip util.clean_block_cache(cur_block_index) #parse out response (list of txns, ordered as they appeared in the block) for msg in block_data: msg_data = json.loads(msg['bindings']) if msg['message_index'] != config.LAST_MESSAGE_INDEX + 1 and config.LAST_MESSAGE_INDEX != -1: logging.error("BUG: MESSAGE RECEIVED NOT WHAT WE EXPECTED. EXPECTED: %s, GOT: %s: %s (ALL MSGS IN get_messages PAYLOAD: %s)..." % ( config.LAST_MESSAGE_INDEX + 1, msg['message_index'], msg, [m['message_index'] for m in block_data])) # we are likely cojones deep in desync, enforcing deep reorg my_latest_block = prune_my_stale_blocks(cur_block_index - config.MAX_FORCED_REORG_NUM_BLOCKS) break #sys.exit(1) #FOR NOW #BUG: sometimes clearinghoused seems to return OLD messages out of the message feed. deal with those #TODO unreachable now, delete? if msg['message_index'] <= config.LAST_MESSAGE_INDEX: logging.warn("BUG: IGNORED old RAW message %s: %s ..." 
% (msg['message_index'], msg)) continue logging.info("Received message %s: %s ..." % (msg['message_index'], msg)) #don't process invalid messages, but do forward them along to clients status = msg_data.get('status', 'valid').lower() if status.startswith('invalid'): #(but don't forward along while we're catching up) if last_processed_block['block_index'] - my_latest_block['block_index'] < config.MAX_REORG_NUM_BLOCKS: event = util.decorate_message_for_feed(msg, msg_data=msg_data) zmq_publisher_eventfeed.send_json(event) config.LAST_MESSAGE_INDEX = msg['message_index'] continue #track message types, for compiling of statistics if msg['command'] == 'insert' \ and msg['category'] not in ["debits", "credits", "order_matches", "bet_matches", "order_expirations", "bet_expirations", "order_match_expirations", "bet_match_expirations", "rps_matches", "rps_expirations", "rps_match_expirations", "bet_match_resolutions"]: mongo_db.transaction_stats.insert({ 'block_index': cur_block_index, 'block_time': cur_block['block_time_obj'], 'message_index': msg['message_index'], 'category': msg['category'] }) #HANDLE REORGS if msg['command'] == 'reorg': logging.warn("Blockchain reorginization at block %s" % msg_data['block_index']) #prune back to and including the specified message_index my_latest_block = prune_my_stale_blocks(msg_data['block_index'] - 1) config.CURRENT_BLOCK_INDEX = msg_data['block_index'] - 1 #for the current last_message_index (which could have gone down after the reorg), query clearinghoused running_info = util.call_jsonrpc_api("get_running_info", abort_on_error=True)['result'] config.LAST_MESSAGE_INDEX = running_info['last_message_index'] #send out the message to listening clients (but don't forward along while we're catching up) if last_processed_block['block_index'] - my_latest_block['block_index'] < config.MAX_REORG_NUM_BLOCKS: msg_data['_last_message_index'] = config.LAST_MESSAGE_INDEX event = util.decorate_message_for_feed(msg, msg_data=msg_data) 
zmq_publisher_eventfeed.send_json(event) break #break out of inner loop #track assets if msg['category'] == 'issuances': assets.parse_issuance(mongo_db, msg_data, cur_block_index, cur_block) #track balance changes for each address bal_change = None if msg['category'] in ['credits', 'debits',]: actionName = 'credit' if msg['category'] == 'credits' else 'debit' address = msg_data['address'] asset_info = mongo_db.tracked_assets.find_one({ 'asset': msg_data['asset'] }) if asset_info is None: logging.warn("Credit/debit of %s where asset ('%s') does not exist. Ignoring..." % (msg_data['quantity'], msg_data['asset'])) continue quantity = msg_data['quantity'] if msg['category'] == 'credits' else -msg_data['quantity'] quantity_normalized = util_bitcoin.normalize_quantity(quantity, asset_info['divisible']) #look up the previous balance to go off of last_bal_change = mongo_db.balance_changes.find_one({ 'address': address, 'asset': asset_info['asset'] }, sort=[("block_index", pymongo.DESCENDING), ("_id", pymongo.DESCENDING)]) if last_bal_change \ and last_bal_change['block_index'] == cur_block_index: #modify this record, as we want at most one entry per block index for each (address, asset) pair last_bal_change['quantity'] += quantity last_bal_change['quantity_normalized'] += quantity_normalized last_bal_change['new_balance'] += quantity last_bal_change['new_balance_normalized'] += quantity_normalized mongo_db.balance_changes.save(last_bal_change) logging.info("Procesed %s bal change (UPDATED) from tx %s :: %s" % (actionName, msg['message_index'], last_bal_change)) bal_change = last_bal_change else: #new balance change record for this block bal_change = { 'address': address, 'asset': asset_info['asset'], 'block_index': cur_block_index, 'block_time': cur_block['block_time_obj'], 'quantity': quantity, 'quantity_normalized': quantity_normalized, 'new_balance': last_bal_change['new_balance'] + quantity if last_bal_change else quantity, 'new_balance_normalized': 
last_bal_change['new_balance_normalized'] + quantity_normalized if last_bal_change else quantity_normalized, } mongo_db.balance_changes.insert(bal_change) logging.info("Procesed %s bal change from tx %s :: %s" % (actionName, msg['message_index'], bal_change)) #book trades if (msg['category'] == 'order_matches' and ((msg['command'] == 'update' and msg_data['status'] == 'completed') #for a trade with BTC involved, but that is settled (completed) or ('forward_asset' in msg_data and msg_data['forward_asset'] != config.BTC and msg_data['backward_asset'] != config.BTC))): #or for a trade without BTC on either end if msg['command'] == 'update' and msg_data['status'] == 'completed': #an order is being updated to a completed status (i.e. a BTCpay has completed) tx0_hash, tx1_hash = msg_data['order_match_id'][:64], msg_data['order_match_id'][64:] #get the order_match this btcpay settles order_match = util.call_jsonrpc_api("get_order_matches", {'filters': [ {'field': 'tx0_hash', 'op': '==', 'value': tx0_hash}, {'field': 'tx1_hash', 'op': '==', 'value': tx1_hash}] }, abort_on_error=True)['result'][0] else: assert msg_data['status'] == 'completed' #should not enter a pending state for non BTC matches order_match = msg_data forward_asset_info = mongo_db.tracked_assets.find_one({'asset': order_match['forward_asset']}) backward_asset_info = mongo_db.tracked_assets.find_one({'asset': order_match['backward_asset']}) assert forward_asset_info and backward_asset_info base_asset, quote_asset = util.assets_to_asset_pair(order_match['forward_asset'], order_match['backward_asset']) #don't create trade records from order matches with BTC that are under the dust limit if (order_match['forward_asset'] == config.BTC and order_match['forward_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF) \ or (order_match['backward_asset'] == config.BTC and order_match['backward_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF): logging.debug("Order match %s ignored due to %s under dust limit." 
% (order_match['tx0_hash'] + order_match['tx1_hash'], config.BTC)) continue #take divisible trade quantities to floating point forward_quantity = util_bitcoin.normalize_quantity(order_match['forward_quantity'], forward_asset_info['divisible']) backward_quantity = util_bitcoin.normalize_quantity(order_match['backward_quantity'], backward_asset_info['divisible']) #compose trade trade = { 'block_index': cur_block_index, 'block_time': cur_block['block_time_obj'], 'message_index': msg['message_index'], #secondary temporaral ordering off of when 'order_match_id': order_match['tx0_hash'] + order_match['tx1_hash'], 'order_match_tx0_index': order_match['tx0_index'], 'order_match_tx1_index': order_match['tx1_index'], 'order_match_tx0_address': order_match['tx0_address'], 'order_match_tx1_address': order_match['tx1_address'], 'base_asset': base_asset, 'quote_asset': quote_asset, 'base_quantity': order_match['forward_quantity'] if order_match['forward_asset'] == base_asset else order_match['backward_quantity'], 'quote_quantity': order_match['backward_quantity'] if order_match['forward_asset'] == base_asset else order_match['forward_quantity'], 'base_quantity_normalized': forward_quantity if order_match['forward_asset'] == base_asset else backward_quantity, 'quote_quantity_normalized': backward_quantity if order_match['forward_asset'] == base_asset else forward_quantity, } trade['unit_price'] = float( ( D(trade['quote_quantity_normalized']) / D(trade['base_quantity_normalized']) ).quantize( D('.00000000'), rounding=decimal.ROUND_HALF_EVEN)) trade['unit_price_inverse'] = float( ( D(trade['base_quantity_normalized']) / D(trade['quote_quantity_normalized']) ).quantize( D('.00000000'), rounding=decimal.ROUND_HALF_EVEN)) mongo_db.trades.insert(trade) logging.info("Procesed Trade from tx %s :: %s" % (msg['message_index'], trade)) #broadcast if msg['category'] == 'broadcasts': betting.parse_broadcast(mongo_db, msg_data) #if we're catching up beyond MAX_REORG_NUM_BLOCKS blocks out, 
make sure not to send out any socket.io # events, as to not flood on a resync (as we may give a 525 to kick the logged in clients out, but we # can't guarantee that the socket.io connection will always be severed as well??) if last_processed_block['block_index'] - my_latest_block['block_index'] < config.MAX_REORG_NUM_BLOCKS: #send out the message to listening clients event = util.decorate_message_for_feed(msg, msg_data=msg_data) zmq_publisher_eventfeed.send_json(event) #this is the last processed message index config.LAST_MESSAGE_INDEX = msg['message_index'] else: #block successfully processed, track this in our DB new_block = { 'block_index': cur_block_index, 'block_time': cur_block['block_time_obj'], 'block_hash': cur_block['block_hash'], } blocks_to_insert.append(new_block) if last_processed_block['block_index'] - cur_block_index > 1000: #reparsing, do bulk inserts if len(blocks_to_insert) >= 1000: mongo_db.processed_blocks.insert(blocks_to_insert) blocks_to_insert[:] = [] else: mongo_db.processed_blocks.insert(blocks_to_insert) blocks_to_insert[:] = [] my_latest_block = new_block config.CURRENT_BLOCK_INDEX = cur_block_index #get the current blockchain service block if config.BLOCKCHAIN_SERVICE_LAST_BLOCK == 0 or config.BLOCKCHAIN_SERVICE_LAST_BLOCK - config.CURRENT_BLOCK_INDEX < config.MAX_REORG_NUM_BLOCKS: #update as CURRENT_BLOCK_INDEX catches up with BLOCKCHAIN_SERVICE_LAST_BLOCK and/or surpasses it (i.e. 
if blockchain service gets behind for some reason) try: block_height_response = blockchain.getinfo() except: block_height_response = None config.BLOCKCHAIN_SERVICE_LAST_BLOCK = block_height_response['info']['blocks'] if block_height_response else 0 logging.info("Block: %i (message_index height=%s) (blockchain latest block=%s)" % (config.CURRENT_BLOCK_INDEX, config.LAST_MESSAGE_INDEX if config.LAST_MESSAGE_INDEX != -1 else '???', config.BLOCKCHAIN_SERVICE_LAST_BLOCK if config.BLOCKCHAIN_SERVICE_LAST_BLOCK else '???')) if last_processed_block['block_index'] - cur_block_index < config.MAX_REORG_NUM_BLOCKS: #only when we are near the tip clean_mempool_tx()
"filters": [ {"field": "tx0_hash", "op": "==", "value": tx0_hash}, {"field": "tx1_hash", "op": "==", "value": tx1_hash}, ] }, abort_on_error=True, )["result"][0] else: assert msg_data["status"] == "completed" # should not enter a pending state for non BTC matches order_match = msg_data forward_asset_info = mongo_db.tracked_assets.find_one({"asset": order_match["forward_asset"]}) backward_asset_info = mongo_db.tracked_assets.find_one({"asset": order_match["backward_asset"]}) assert forward_asset_info and backward_asset_info base_asset, quote_asset = util.assets_to_asset_pair( order_match["forward_asset"], order_match["backward_asset"] ) # don't create trade records from order matches with BTC that are under the dust limit if ( order_match["forward_asset"] == config.BTC and order_match["forward_quantity"] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF ) or ( order_match["backward_asset"] == config.BTC and order_match["backward_quantity"] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF ): logging.debug( "Order match %s ignored due to %s under dust limit." % (order_match["tx0_hash"] + order_match["tx1_hash"], config.BTC) ) continue
def get_market_trades(asset1, asset2, addresses=None, limit=100, supplies=None):
    """Return the settled trades (order matches) for the asset1/asset2 market.

    BUGFIX: the ``limit`` parameter was previously accepted but never applied
    to the SQL query, so every matching row was always fetched. It is now
    clamped to 100 and passed through as a bound ``LIMIT``. Also replaces the
    mutable default argument ``addresses=[]`` with ``None``.

    :param asset1: one asset of the pair (ordering vs. asset2 is irrelevant;
        the canonical base/quote split is derived via assets_to_asset_pair)
    :param asset2: the other asset of the pair
    :param addresses: when non-empty, only matches involving these addresses
        are reported, with one trade entry per involved side
    :param limit: maximum number of order matches to fetch (capped at 100)
    :param supplies: optional pre-fetched supply info keyed by asset, as
        returned by ``get_assets_supply`` -- looked up here when omitted
    :return: list of trade dicts (match/source/block metadata plus
        type/price/amount/total expressed from the base asset's viewpoint)
    """
    addresses = [] if addresses is None else addresses
    limit = min(limit, 100)  # hard cap, consistent with the other market queries
    base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
    if not supplies:
        supplies = get_assets_supply([asset1, asset2])
    market_trades = []

    sources = ""
    bindings = ["expired"]
    if len(addresses) > 0:
        placeholder = ",".join(["?"] * len(addresses))
        sources = """AND (tx0_address IN ({}) OR tx1_address IN ({}))""".format(placeholder, placeholder)
        bindings += addresses + addresses

    sql = """SELECT order_matches.*, blocks.block_time FROM order_matches INNER JOIN blocks ON order_matches.block_index=blocks.block_index
             WHERE status != ? {} AND forward_asset IN (?, ?) AND backward_asset IN (?, ?)
             ORDER BY block_index DESC LIMIT ?""".format(
        sources
    )
    bindings += [asset1, asset2, asset1, asset2, limit]

    order_matches = util.call_jsonrpc_api("sql", {"query": sql, "bindings": bindings})["result"]

    for order_match in order_matches:
        # tx0's perspective is only reported when explicitly filtering by address
        if order_match["tx0_address"] in addresses:
            trade = {}
            trade["match_id"] = order_match["id"]
            trade["source"] = order_match["tx0_address"]
            trade["countersource"] = order_match["tx1_address"]
            trade["block_index"] = order_match["block_index"]
            trade["block_time"] = order_match["block_time"]
            trade["status"] = order_match["status"]
            if order_match["forward_asset"] == base_asset:
                # tx0 gave away the base asset: a SELL from tx0's point of view
                trade["type"] = "SELL"
                trade["price"] = format_price(
                    order_match["forward_quantity"],
                    order_match["backward_quantity"],
                    supplies[order_match["forward_asset"]][1],
                    supplies[order_match["backward_asset"]][1],
                )
                trade["amount"] = order_match["forward_quantity"]
                trade["total"] = order_match["backward_quantity"]
            else:
                trade["type"] = "BUY"
                trade["price"] = format_price(
                    order_match["backward_quantity"],
                    order_match["forward_quantity"],
                    supplies[order_match["backward_asset"]][1],
                    supplies[order_match["forward_asset"]][1],
                )
                trade["amount"] = order_match["backward_quantity"]
                trade["total"] = order_match["forward_quantity"]
            market_trades.append(trade)

        # tx1's perspective is the default view when no address filter is given
        if len(addresses) == 0 or order_match["tx1_address"] in addresses:
            trade = {}
            trade["match_id"] = order_match["id"]
            trade["source"] = order_match["tx1_address"]
            trade["countersource"] = order_match["tx0_address"]
            trade["block_index"] = order_match["block_index"]
            trade["block_time"] = order_match["block_time"]
            trade["status"] = order_match["status"]
            if order_match["backward_asset"] == base_asset:
                # tx1 gave away the base asset: a SELL from tx1's point of view
                trade["type"] = "SELL"
                trade["price"] = format_price(
                    order_match["backward_quantity"],
                    order_match["forward_quantity"],
                    supplies[order_match["backward_asset"]][1],
                    supplies[order_match["forward_asset"]][1],
                )
                trade["amount"] = order_match["backward_quantity"]
                trade["total"] = order_match["forward_quantity"]
            else:
                trade["type"] = "BUY"
                trade["price"] = format_price(
                    order_match["forward_quantity"],
                    order_match["backward_quantity"],
                    supplies[order_match["forward_asset"]][1],
                    supplies[order_match["backward_asset"]][1],
                )
                trade["amount"] = order_match["forward_quantity"]
                trade["total"] = order_match["backward_quantity"]
            market_trades.append(trade)

    return market_trades
def compile_asset_pair_market_info():
    """Compiles the pair-level statistics that show on the View Prices page of counterwallet, for instance.

    For every asset pair that has open orders and/or completed trades over the
    last 24 hours, this (re)computes: open order count, lowest ask, highest
    bid, completed trade count, 24h base/quote volume (also expressed in XCP
    and BTC where a price can be derived), and the 24h percent price change.
    Results are upserted into the ``asset_pair_market_info`` mongo collection,
    and pairs not touched in this run are removed afterwards.
    """
    #loop through all open orders, and compile a listing of pairs, with a count of open orders for each pair
    mongo_db = config.mongo_db
    end_dt = datetime.datetime.utcnow()
    start_dt = end_dt - datetime.timedelta(days=1)  #24-hour statistics window
    #NOTE(review): these block indexes are computed but not referenced below -- confirm before removing
    start_block_index, end_block_index = util.get_block_indexes_for_dates(start_dt=start_dt, end_dt=end_dt)
    #fetch every open, unexpired order with remaining quantity on both sides
    open_orders = util.call_jsonrpc_api("get_orders",
        { 'filters': [
            {'field': 'give_remaining', 'op': '>', 'value': 0},
            {'field': 'get_remaining', 'op': '>', 'value': 0},
            {'field': 'fee_required_remaining', 'op': '>=', 'value': 0},
            {'field': 'fee_provided_remaining', 'op': '>=', 'value': 0},
          ],
          'status': 'open',
          'show_expired': False,
        }, abort_on_error=True)['result']
    pair_data = {}   #pair string ("BASE/QUOTE") -> compiled stats dict
    asset_info = {}  #per-asset cache of tracked_assets documents, to avoid repeated mongo lookups

    def get_price(base_quantity_normalized, quote_quantity_normalized):
        #price of base in terms of quote (quote per unit of base)
        #NOTE(review): the division happens in float before the Decimal wrap, so D() adds no precision here
        return float(D(quote_quantity_normalized / base_quantity_normalized))

    #COMPOSE order depth, lowest ask, and highest bid column data
    for o in open_orders:
        (base_asset, quote_asset) = util.assets_to_asset_pair(o['give_asset'], o['get_asset'])
        pair = '%s/%s' % (base_asset, quote_asset)
        #cache-or-fetch the divisibility info for both assets
        base_asset_info = asset_info.get(base_asset, mongo_db.tracked_assets.find_one({ 'asset': base_asset }))
        if base_asset not in asset_info: asset_info[base_asset] = base_asset_info
        quote_asset_info = asset_info.get(quote_asset, mongo_db.tracked_assets.find_one({ 'asset': quote_asset }))
        if quote_asset not in asset_info: asset_info[quote_asset] = quote_asset_info

        pair_data.setdefault(pair, {'open_orders_count': 0, 'lowest_ask': None, 'highest_bid': None,
            'completed_trades_count': 0, 'vol_base': 0, 'vol_quote': 0})
        #^ highest ask = open order selling base, highest bid = open order buying base
        #^ we also initialize completed_trades_count, vol_base, vol_quote because every pair inited here may
        # not have cooresponding data out of the trades_data_by_pair aggregation below
        pair_data[pair]['open_orders_count'] += 1
        #normalize whichever side of the order corresponds to base/quote (divisible assets -> float units)
        base_quantity_normalized = util_bitcoin.normalize_quantity(o['give_quantity'] if base_asset == o['give_asset'] else o['get_quantity'], base_asset_info['divisible'])
        quote_quantity_normalized = util_bitcoin.normalize_quantity(o['give_quantity'] if quote_asset == o['give_asset'] else o['get_quantity'], quote_asset_info['divisible'])
        order_price = get_price(base_quantity_normalized, quote_quantity_normalized)
        if base_asset == o['give_asset']: #selling base
            if pair_data[pair]['lowest_ask'] is None or order_price < pair_data[pair]['lowest_ask']:
                pair_data[pair]['lowest_ask'] = order_price
        elif base_asset == o['get_asset']: #buying base
            if pair_data[pair]['highest_bid'] is None or order_price > pair_data[pair]['highest_bid']:
                pair_data[pair]['highest_bid'] = order_price

    #COMPOSE volume data (in XCP and BTC), and % change data
    #loop through all trade volume over the past 24h, and match that to the open orders
    trades_data_by_pair = mongo_db.trades.aggregate([
        {"$match": {
            "block_time": {"$gte": start_dt, "$lte": end_dt } }
        },
        {"$project": {
            "base_asset": 1,
            "quote_asset": 1,
            "base_quantity_normalized": 1, #to derive base volume
            "quote_quantity_normalized": 1 #to derive quote volume
        }},
        {"$group": {
            "_id": {"base_asset": "$base_asset", "quote_asset": "$quote_asset"},
            "vol_base": {"$sum": "$base_quantity_normalized"},
            "vol_quote": {"$sum": "$quote_quantity_normalized"},
            "count": {"$sum": 1},
        }}
    ])
    #legacy pymongo returns an {'ok': ..., 'result': [...]} dict from aggregate()
    trades_data_by_pair = [] if not trades_data_by_pair['ok'] else trades_data_by_pair['result']
    for e in trades_data_by_pair:
        pair = '%s/%s' % (e['_id']['base_asset'], e['_id']['quote_asset'])
        pair_data.setdefault(pair, {'open_orders_count': 0, 'lowest_ask': None, 'highest_bid': None})
        #^ initialize an empty pair in the event there are no open orders for that pair, but there ARE completed trades for it
        pair_data[pair]['completed_trades_count'] = e['count']
        pair_data[pair]['vol_base'] = e['vol_base']
        pair_data[pair]['vol_quote'] = e['vol_quote']

    #compose price data, relative to BTC and XCP
    mps_xcp_btc, xcp_btc_price, btc_xcp_price = get_price_primatives()
    for pair, e in pair_data.iteritems():  #Python 2 API (dict.iteritems)
        base_asset, quote_asset = pair.split('/')
        _24h_vol_in_btc = None
        _24h_vol_in_xcp = None
        #derive asset price data, expressed in BTC and XCP, for the given volumes
        if base_asset == config.XCP:
            _24h_vol_in_xcp = e['vol_base']
            _24h_vol_in_btc = util_bitcoin.round_out(e['vol_base'] * xcp_btc_price) if xcp_btc_price else 0
        elif base_asset == config.BTC:
            _24h_vol_in_xcp = util_bitcoin.round_out(e['vol_base'] * btc_xcp_price) if btc_xcp_price else 0
            _24h_vol_in_btc = e['vol_base']
        else: #base is not XCP or BTC
            price_summary_in_xcp, price_summary_in_btc, price_in_xcp, price_in_btc, aggregated_price_in_xcp, aggregated_price_in_btc = \
                get_xcp_btc_price_info(base_asset, mps_xcp_btc, xcp_btc_price, btc_xcp_price, with_last_trades=0, start_dt=start_dt, end_dt=end_dt)
            if price_in_xcp:
                _24h_vol_in_xcp = util_bitcoin.round_out(e['vol_base'] * price_in_xcp)
            if price_in_btc:
                _24h_vol_in_btc = util_bitcoin.round_out(e['vol_base'] * price_in_btc)

            if _24h_vol_in_xcp is None or _24h_vol_in_btc is None:
                #the base asset didn't have price data against BTC or XCP, or both...try against the quote asset instead
                price_summary_in_xcp, price_summary_in_btc, price_in_xcp, price_in_btc, aggregated_price_in_xcp, aggregated_price_in_btc = \
                    get_xcp_btc_price_info(quote_asset, mps_xcp_btc, xcp_btc_price, btc_xcp_price, with_last_trades=0, start_dt=start_dt, end_dt=end_dt)
                if _24h_vol_in_xcp is None and price_in_xcp:
                    _24h_vol_in_xcp = util_bitcoin.round_out(e['vol_quote'] * price_in_xcp)
                if _24h_vol_in_btc is None and price_in_btc:
                    _24h_vol_in_btc = util_bitcoin.round_out(e['vol_quote'] * price_in_btc)
        pair_data[pair]['24h_vol_in_{}'.format(config.XCP.lower())] = _24h_vol_in_xcp #might still be None
        pair_data[pair]['24h_vol_in_{}'.format(config.BTC.lower())] = _24h_vol_in_btc #might still be None

        #get % change stats -- start by getting the first trade directly before the 24h period starts
        prev_trade = mongo_db.trades.find({
            "base_asset": base_asset,
            "quote_asset": quote_asset,
            "block_time": {'$lt': start_dt}}).sort('block_time', pymongo.DESCENDING).limit(1)
        latest_trade = mongo_db.trades.find({
            "base_asset": base_asset,
            "quote_asset": quote_asset}).sort('block_time', pymongo.DESCENDING).limit(1)
        if not prev_trade.count(): #no previous trade before this 24hr period
            pair_data[pair]['24h_pct_change'] = None
        else:
            prev_trade = prev_trade[0]
            latest_trade = latest_trade[0]
            prev_trade_price = get_price(prev_trade['base_quantity_normalized'], prev_trade['quote_quantity_normalized'])
            latest_trade_price = get_price(latest_trade['base_quantity_normalized'], latest_trade['quote_quantity_normalized'])
            pair_data[pair]['24h_pct_change'] = ((latest_trade_price - prev_trade_price) / prev_trade_price) * 100
        pair_data[pair]['last_updated'] = end_dt
        #print "PRODUCED", pair, pair_data[pair]
        #upsert so re-runs refresh the existing document for the pair
        mongo_db.asset_pair_market_info.update( {'base_asset': base_asset, 'quote_asset': quote_asset}, {"$set": pair_data[pair]}, upsert=True)

    #remove any old pairs that were not just updated
    mongo_db.asset_pair_market_info.remove({'last_updated': {'$lt': end_dt}})

    logging.info("Recomposed 24h trade statistics for %i asset pairs: %s" % (len(pair_data), ', '.join(pair_data.keys())))
#an order is being updated to a completed status (i.e. a BTCpay has completed) tx0_hash, tx1_hash = msg_data['order_match_id'][:64], msg_data['order_match_id'][64:] #get the order_match this btcpay settles order_match = util.call_jsonrpc_api("get_order_matches", {'filters': [ {'field': 'tx0_hash', 'op': '==', 'value': tx0_hash}, {'field': 'tx1_hash', 'op': '==', 'value': tx1_hash}] }, abort_on_error=True)['result'][0] else: assert msg_data['status'] == 'completed' #should not enter a pending state for non BTC matches order_match = msg_data forward_asset_info = mongo_db.tracked_assets.find_one({'asset': order_match['forward_asset']}) backward_asset_info = mongo_db.tracked_assets.find_one({'asset': order_match['backward_asset']}) assert forward_asset_info and backward_asset_info base_asset, quote_asset = util.assets_to_asset_pair(order_match['forward_asset'], order_match['backward_asset']) #don't create trade records from order matches with BTC that are under the dust limit if (order_match['forward_asset'] == config.BTC and order_match['forward_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF) \ or (order_match['backward_asset'] == config.BTC and order_match['backward_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF): logging.debug("Order match %s ignored due to %s under dust limit." % (order_match['tx0_hash'] + order_match['tx1_hash'], config.BTC)) continue #take divisible trade quantities to floating point forward_quantity = util_bitcoin.normalize_quantity(order_match['forward_quantity'], forward_asset_info['divisible']) backward_quantity = util_bitcoin.normalize_quantity(order_match['backward_quantity'], backward_asset_info['divisible']) #compose trade trade = { 'block_index': cur_block_index, 'block_time': cur_block['block_time_obj'],
def get_market_trades(asset1, asset2, addresses=None, limit=50, supplies=None):
    """Return the settled trades (order matches) for the asset1/asset2 market.

    FIX: replaces the mutable default argument ``addresses=[]`` with ``None``
    (a shared-list default is a classic Python pitfall).

    :param asset1: one asset of the pair (ordering vs. asset2 is irrelevant;
        the canonical base/quote split is derived via assets_to_asset_pair)
    :param asset2: the other asset of the pair
    :param addresses: when non-empty, only matches involving these addresses
        are reported, with one trade entry per involved side
    :param limit: maximum number of order matches to fetch (capped at 100)
    :param supplies: optional pre-fetched supply info keyed by asset, as
        returned by ``get_assets_supply`` -- looked up here when omitted
    :return: list of trade dicts (match/source/block metadata plus
        type/price/amount/total expressed from the base asset's viewpoint)
    """
    addresses = [] if addresses is None else addresses
    limit = min(limit, 100)
    base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
    if not supplies:
        supplies = get_assets_supply([asset1, asset2])
    market_trades = []

    sources = ''
    bindings = ['expired']
    if len(addresses) > 0:
        placeholder = ','.join(['?'] * len(addresses))
        sources = '''AND (tx0_address IN ({}) OR tx1_address IN ({}))'''.format(placeholder, placeholder)
        bindings += addresses + addresses

    sql = '''SELECT order_matches.*, blocks.block_time FROM order_matches INNER JOIN blocks ON order_matches.block_index=blocks.block_index
             WHERE status != ? {} AND forward_asset IN (?, ?) AND backward_asset IN (?, ?)
             ORDER BY block_index DESC LIMIT ?'''.format(sources)
    bindings += [asset1, asset2, asset1, asset2, limit]

    order_matches = util.call_jsonrpc_api('sql', {'query': sql, 'bindings': bindings})['result']

    for order_match in order_matches:
        #tx0's perspective is only reported when explicitly filtering by address
        if order_match['tx0_address'] in addresses:
            trade = {}
            trade['match_id'] = order_match['id']
            trade['source'] = order_match['tx0_address']
            trade['countersource'] = order_match['tx1_address']
            trade['block_index'] = order_match['block_index']
            trade['block_time'] = order_match['block_time']
            trade['status'] = order_match['status']
            if order_match['forward_asset'] == base_asset:
                #tx0 gave away the base asset: a SELL from tx0's point of view
                trade['type'] = 'SELL'
                trade['price'] = calculate_price(order_match['forward_quantity'], order_match['backward_quantity'],
                                                 supplies[order_match['forward_asset']][1], supplies[order_match['backward_asset']][1], 'SELL')
                trade['amount'] = order_match['forward_quantity']
                trade['total'] = order_match['backward_quantity']
            else:
                trade['type'] = 'BUY'
                trade['price'] = calculate_price(order_match['backward_quantity'], order_match['forward_quantity'],
                                                 supplies[order_match['backward_asset']][1], supplies[order_match['forward_asset']][1], 'BUY')
                trade['amount'] = order_match['backward_quantity']
                trade['total'] = order_match['forward_quantity']
            market_trades.append(trade)

        #tx1's perspective is the default view when no address filter is given
        if len(addresses) == 0 or order_match['tx1_address'] in addresses:
            trade = {}
            trade['match_id'] = order_match['id']
            trade['source'] = order_match['tx1_address']
            trade['countersource'] = order_match['tx0_address']
            trade['block_index'] = order_match['block_index']
            trade['block_time'] = order_match['block_time']
            trade['status'] = order_match['status']
            if order_match['backward_asset'] == base_asset:
                #tx1 gave away the base asset: a SELL from tx1's point of view
                trade['type'] = 'SELL'
                trade['price'] = calculate_price(order_match['backward_quantity'], order_match['forward_quantity'],
                                                 supplies[order_match['backward_asset']][1], supplies[order_match['forward_asset']][1], 'SELL')
                trade['amount'] = order_match['backward_quantity']
                trade['total'] = order_match['forward_quantity']
            else:
                trade['type'] = 'BUY'
                trade['price'] = calculate_price(order_match['forward_quantity'], order_match['backward_quantity'],
                                                 supplies[order_match['forward_asset']][1], supplies[order_match['backward_asset']][1], 'BUY')
                trade['amount'] = order_match['forward_quantity']
                trade['total'] = order_match['backward_quantity']
            market_trades.append(trade)

    return market_trades
def get_market_orders(asset1, asset2, addresses=None, supplies=None, min_fee_provided=0.95, max_fee_required=0.95):
    """Return the open order book (or a user's open orders) for an asset pair.

    BUGFIX: the BTC fee-exclusion test previously used plain division
    (``order['fee_provided'] / (order['give_quantity'] / 100)``); under
    Python 2 that floor-divides and raises ZeroDivisionError whenever the
    quantity is below 100 satoshis. The exclusion test now uses the same
    exact Decimal computation that was already used for the displayed value.
    Also replaces the mutable default argument ``addresses=[]`` with ``None``
    and guards against degenerate zero-quantity orders.

    :param asset1: one asset of the pair
    :param asset2: the other asset of the pair
    :param addresses: when non-empty, restrict to orders from these sources
        and include per-order detail (tx info, completion %); when empty,
        produce an aggregated book (rows merged at equal price levels)
    :param supplies: optional pre-fetched supply info keyed by asset
    :param min_fee_provided: minimum BTC fee-provided %% for a BTC-give order
        to be shown
    :param max_fee_required: maximum BTC fee-required %% for a BTC-get order
        to be shown
    :return: list of order dicts (type/amount/total/price, plus fee and
        per-order fields where applicable)
    """
    addresses = [] if addresses is None else addresses
    base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
    if not supplies:
        supplies = get_assets_supply([asset1, asset2])
    market_orders = []

    sql = '''SELECT orders.*, blocks.block_time FROM orders INNER JOIN blocks ON orders.block_index=blocks.block_index WHERE status = ? '''
    bindings = ['open']
    if len(addresses) > 0:
        sql += '''AND source IN ({}) '''.format(','.join(['?'] * len(addresses)))
        bindings += addresses
    sql += '''AND give_remaining > 0 AND give_asset IN (?, ?) AND get_asset IN (?, ?) ORDER BY tx_index DESC'''
    bindings += [asset1, asset2, asset1, asset2]

    orders = util.call_jsonrpc_api('sql', {'query': sql, 'bindings': bindings})['result']

    for order in orders:
        user_order = {}
        exclude = False
        #NOTE(review): 'BTC' is hard-coded here while other code paths use config.BTC -- confirm intent
        if order['give_asset'] == 'BTC':
            if order['give_quantity']:
                #fee provided, as a percentage of the BTC quantity given
                fee_provided = D(order['fee_provided']) / (D(order['give_quantity']) / D(100))
                exclude = fee_provided < min_fee_provided
                user_order['fee_provided'] = format(fee_provided, '.2f')
            else:
                exclude = True  #degenerate zero-quantity order: nothing meaningful to show
        elif order['get_asset'] == 'BTC':
            if order['get_quantity']:
                #fee required, as a percentage of the BTC quantity asked for
                fee_required = D(order['fee_required']) / (D(order['get_quantity']) / D(100))
                exclude = fee_required > max_fee_required
                user_order['fee_required'] = format(fee_required, '.2f')
            else:
                exclude = True

        if not exclude:
            if order['give_asset'] == base_asset:
                price = calculate_price(order['give_quantity'], order['get_quantity'], supplies[order['give_asset']][1], supplies[order['get_asset']][1])
                user_order['type'] = 'SELL'
                user_order['amount'] = order['give_remaining']
                user_order['total'] = int(order['give_remaining'] * price)
            else:
                price = calculate_price(order['get_quantity'], order['give_quantity'], supplies[order['get_asset']][1], supplies[order['give_asset']][1])
                user_order['type'] = 'BUY'
                user_order['total'] = order['give_remaining']
                user_order['amount'] = int(order['give_remaining'] / price)
            user_order['price'] = format(price, '.8f')

            if len(addresses) == 0 and len(market_orders) > 0:
                #aggregated book view: merge consecutive rows at the same side and price level
                previous_order = market_orders[-1]
                if previous_order['type'] == user_order['type'] and previous_order['price'] == user_order['price']:
                    market_orders[-1]['amount'] += user_order['amount']
                    market_orders[-1]['total'] += user_order['total']
                    exclude = True  #folded into the previous row; don't append

            if len(addresses) > 0:
                #per-address view: include order identity and fill progress
                completed = format(((D(order['give_quantity']) - D(order['give_remaining'])) / D(order['give_quantity'])) * D(100), '.2f')
                user_order['completion'] = "{}%".format(completed)
                user_order['tx_index'] = order['tx_index']
                user_order['tx_hash'] = order['tx_hash']
                user_order['source'] = order['source']
                user_order['block_index'] = order['block_index']
                user_order['block_time'] = order['block_time']

            if not exclude:
                market_orders.append(user_order)

    return market_orders
#an order is being updated to a completed status (i.e. a METpay has completed) tx0_hash, tx1_hash = msg_data['order_match_id'][:64], msg_data['order_match_id'][64:] #get the order_match this metpay settles order_match = util.call_jsonrpc_api("get_order_matches", {'filters': [ {'field': 'tx0_hash', 'op': '==', 'value': tx0_hash}, {'field': 'tx1_hash', 'op': '==', 'value': tx1_hash}] }, abort_on_error=True)['result'][0] else: assert msg_data['status'] == 'completed' #should not enter a pending state for non MET matches order_match = msg_data forward_asset_info = mongo_db.tracked_assets.find_one({'asset': order_match['forward_asset']}) backward_asset_info = mongo_db.tracked_assets.find_one({'asset': order_match['backward_asset']}) assert forward_asset_info and backward_asset_info base_asset, quote_asset = util.assets_to_asset_pair(order_match['forward_asset'], order_match['backward_asset']) #don't create trade records from order matches with MET that are under the dust limit if (order_match['forward_asset'] == config.MET and order_match['forward_quantity'] <= config.ORDER_MET_DUST_LIMIT_CUTOFF) \ or (order_match['backward_asset'] == config.MET and order_match['backward_quantity'] <= config.ORDER_MET_DUST_LIMIT_CUTOFF): logging.debug("Order match %s ignored due to %s under dust limit." % (order_match['tx0_hash'] + order_match['tx1_hash'], config.MET)) continue #take divisible trade quantities to floating point forward_quantity = util_metrocoin.normalize_quantity(order_match['forward_quantity'], forward_asset_info['divisible']) backward_quantity = util_metrocoin.normalize_quantity(order_match['backward_quantity'], backward_asset_info['divisible']) #compose trade trade = { 'block_index': cur_block_index, 'block_time': cur_block['block_time_obj'],
def process_cpd_blockfeed():
    """Main daemon loop: follow the energypartyd message feed block-by-block.

    Polls ``get_running_info`` over JSON-RPC, catches up one block at a time via
    ``get_block_info``/``get_messages``, and mirrors the results into mongo
    collections (processed_blocks, tracked_assets, balance_changes, trades, ...).
    Also handles reorgs, backend DB-version changes (full rebuild), and — once
    caught up — spawns the recurring market-info compilation greenlets.
    Runs forever; never returns.
    """
    # Sentinel "no blocks processed yet" record used whenever state is wiped.
    LATEST_BLOCK_INIT = {
        'block_index': config.BLOCK_FIRST,
        'block_time': None,
        'block_hash': None
    }
    mongo_db = config.mongo_db

    def blow_away_db():
        """Drop every derived collection and re-seed app_config; returns the fresh app_config doc."""
        mongo_db.processed_blocks.drop()
        mongo_db.tracked_assets.drop()
        mongo_db.trades.drop()
        mongo_db.balance_changes.drop()
        mongo_db.asset_market_info.drop()
        mongo_db.asset_marketcap_history.drop()
        mongo_db.pair_market_info.drop()
        mongo_db.btc_open_orders.drop()
        mongo_db.asset_extended_info.drop()
        mongo_db.transaction_stats.drop()
        mongo_db.feeds.drop()
        mongo_db.wallet_stats.drop()
        mongo_db.wallet_messages.drop()
        mongo_db.app_config.update(
            {}, {
                'db_version': config.DB_VERSION,
                'running_testnet': config.TESTNET,
                'counterpartyd_db_version_major': None,
                'counterpartyd_db_version_minor': None,
                'counterpartyd_running_testnet': None,
                'last_block_assets_compiled': config.
                BLOCK_FIRST,  #for asset data compilation in events.py (resets on reparse as well)
            },
            upsert=True)
        app_config = mongo_db.app_config.find()[0]
        # Seed the two built-in assets so balance/trade tracking has records to update.
        for asset in [config.XCP, config.BTC]:
            base_asset = {
                'asset': asset,
                'owner': None,
                'divisible': True,
                'locked': False,
                'total_issued': None,
                '_at_block': config.BLOCK_FIRST,  #the block ID this asset is current for
                '_history': []  #to allow for block rollbacks
            }
            mongo_db.tracked_assets.insert(base_asset)
        mongo_db.wallet_messages.insert({
            '_id': 0,
            'when': calendar.timegm(time.gmtime()),
            'message': None,
        })
        #reinitialize some internal counters
        config.CURRENT_BLOCK_INDEX = 0
        config.LAST_MESSAGE_INDEX = -1
        config.cw_last_message_seq = 0
        return app_config

    def prune_my_stale_blocks(max_block_index):
        """Roll local state back to ``max_block_index`` (inclusive) and return that block's record.

        Removes every derived record newer than the target block and rewinds
        tracked assets using their embedded ``_history`` snapshots.
        Raises if the target block was never processed.
        """
        assert isinstance(max_block_index, int)
        if max_block_index <= config.BLOCK_FIRST:
            max_block_index = config.BLOCK_FIRST + 1
        if not mongo_db.processed_blocks.find_one(
            {"block_index": max_block_index}):
            raise Exception(
                "Can't roll back to specified block index: %i doesn't exist in database"
                % max_block_index)
        logging.warn("Pruning to block %i ..." % (max_block_index))
        mongo_db.processed_blocks.remove(
            {"block_index": {
                "$gt": max_block_index
            }})
        mongo_db.balance_changes.remove(
            {"block_index": {
                "$gt": max_block_index
            }})
        mongo_db.trades.remove({"block_index": {"$gt": max_block_index}})
        mongo_db.asset_marketcap_history.remove(
            {"block_index": {
                "$gt": max_block_index
            }})
        mongo_db.transaction_stats.remove(
            {"block_index": {
                "$gt": max_block_index
            }})
        #to roll back the state of the tracked asset, dive into the history object for each asset that has
        # been updated on or after the block that we are pruning back to
        assets_to_prune = mongo_db.tracked_assets.find(
            {'_at_block': {
                "$gt": max_block_index
            }})
        for asset in assets_to_prune:
            logging.info(
                "Pruning asset %s (last modified @ block %i, pruning to state at block %i)"
                % (asset['asset'], asset['_at_block'], max_block_index))
            prev_ver = None
            # Pop history entries until we find a snapshot at or before the target block.
            while len(asset['_history']):
                prev_ver = asset['_history'].pop()
                if prev_ver['_at_block'] <= max_block_index:
                    break
            if prev_ver:
                if prev_ver['_at_block'] > max_block_index:
                    #even the first history version is newer than max_block_index.
                    #in this case, just remove the asset tracking record itself
                    mongo_db.tracked_assets.remove({'asset': asset['asset']})
                else:
                    #if here, we were able to find a previous version that was saved at or before max_block_index
                    # (which should be prev_ver ... restore asset's values to its values
                    prev_ver['_id'] = asset['_id']
                    prev_ver['_history'] = asset['_history']
                    mongo_db.tracked_assets.save(prev_ver)
        config.LAST_MESSAGE_INDEX = -1
        config.CAUGHT_UP = False
        latest_block = mongo_db.processed_blocks.find_one(
            {"block_index": max_block_index})
        return latest_block

    def publish_mempool_tx():
        """fetch new tx from mempool"""
        tx_hashes = []
        mempool_txs = mongo_db.mempool.find(projection={'tx_hash': True})
        for mempool_tx in mempool_txs:
            tx_hashes.append(str(mempool_tx['tx_hash']))
        params = None
        # Only ask the backend for mempool entries we have not already seen/stored.
        if len(tx_hashes) > 0:
            params = {
                'filters': [{
                    'field': 'tx_hash',
                    'op': 'NOT IN',
                    'value': tx_hashes
                }, {
                    'field': 'category',
                    'op': 'IN',
                    'value': ['sends', 'btcpays', 'issuances', 'dividends']
                }],
                'filterop': 'AND'
            }
        new_txs = util.call_jsonrpc_api("get_mempool", params, abort_on_error=True)
        for new_tx in new_txs['result']:
            tx = {
                'tx_hash': new_tx['tx_hash'],
                'command': new_tx['command'],
                'category': new_tx['category'],
                'bindings': new_tx['bindings'],
                'timestamp': new_tx['timestamp'],
                'viewed_in_block': config.CURRENT_BLOCK_INDEX
            }
            mongo_db.mempool.insert(tx)
            del (tx['_id'])
            tx['_category'] = tx['category']
            tx['_message_index'] = 'mempool'
            logging.debug("Spotted mempool tx: %s" % tx)
            util.store_wallet_message(tx, json.loads(tx['bindings']), decorate=False)

    def clean_mempool_tx():
        """clean mempool transactions older than MAX_REORG_NUM_BLOCKS blocks"""
        mongo_db.mempool.remove({
            "viewed_in_block": {
                "$lt": config.CURRENT_BLOCK_INDEX - config.MAX_REORG_NUM_BLOCKS
            }
        })

    #reset in-process counters before (re)entering the feed loop
    config.CURRENT_BLOCK_INDEX = 0
    config.LAST_MESSAGE_INDEX = -1
    config.BLOCKCHAIN_SERVICE_LAST_BLOCK = 0  #simply for printing/alerting purposes
    config.CAUGHT_UP_STARTED_EVENTS = False
    #^ set after we are caught up and start up the recurring events that depend on us being caught up with the blockchain

    #grab our stored preferences, and rebuild the database if necessary
    app_config = mongo_db.app_config.find()
    assert app_config.count() in [0, 1]
    if (app_config.count() == 0 or config.REPARSE_FORCED
            or app_config[0]['db_version'] != config.DB_VERSION
            or app_config[0]['running_testnet'] != config.TESTNET):
        if app_config.count():
            logging.warn(
                "energyblockd database version UPDATED (from %i to %i) or testnet setting changed (from %s to %s), or REINIT forced (%s). REBUILDING FROM SCRATCH ..."
                % (app_config[0]['db_version'], config.DB_VERSION,
                   app_config[0]['running_testnet'], config.TESTNET,
                   config.REPARSE_FORCED))
        else:
            logging.warn(
                "energyblockd database app_config collection doesn't exist. BUILDING FROM SCRATCH..."
            )
        app_config = blow_away_db()
        my_latest_block = LATEST_BLOCK_INIT
    else:
        app_config = app_config[0]
        #get the last processed block out of mongo
        my_latest_block = mongo_db.processed_blocks.find_one(
            sort=[("block_index", pymongo.DESCENDING)])
        if my_latest_block:
            my_latest_block = prune_my_stale_blocks(
                my_latest_block['block_index'])
        else:
            # NOTE(review): assigns the LATEST_BLOCK_INIT dict to CURRENT_BLOCK_INDEX
            # (an int everywhere else); looks like it was meant to be
            # `my_latest_block = LATEST_BLOCK_INIT` — confirm before changing.
            config.CURRENT_BLOCK_INDEX = LATEST_BLOCK_INIT

    while True:
        # 1) contact the backend; retry forever on failure
        try:
            running_info = util.call_jsonrpc_api("get_running_info", abort_on_error=True)
            if 'result' not in running_info:
                raise AssertionError("Could not contact energypartyd")
            running_info = running_info['result']
        except Exception as e:
            logging.warn(
                str(e) + " -- Waiting 30 seconds before trying again...")
            time.sleep(30)
            continue
        if running_info['last_message_index'] == -1:
            logging.warn(
                "No last_message_index returned. Waiting until energypartyd has messages..."
            )
            time.sleep(30)
            continue

        # 2) detect backend DB version / testnet changes; wipe and/or record prefs as needed
        wipeState = False
        updatePrefs = False
        if app_config['counterpartyd_db_version_major'] is None \
           or app_config['counterpartyd_db_version_minor'] is None \
           or app_config['counterpartyd_running_testnet'] is None:
            updatePrefs = True
        elif running_info['version_major'] != app_config[
                'counterpartyd_db_version_major']:
            logging.warn(
                "energypartyd MAJOR DB version change (we built from %s, energypartyd is at %s). Wiping our state data."
                % (app_config['counterpartyd_db_version_major'],
                   running_info['version_major']))
            wipeState = True
            updatePrefs = True
        elif running_info['version_minor'] != app_config[
                'counterpartyd_db_version_minor']:
            logging.warn(
                "energypartyd MINOR DB version change (we built from %s.%s, energypartyd is at %s.%s). Wiping our state data."
                % (app_config['counterpartyd_db_version_major'],
                   app_config['counterpartyd_db_version_minor'],
                   running_info['version_major'],
                   running_info['version_minor']))
            wipeState = True
            updatePrefs = True
        elif running_info.get(
                'running_testnet',
                False) != app_config['counterpartyd_running_testnet']:
            logging.warn(
                "energypartyd testnet setting change (from %s to %s). Wiping our state data."
                % (app_config['counterpartyd_running_testnet'],
                   running_info['running_testnet']))
            wipeState = True
            updatePrefs = True
        if wipeState:
            app_config = blow_away_db()
        if updatePrefs:
            app_config['counterpartyd_db_version_major'] = running_info[
                'version_major']
            app_config['counterpartyd_db_version_minor'] = running_info[
                'version_minor']
            app_config['counterpartyd_running_testnet'] = running_info[
                'running_testnet']
            mongo_db.app_config.update({}, app_config)
            #reset my latest block record
            my_latest_block = LATEST_BLOCK_INIT
            config.CAUGHT_UP = False  #You've Come a Long Way, Baby

        last_processed_block = running_info['last_block']
        if last_processed_block['block_index'] is None:
            logging.warn(
                "energypartyd has no last processed block (probably is reparsing). Waiting 5 seconds before trying again..."
            )
            time.sleep(5)
            continue

        # 3) behind the backend: process exactly one block this iteration
        if my_latest_block['block_index'] < last_processed_block['block_index']:
            #need to catch up
            config.CAUGHT_UP = False
            cur_block_index = my_latest_block['block_index'] + 1
            #get the blocktime for the next block we have to process
            try:
                cur_block = util.call_jsonrpc_api(
                    "get_block_info", {'block_index': cur_block_index},
                    abort_on_error=True)['result']
            except Exception as e:
                logging.warn(
                    str(e) + " Waiting 5 seconds before trying again...")
                time.sleep(5)
                continue
            cur_block['block_time_obj'] = datetime.datetime.utcfromtimestamp(
                cur_block['block_time'])
            cur_block['block_time_str'] = cur_block[
                'block_time_obj'].isoformat()
            try:
                block_data = util.call_jsonrpc_api(
                    "get_messages", {'block_index': cur_block_index},
                    abort_on_error=True)['result']
            except Exception as e:
                logging.warn(
                    str(e) + " Waiting 15 seconds before trying again...")
                time.sleep(15)
                continue
            #parse out response (list of txns, ordered as they appeared in the block)
            for msg in block_data:
                msg_data = json.loads(msg['bindings'])
                # message indexes must be strictly sequential; a gap means we lost sync —
                # force a deep rollback and re-fetch
                if msg['message_index'] != config.LAST_MESSAGE_INDEX + 1 and config.LAST_MESSAGE_INDEX != -1:
                    logging.error(
                        "BUG: MESSAGE RECEIVED NOT WHAT WE EXPECTED. EXPECTED: %s, GOT: %s: %s (ALL MSGS IN get_messages PAYLOAD: %s)..."
                        % (config.LAST_MESSAGE_INDEX + 1,
                           msg['message_index'], msg,
                           [m['message_index'] for m in block_data]))
                    my_latest_block = prune_my_stale_blocks(
                        cur_block_index - config.MAX_FORCED_REORG_NUM_BLOCKS)
                    break  #sys.exit(1) #FOR NOW
                if msg['message_index'] <= config.LAST_MESSAGE_INDEX:
                    logging.warn("BUG: IGNORED old RAW message %s: %s ..."
                                 % (msg['message_index'], msg))
                    continue
                logging.info("Received message %s: %s ..."
                             % (msg['message_index'], msg))

                status = msg_data.get('status', 'valid').lower()
                # invalid messages are only forwarded to clients (when near tip), never tracked
                if status.startswith('invalid'):
                    if last_processed_block['block_index'] - my_latest_block[
                            'block_index'] < config.MAX_REORG_NUM_BLOCKS:
                        util.store_wallet_message(msg, msg_data)
                    config.LAST_MESSAGE_INDEX = msg['message_index']
                    continue

                #track message types, for compiling of statistics
                if msg['command'] == 'insert' \
                   and msg['category'] not in ["debits", "credits", "order_matches", "bet_matches",
                                               "order_expirations", "bet_expirations", "order_match_expirations",
                                               "bet_match_expirations", "bet_match_resolutions"]:
                    try:
                        mongo_db.transaction_stats.insert({
                            'block_index': cur_block_index,
                            'block_time': cur_block['block_time_obj'],
                            'category': msg['category']
                        })
                    except pymongo.errors.DuplicateKeyError as e:
                        logging.exception(e)

                #HANDLE REORGS
                if msg['command'] == 'reorg':
                    logging.warn("Blockchain reorginization at block %s"
                                 % msg_data['block_index'])
                    #prune back to and including the specified message_index
                    my_latest_block = prune_my_stale_blocks(
                        msg_data['block_index'] - 1)
                    config.CURRENT_BLOCK_INDEX = msg_data['block_index'] - 1
                    running_info = util.call_jsonrpc_api(
                        "get_running_info", abort_on_error=True)['result']
                    config.LAST_MESSAGE_INDEX = running_info[
                        'last_message_index']
                    if last_processed_block['block_index'] - my_latest_block[
                            'block_index'] < config.MAX_REORG_NUM_BLOCKS:
                        msg_data[
                            '_last_message_index'] = config.LAST_MESSAGE_INDEX
                        util.store_wallet_message(msg, msg_data)
                        event = util.decorate_message_for_feed(
                            msg, msg_data=msg_data)
                    break  #break out of inner loop

                #track assets
                if msg['category'] == 'issuances':
                    assets.parse_issuance(mongo_db, msg_data, cur_block_index,
                                          cur_block)

                #track balance changes for each address
                bal_change = None
                if msg['category'] in [
                        'credits',
                        'debits',
                ]:
                    actionName = 'credit' if msg[
                        'category'] == 'credits' else 'debit'
                    address = msg_data['address']
                    asset_info = mongo_db.tracked_assets.find_one(
                        {'asset': msg_data['asset']})
                    if asset_info is None:
                        logging.warn(
                            "Credit/debit of %s where asset ('%s') does not exist. Ignoring..."
                            % (msg_data['quantity'], msg_data['asset']))
                        continue
                    # debits are recorded as negative quantities
                    quantity = msg_data['quantity'] if msg[
                        'category'] == 'credits' else -msg_data['quantity']
                    quantity_normalized = util_bitcoin.normalize_quantity(
                        quantity, asset_info['divisible'])

                    #look up the previous balance to go off of
                    last_bal_change = mongo_db.balance_changes.find_one(
                        {
                            'address': address,
                            'asset': asset_info['asset']
                        },
                        sort=[("block_index", pymongo.DESCENDING),
                              ("_id", pymongo.DESCENDING)])

                    # coalesce multiple changes within the same block into one record
                    if last_bal_change \
                       and last_bal_change['block_index'] == cur_block_index:
                        last_bal_change['quantity'] += quantity
                        last_bal_change[
                            'quantity_normalized'] += quantity_normalized
                        last_bal_change['new_balance'] += quantity
                        last_bal_change[
                            'new_balance_normalized'] += quantity_normalized
                        mongo_db.balance_changes.save(last_bal_change)
                        logging.info(
                            "Procesed %s bal change (UPDATED) from tx %s :: %s"
                            % (actionName, msg['message_index'],
                               last_bal_change))
                        bal_change = last_bal_change
                    else:
                        #new balance change record for this block
                        bal_change = {
                            'address': address,
                            'asset': asset_info['asset'],
                            'block_index': cur_block_index,
                            'block_time': cur_block['block_time_obj'],
                            'quantity': quantity,
                            'quantity_normalized': quantity_normalized,
                            'new_balance': last_bal_change['new_balance'] + quantity if last_bal_change else quantity,
                            'new_balance_normalized': last_bal_change['new_balance_normalized'] + quantity_normalized if last_bal_change else quantity_normalized,
                        }
                        mongo_db.balance_changes.insert(bal_change)
                        logging.info(
                            "Procesed %s bal change from tx %s :: %s"
                            % (actionName, msg['message_index'], bal_change))

                #book trades
                if (msg['category'] == 'order_matches'
                        and ((msg['command'] == 'update'
                              and msg_data['status'] == 'completed')
                             or ('forward_asset' in msg_data
                                 and msg_data['forward_asset'] != config.BTC
                                 and msg_data['backward_asset'] != config.BTC))):
                    # an update to 'completed' means a BTCpay settled the match;
                    # look the order match up by its two component tx hashes
                    if msg['command'] == 'update' and msg_data[
                            'status'] == 'completed':
                        tx0_hash, tx1_hash = msg_data[
                            'order_match_id'][:64], msg_data['order_match_id'][
                                64:]
                        order_match = util.call_jsonrpc_api(
                            "get_order_matches", {
                                'filters': [{
                                    'field': 'tx0_hash',
                                    'op': '==',
                                    'value': tx0_hash
                                }, {
                                    'field': 'tx1_hash',
                                    'op': '==',
                                    'value': tx1_hash
                                }]
                            },
                            abort_on_error=False)['result'][0]
                    else:
                        assert msg_data['status'] == 'completed'
                        order_match = msg_data

                    forward_asset_info = mongo_db.tracked_assets.find_one(
                        {'asset': order_match['forward_asset']})
                    backward_asset_info = mongo_db.tracked_assets.find_one(
                        {'asset': order_match['backward_asset']})
                    assert forward_asset_info and backward_asset_info
                    base_asset, quote_asset = util.assets_to_asset_pair(
                        order_match['forward_asset'],
                        order_match['backward_asset'])

                    # skip dust-sized BTC matches
                    if (order_match['forward_asset'] == config.BTC and order_match['forward_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF) \
                       or (order_match['backward_asset'] == config.BTC and order_match['backward_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF):
                        logging.debug(
                            "Order match %s ignored due to %s under dust limit."
                            % (order_match['tx0_hash'] + order_match['tx1_hash'],
                               config.BTC))
                        continue

                    #take divisible trade quantities to floating point
                    forward_quantity = util_bitcoin.normalize_quantity(
                        order_match['forward_quantity'],
                        forward_asset_info['divisible'])
                    backward_quantity = util_bitcoin.normalize_quantity(
                        order_match['backward_quantity'],
                        backward_asset_info['divisible'])

                    #compose trade
                    trade = {
                        'block_index': cur_block_index,
                        'block_time': cur_block['block_time_obj'],
                        'message_index': msg['message_index'],  #secondary temporaral ordering off of when
                        'order_match_id': order_match['tx0_hash'] + '_' + order_match['tx1_hash'],
                        'order_match_tx0_index': order_match['tx0_index'],
                        'order_match_tx1_index': order_match['tx1_index'],
                        'order_match_tx0_address': order_match['tx0_address'],
                        'order_match_tx1_address': order_match['tx1_address'],
                        'base_asset': base_asset,
                        'quote_asset': quote_asset,
                        'base_quantity': order_match['forward_quantity'] if order_match['forward_asset'] == base_asset else order_match['backward_quantity'],
                        'quote_quantity': order_match['backward_quantity'] if order_match['forward_asset'] == base_asset else order_match['forward_quantity'],
                        'base_quantity_normalized': forward_quantity if order_match['forward_asset'] == base_asset else backward_quantity,
                        'quote_quantity_normalized': backward_quantity if order_match['forward_asset'] == base_asset else forward_quantity,
                    }
                    # unit prices quantized to 8 decimal places, banker's rounding
                    d = D(trade['quote_quantity_normalized']) / D(
                        trade['base_quantity_normalized'])
                    d = d.quantize(EIGHT_PLACES,
                                   rounding=decimal.ROUND_HALF_EVEN,
                                   context=decimal.Context(prec=20))
                    trade['unit_price'] = float(d)
                    d = D(trade['base_quantity_normalized']) / D(
                        trade['quote_quantity_normalized'])
                    d = d.quantize(EIGHT_PLACES,
                                   rounding=decimal.ROUND_HALF_EVEN,
                                   context=decimal.Context(prec=20))
                    trade['unit_price_inverse'] = float(d)
                    mongo_db.trades.insert(trade)
                    logging.info("Procesed Trade from tx %s :: %s"
                                 % (msg['message_index'], trade))

                #broadcast
                if msg['category'] == 'broadcasts':
                    betting.parse_broadcast(mongo_db, msg_data)

                if last_processed_block['block_index'] - my_latest_block[
                        'block_index'] < config.MAX_REORG_NUM_BLOCKS:
                    #send out the message to listening clients
                    util.store_wallet_message(msg, msg_data)

                #this is the last processed message index
                config.LAST_MESSAGE_INDEX = msg['message_index']

            # block fully processed — record it and advance our cursor
            new_block = {
                'block_index': cur_block_index,
                'block_time': cur_block['block_time_obj'],
                'block_hash': cur_block['block_hash'],
            }
            mongo_db.processed_blocks.insert(new_block)
            my_latest_block = new_block
            config.CURRENT_BLOCK_INDEX = cur_block_index
            # refresh the blockchain service's tip height when near it (or unset)
            if config.BLOCKCHAIN_SERVICE_LAST_BLOCK == 0 or config.BLOCKCHAIN_SERVICE_LAST_BLOCK - config.CURRENT_BLOCK_INDEX < config.MAX_REORG_NUM_BLOCKS:
                try:
                    block_height_response = blockchain.getinfo()
                except:
                    block_height_response = None
                config.BLOCKCHAIN_SERVICE_LAST_BLOCK = block_height_response[
                    'info']['blocks'] if block_height_response else 0
            logging.info(
                "Block: %i (message_index height=%s) (blockchain latest block=%s)"
                % (config.CURRENT_BLOCK_INDEX,
                   config.LAST_MESSAGE_INDEX
                   if config.LAST_MESSAGE_INDEX != -1 else '???',
                   config.BLOCKCHAIN_SERVICE_LAST_BLOCK
                   if config.BLOCKCHAIN_SERVICE_LAST_BLOCK else '???'))
            clean_mempool_tx()
        # 4) ahead of the backend (its DB was rolled back): prune to be safe
        elif my_latest_block['block_index'] > last_processed_block[
                'block_index']:
            logging.error(
                "Very odd: Ahead of energypartyd with block indexes! Pruning back %s blocks to be safe."
                % config.MAX_REORG_NUM_BLOCKS)
            my_latest_block = prune_my_stale_blocks(
                last_processed_block['block_index'] -
                config.MAX_REORG_NUM_BLOCKS)
        # 5) caught up: idle, watch mempool, and start recurring compilers once
        else:
            config.CAUGHT_UP = running_info['db_caught_up']
            if config.LAST_MESSAGE_INDEX == -1 or config.CURRENT_BLOCK_INDEX == 0:
                if config.LAST_MESSAGE_INDEX == -1:
                    config.LAST_MESSAGE_INDEX = running_info[
                        'last_message_index']
                if config.CURRENT_BLOCK_INDEX == 0:
                    config.CURRENT_BLOCK_INDEX = running_info['last_block'][
                        'block_index']
                logging.info(
                    "Detected blocks caught up on startup. Setting last message idx to %s, current block index to %s ..."
                    % (config.LAST_MESSAGE_INDEX, config.CURRENT_BLOCK_INDEX))
            if config.CAUGHT_UP and not config.CAUGHT_UP_STARTED_EVENTS:
                logging.debug(
                    "Starting event timer: compile_asset_pair_market_info")
                gevent.spawn(events.compile_asset_pair_market_info)
                logging.debug(
                    "Starting event timer: compile_asset_market_info")
                gevent.spawn(events.compile_asset_market_info)
                logging.debug(
                    "Starting event timer: compile_extended_asset_info")
                gevent.spawn(events.compile_extended_asset_info)
                logging.debug(
                    "Starting event timer: compile_extended_feed_info")
                gevent.spawn(events.compile_extended_feed_info)
                config.CAUGHT_UP_STARTED_EVENTS = True
            publish_mempool_tx()
            time.sleep(30)
def compile_asset_pair_market_info():
    """Compiles the pair-level statistics that show on the View Prices page of litetokenswallet, for instance

    For every asset pair with open orders and/or trades in the last 24 hours,
    upserts a document into ``asset_pair_market_info`` with: open order count,
    lowest ask, highest bid, completed trade count, 24h base/quote volume,
    24h volume expressed in XLT and LTC, and 24h percent price change.
    Stale pair documents (not touched this run) are removed at the end.
    """
    #loop through all open orders, and compile a listing of pairs, with a count of open orders for each pair
    mongo_db = config.mongo_db
    end_dt = datetime.datetime.utcnow()
    start_dt = end_dt - datetime.timedelta(days=1)
    start_block_index, end_block_index = util.get_block_indexes_for_dates(start_dt=start_dt, end_dt=end_dt)
    open_orders = util.call_jsonrpc_api("get_orders", {
        'filters': [
            {'field': 'give_remaining', 'op': '>', 'value': 0},
            {'field': 'get_remaining', 'op': '>', 'value': 0},
            {'field': 'fee_required_remaining', 'op': '>=', 'value': 0},
            {'field': 'fee_provided_remaining', 'op': '>=', 'value': 0},
        ],
        'status': 'open',
        'show_expired': False,
    }, abort_on_error=True)['result']
    pair_data = {}
    asset_info = {}  # per-run cache of tracked_assets lookups, keyed by asset name

    def get_price(base_quantity_normalized, quote_quantity_normalized):
        # price of base expressed in quote units
        return float(D(quote_quantity_normalized / base_quantity_normalized))

    #COMPOSE order depth, lowest ask, and highest bid column data
    for o in open_orders:
        (base_asset, quote_asset) = util.assets_to_asset_pair(o['give_asset'], o['get_asset'])
        pair = '%s/%s' % (base_asset, quote_asset)
        base_asset_info = asset_info.get(base_asset, mongo_db.tracked_assets.find_one({'asset': base_asset}))
        if base_asset not in asset_info:
            asset_info[base_asset] = base_asset_info
        quote_asset_info = asset_info.get(quote_asset, mongo_db.tracked_assets.find_one({'asset': quote_asset}))
        if quote_asset not in asset_info:
            asset_info[quote_asset] = quote_asset_info

        pair_data.setdefault(pair, {'open_orders_count': 0, 'lowest_ask': None, 'highest_bid': None,
                                    'completed_trades_count': 0, 'vol_base': 0, 'vol_quote': 0})
        #^ highest ask = open order selling base, highest bid = open order buying base
        #^ we also initialize completed_trades_count, vol_base, vol_quote because every pair inited here may
        # not have cooresponding data out of the trades_data_by_pair aggregation below
        pair_data[pair]['open_orders_count'] += 1
        base_quantity_normalized = util_litecoin.normalize_quantity(o['give_quantity'] if base_asset == o['give_asset'] else o['get_quantity'], base_asset_info['divisible'])
        quote_quantity_normalized = util_litecoin.normalize_quantity(o['give_quantity'] if quote_asset == o['give_asset'] else o['get_quantity'], quote_asset_info['divisible'])
        order_price = get_price(base_quantity_normalized, quote_quantity_normalized)
        if base_asset == o['give_asset']:  #selling base
            if pair_data[pair]['lowest_ask'] is None or order_price < pair_data[pair]['lowest_ask']:
                pair_data[pair]['lowest_ask'] = order_price
        elif base_asset == o['get_asset']:  #buying base
            if pair_data[pair]['highest_bid'] is None or order_price > pair_data[pair]['highest_bid']:
                pair_data[pair]['highest_bid'] = order_price

    #COMPOSE volume data (in XLT and LTC), and % change data
    #loop through all trade volume over the past 24h, and match that to the open orders
    trades_data_by_pair = mongo_db.trades.aggregate([
        {"$match": {
            "block_time": {"$gte": start_dt, "$lte": end_dt}
        }},
        {"$project": {
            "base_asset": 1,
            "quote_asset": 1,
            "base_quantity_normalized": 1,  #to derive base volume
            "quote_quantity_normalized": 1  #to derive quote volume
        }},
        {"$group": {
            "_id": {"base_asset": "$base_asset", "quote_asset": "$quote_asset"},
            "vol_base": {"$sum": "$base_quantity_normalized"},
            "vol_quote": {"$sum": "$quote_quantity_normalized"},
            "count": {"$sum": 1},
        }}
    ])
    # NOTE(review): assumes the legacy pymongo aggregate() return shape
    # ({'ok': ..., 'result': [...]}); modern drivers return a cursor — confirm driver version.
    trades_data_by_pair = [] if not trades_data_by_pair['ok'] else trades_data_by_pair['result']
    for e in trades_data_by_pair:
        pair = '%s/%s' % (e['_id']['base_asset'], e['_id']['quote_asset'])
        pair_data.setdefault(pair, {'open_orders_count': 0, 'lowest_ask': None, 'highest_bid': None})
        #^ initialize an empty pair in the event there are no open orders for that pair, but there ARE completed trades for it
        pair_data[pair]['completed_trades_count'] = e['count']
        pair_data[pair]['vol_base'] = e['vol_base']
        pair_data[pair]['vol_quote'] = e['vol_quote']

    #compose price data, relative to LTC and XLT
    mps_xlt_ltc, xlt_ltc_price, ltc_xlt_price = get_price_primatives()
    for pair, e in pair_data.iteritems():
        base_asset, quote_asset = pair.split('/')
        _24h_vol_in_ltc = None
        _24h_vol_in_xlt = None
        #derive asset price data, expressed in LTC and XLT, for the given volumes
        if base_asset == config.XLT:
            _24h_vol_in_xlt = e['vol_base']
            _24h_vol_in_ltc = util_litecoin.round_out(e['vol_base'] * xlt_ltc_price) if xlt_ltc_price else 0
        elif base_asset == config.LTC:
            _24h_vol_in_xlt = util_litecoin.round_out(e['vol_base'] * ltc_xlt_price) if ltc_xlt_price else 0
            _24h_vol_in_ltc = e['vol_base']
        else:  #base is not XLT or LTC
            price_summary_in_xlt, price_summary_in_ltc, price_in_xlt, price_in_ltc, aggregated_price_in_xlt, aggregated_price_in_ltc = \
                get_xlt_ltc_price_info(base_asset, mps_xlt_ltc, xlt_ltc_price, ltc_xlt_price, with_last_trades=0, start_dt=start_dt, end_dt=end_dt)
            if price_in_xlt:
                _24h_vol_in_xlt = util_litecoin.round_out(e['vol_base'] * price_in_xlt)
            if price_in_ltc:
                _24h_vol_in_ltc = util_litecoin.round_out(e['vol_base'] * price_in_ltc)
            if _24h_vol_in_xlt is None or _24h_vol_in_ltc is None:
                #the base asset didn't have price data against LTC or XLT, or both...try against the quote asset instead
                price_summary_in_xlt, price_summary_in_ltc, price_in_xlt, price_in_ltc, aggregated_price_in_xlt, aggregated_price_in_ltc = \
                    get_xlt_ltc_price_info(quote_asset, mps_xlt_ltc, xlt_ltc_price, ltc_xlt_price, with_last_trades=0, start_dt=start_dt, end_dt=end_dt)
                if _24h_vol_in_xlt is None and price_in_xlt:
                    _24h_vol_in_xlt = util_litecoin.round_out(e['vol_quote'] * price_in_xlt)
                if _24h_vol_in_ltc is None and price_in_ltc:
                    _24h_vol_in_ltc = util_litecoin.round_out(e['vol_quote'] * price_in_ltc)
        pair_data[pair]['24h_vol_in_{}'.format(config.XLT.lower())] = _24h_vol_in_xlt  #might still be None
        pair_data[pair]['24h_vol_in_{}'.format(config.LTC.lower())] = _24h_vol_in_ltc  #might still be None

        #get % change stats -- start by getting the first trade directly before the 24h period starts
        prev_trade = mongo_db.trades.find({
            "base_asset": base_asset,
            "quote_asset": quote_asset,
            "block_time": {'$lt': start_dt}}).sort('block_time', pymongo.DESCENDING).limit(1)
        latest_trade = mongo_db.trades.find({
            "base_asset": base_asset,
            "quote_asset": quote_asset}).sort('block_time', pymongo.DESCENDING).limit(1)
        if not prev_trade.count():  #no previous trade before this 24hr period
            pair_data[pair]['24h_pct_change'] = None
        else:
            prev_trade = prev_trade[0]
            latest_trade = latest_trade[0]
            prev_trade_price = get_price(prev_trade['base_quantity_normalized'], prev_trade['quote_quantity_normalized'])
            latest_trade_price = get_price(latest_trade['base_quantity_normalized'], latest_trade['quote_quantity_normalized'])
            pair_data[pair]['24h_pct_change'] = ((latest_trade_price - prev_trade_price) / prev_trade_price) * 100
        pair_data[pair]['last_updated'] = end_dt
        mongo_db.asset_pair_market_info.update(
            {'base_asset': base_asset, 'quote_asset': quote_asset},
            {"$set": pair_data[pair]}, upsert=True)
    #remove any old pairs that were not just updated
    mongo_db.asset_pair_market_info.remove({'last_updated': {'$lt': end_dt}})
    logging.info("Recomposed 24h trade statistics for %i asset pairs: %s"
                 % (len(pair_data), ', '.join(pair_data.keys())))
def get_market_orders(asset1, asset2, addresses=None, supplies=None, min_fee_provided=0.95, max_fee_required=0.95):
    """Return the open order book (or a user's open orders) for an asset pair.

    :param asset1, asset2: the two assets of the pair (order-independent; the
        canonical base/quote split comes from util.assets_to_asset_pair).
    :param addresses: optional list of source addresses. When given, only those
        addresses' orders are returned, each annotated with completion %, tx
        info and block info. When empty, the full book is returned with
        same-price orders merged.
    :param supplies: optional pre-fetched asset supply info (as returned by
        get_assets_supply); fetched here if not provided.
    :param min_fee_provided: minimum acceptable ENRG fee-provided ratio; orders
        below it are excluded from the book.
    :param max_fee_required: maximum acceptable ENRG fee-required ratio; orders
        above it are excluded from the book.
    :return: list of market order dicts ('type', 'amount', 'total', 'price', ...).
    """
    # BUGFIX: was a mutable default argument (addresses=[]); use None sentinel.
    if addresses is None:
        addresses = []
    base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
    if not supplies:
        supplies = get_assets_supply([asset1, asset2])
    market_orders = []
    buy_orders = []
    sell_orders = []

    # Parameterized SQL, executed via the counterpartyd 'sql' API method.
    sql = '''SELECT orders.*, blocks.block_time FROM orders INNER JOIN blocks ON orders.block_index=blocks.block_index WHERE status = ? '''
    bindings = ['open']
    if len(addresses) > 0:
        sql += '''AND source IN ({}) '''.format(','.join(['?' for e in range(0,len(addresses))]))
        bindings += addresses
    sql += '''AND give_remaining > 0 AND give_asset IN (?, ?) AND get_asset IN (?, ?) ORDER BY tx_index DESC'''
    bindings += [asset1, asset2, asset1, asset2]
    orders = util.call_jsonrpc_api('sql', {'query': sql, 'bindings': bindings})['result']

    for order in orders:
        market_order = {}
        exclude = False
        # Fee-ratio filtering only applies when ENRG is on one side of the order.
        if order['give_asset'] == 'ENRG':
            try:
                fee_provided = order['fee_provided'] / (order['give_quantity'] / 100)
                market_order['fee_provided'] = format(D(order['fee_provided']) / (D(order['give_quantity']) / D(100)), '.2f')
            except Exception as e:
                fee_provided = min_fee_provided - 1  # exclude
            exclude = fee_provided < min_fee_provided
        elif order['get_asset'] == 'ENRG':
            try:
                fee_required = order['fee_required'] / (order['get_quantity'] / 100)
                market_order['fee_required'] = format(D(order['fee_required']) / (D(order['get_quantity']) / D(100)), '.2f')
            except Exception as e:
                fee_required = max_fee_required + 1  # exclude
            exclude = fee_required > max_fee_required

        if not exclude:
            if order['give_asset'] == base_asset:
                # Giving base = SELL side of the book.
                # BUGFIX: was a bare `except:` (also caught SystemExit/KeyboardInterrupt).
                try:
                    price = calculate_price(order['give_quantity'], order['get_quantity'], supplies[order['give_asset']][1], supplies[order['get_asset']][1], 'SELL')
                except Exception:
                    continue  # unpriceable order (e.g. zero quantity) — skip it
                market_order['type'] = 'SELL'
                market_order['amount'] = order['give_remaining']
                market_order['total'] = (order['give_remaining']) * float(price)
                # Rescale when exactly one side of the pair is divisible
                # (supplies[asset][1] is the divisibility flag).
                if not supplies[order['give_asset']][1] and supplies[order['get_asset']][1]:
                    market_order['total'] = int(market_order['total'] * config.UNIT)
                elif supplies[order['give_asset']][1] and not supplies[order['get_asset']][1]:
                    market_order['total'] = int(market_order['total'] / config.UNIT)
                else:
                    market_order['total'] = int(market_order['total'])
            else:
                # Giving quote = BUY side of the book.
                try:
                    price = calculate_price(order['get_quantity'], order['give_quantity'], supplies[order['get_asset']][1], supplies[order['give_asset']][1], 'BUY')
                except Exception:
                    continue  # unpriceable order — skip it
                market_order['type'] = 'BUY'
                market_order['total'] = order['give_remaining']
                market_order['amount'] = (order['give_remaining']) / float(price)
                if supplies[order['give_asset']][1] and not supplies[order['get_asset']][1]:
                    market_order['amount'] = int(market_order['amount'] / config.UNIT)
                elif not supplies[order['give_asset']][1] and supplies[order['get_asset']][1]:
                    market_order['amount'] = int(market_order['amount'] * config.UNIT)
                else:
                    market_order['amount'] = int(market_order['amount'])
            market_order['price'] = price

            if len(addresses) > 0:
                # Per-address view: annotate with completion % and tx/block details.
                completed = format(((D(order['give_quantity']) - D(order['give_remaining'])) / D(order['give_quantity'])) * D(100), '.2f')
                market_order['completion'] = "{}%".format(completed)
                market_order['tx_index'] = order['tx_index']
                market_order['tx_hash'] = order['tx_hash']
                market_order['source'] = order['source']
                market_order['block_index'] = order['block_index']
                market_order['block_time'] = order['block_time']
                market_orders.append(market_order)
            else:
                # Book view: bucket by side so same-price orders can be merged below.
                if market_order['type'] == 'SELL':
                    sell_orders.append(market_order)
                else:
                    buy_orders.append(market_order)

    if len(addresses) == 0:
        market_orders = merge_same_price_orders(sell_orders) + merge_same_price_orders(buy_orders)
    return market_orders