def get_assets_supply(assets=[]):
    supplies = {}
    assets = list(assets)  # work on a copy so the caller's list is not mutated by the remove() calls below
    if 'NGM' in assets:
        supplies['NGM'] = (util.call_jsonrpc_api('get_xcp_supply', [])['result'], True)
        assets.remove('NGM')
    if 'XTO' in assets:
        supplies['XTO'] = (0, True)
        assets.remove('XTO')
    if len(assets) > 0:
        sql = '''SELECT asset, SUM(quantity) AS supply, divisible FROM issuances
                 WHERE asset IN ({}) AND status = ?
                 GROUP BY asset ORDER BY asset'''.format(','.join(['?' for e in range(0, len(assets))]))
        bindings = assets + ['valid']
        issuances = util.call_jsonrpc_api('sql', {'query': sql, 'bindings': bindings})['result']
        for issuance in issuances:
            supplies[issuance['asset']] = (issuance['supply'], issuance['divisible'])
    return supplies
def get_assets_supply(assets=[]):
    supplies = {}
    assets = list(assets)  # work on a copy so the caller's list is not mutated by the remove() calls below
    if "XDP" in assets:
        supplies["XDP"] = (util.call_jsonrpc_api("get_xcp_supply", [])["result"], True)
        assets.remove("XDP")
    if "DOGE" in assets:
        supplies["DOGE"] = (0, True)
        assets.remove("DOGE")
    if len(assets) > 0:
        sql = """SELECT asset, SUM(quantity) AS supply, divisible FROM issuances
                 WHERE asset IN ({}) AND status = ?
                 GROUP BY asset ORDER BY asset""".format(",".join(["?" for e in range(0, len(assets))]))
        bindings = assets + ["valid"]
        issuances = util.call_jsonrpc_api("sql", {"query": sql, "bindings": bindings})["result"]
        for issuance in issuances:
            supplies[issuance["asset"]] = (issuance["supply"], issuance["divisible"])
    return supplies
def get_assets_supply(assets=[]):
    supplies = {}
    assets = list(assets)  # work on a copy so the caller's list is not mutated by the remove() calls below
    if 'XBJ' in assets:
        supplies['XBJ'] = (util.call_jsonrpc_api('get_xbj_supply', [])['result'], True)
        assets.remove('XBJ')
    if 'WDC' in assets:
        supplies['WDC'] = (0, True)
        assets.remove('WDC')
    if len(assets) > 0:
        sql = '''SELECT asset, SUM(quantity) AS supply, divisible FROM issuances
                 WHERE asset IN ({}) AND status = ?
                 GROUP BY asset ORDER BY asset'''.format(','.join(['?' for e in range(0, len(assets))]))
        bindings = assets + ['valid']
        issuances = util.call_jsonrpc_api('sql', {'query': sql, 'bindings': bindings})['result']
        for issuance in issuances:
            supplies[issuance['asset']] = (issuance['supply'], issuance['divisible'])
    return supplies
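# Usage sketch (illustrative, not from the original source; asset names are
# hypothetical): get_assets_supply() returns {asset: (supply, divisible)}.
# Assumes a reachable counterpartyd-style JSON-RPC backend behind
# util.call_jsonrpc_api.
def print_supplies(asset_names):
    for asset, (supply, divisible) in get_assets_supply(asset_names).items():
        unit = 10 ** 8 if divisible else 1  # divisible assets carry 8 decimal places
        print('%s: %s' % (asset, supply / float(unit)))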
def get_asset_info(asset, at_dt=None):
    mongo_db = config.mongo_db
    asset_info = mongo_db.tracked_assets.find_one({'asset': asset})

    if asset not in (config.XLT, config.LTC) and at_dt and asset_info['_at_block_time'] > at_dt:
        #get the asset info at or before the given at_dt datetime
        for e in reversed(asset_info['_history']): #newest to oldest
            if e['_at_block_time'] <= at_dt:
                asset_info = e
                break
        else: #asset was created AFTER at_dt
            asset_info = None
        if asset_info is None:
            return None
        assert asset_info['_at_block_time'] <= at_dt

    #modify some of the properties of the returned asset_info for LTC and XLT
    if asset == config.LTC:
        if at_dt:
            start_block_index, end_block_index = util.get_block_indexes_for_dates(end_dt=at_dt)
            asset_info['total_issued'] = util_litecoin.get_ltc_supply(normalize=False, at_block_index=end_block_index)
            asset_info['total_issued_normalized'] = util_litecoin.normalize_quantity(asset_info['total_issued'])
        else:
            asset_info['total_issued'] = util_litecoin.get_ltc_supply(normalize=False)
            asset_info['total_issued_normalized'] = util_litecoin.normalize_quantity(asset_info['total_issued'])
    elif asset == config.XLT:
        #BUG: this does not take end_dt (if specified) into account. however, the deviation won't be too big
        # as XLT doesn't deflate quickly at all, and shouldn't matter that much since there weren't any/much trades
        # before the end of the burn period (which is what is involved with how we use at_dt currently)
        asset_info['total_issued'] = util.call_jsonrpc_api("get_xlt_supply", abort_on_error=True)['result']
        asset_info['total_issued_normalized'] = util_litecoin.normalize_quantity(asset_info['total_issued'])

    if not asset_info:
        raise Exception("Invalid asset: %s" % asset)
    return asset_info
def get_xcp_or_btc_pairs(asset='XEP', exclude_pairs=[], max_pairs=12, from_time=None):
    bindings = []
    sql = '''SELECT (CASE WHEN forward_asset = ? THEN backward_asset ELSE forward_asset END) AS base_asset,
                    (CASE WHEN backward_asset = ? THEN backward_asset ELSE forward_asset END) AS quote_asset,
                    (CASE WHEN backward_asset = ? THEN (forward_asset || '/' || backward_asset) ELSE (backward_asset || '/' || forward_asset) END) AS pair,
                    (CASE WHEN forward_asset = ? THEN backward_quantity ELSE forward_quantity END) AS bq,
                    (CASE WHEN backward_asset = ? THEN backward_quantity ELSE forward_quantity END) AS qq '''
    if from_time:
        sql += ''', block_time '''
    sql += '''FROM order_matches '''
    bindings += [asset, asset, asset, asset, asset]
    if from_time:
        sql += '''INNER JOIN blocks ON order_matches.block_index = blocks.block_index '''
    if asset == 'XEP':
        sql += '''WHERE ((forward_asset = ? AND backward_asset != ?) OR (forward_asset != ? AND backward_asset = ?)) '''
        bindings += [asset, 'ENRG', 'ENRG', asset]
    else:
        sql += '''WHERE ((forward_asset = ?) OR (backward_asset = ?)) '''
        bindings += [asset, asset]
    if len(exclude_pairs) > 0:
        sql += '''AND pair NOT IN ({}) '''.format(','.join(['?' for e in range(0, len(exclude_pairs))]))
        bindings += exclude_pairs
    if from_time:
        sql += '''AND block_time > ? '''
        bindings += [from_time]
    sql += '''AND forward_asset != backward_asset AND status = ?'''
    bindings += ['completed', max_pairs]
    sql = '''SELECT base_asset, quote_asset, pair, SUM(bq) AS base_quantity, SUM(qq) AS quote_quantity
             FROM ({}) GROUP BY pair ORDER BY quote_quantity LIMIT ?'''.format(sql)
    return util.call_jsonrpc_api('sql', {'query': sql, 'bindings': bindings})['result']
def publish_mempool_tx():
    """fetch new tx from mempool"""
    tx_hashes = []
    mempool_txs = mongo_db.mempool.find(fields={'tx_hash': True})
    for mempool_tx in mempool_txs:
        tx_hashes.append(str(mempool_tx['tx_hash']))

    params = None
    if len(tx_hashes) > 0:
        params = {
            'filters': [
                {'field': 'tx_hash', 'op': 'NOT IN', 'value': tx_hashes},
                {'field': 'category', 'op': 'IN', 'value': ['sends', 'btcpays', 'issuances', 'dividends', 'callbacks']}
            ],
            'filterop': 'AND'
        }
    new_txs = util.call_jsonrpc_api("get_mempool", params, abort_on_error=True)

    for new_tx in new_txs['result']:
        tx = {
            'tx_hash': new_tx['tx_hash'],
            'command': new_tx['command'],
            'category': new_tx['category'],
            'bindings': new_tx['bindings'],
            'timestamp': new_tx['timestamp'],
            'viewed_in_block': config.CURRENT_BLOCK_INDEX
        }
        mongo_db.mempool.insert(tx)
        del tx['_id']
        tx['_category'] = tx['category']
        tx['_message_index'] = 'mempool'
        logging.debug("Spotted mempool tx: %s" % tx)
        zmq_publisher_eventfeed.send_json(tx)
def get_pair_price(base_asset, quote_asset, max_block_time=None, supplies=None):
    if not supplies:
        supplies = get_assets_supply([base_asset, quote_asset])

    sql = """SELECT *, MAX(tx0_index, tx1_index) AS tx_index, blocks.block_time
             FROM order_matches INNER JOIN blocks ON order_matches.block_index = blocks.block_index
             WHERE forward_asset IN (?, ?) AND backward_asset IN (?, ?) """
    bindings = [base_asset, quote_asset, base_asset, quote_asset]
    if max_block_time:
        sql += """AND block_time <= ? """
        bindings += [max_block_time]
    sql += """ORDER BY tx_index DESC LIMIT 2"""

    order_matches = util.call_jsonrpc_api("sql", {"query": sql, "bindings": bindings})["result"]

    if len(order_matches) == 0:
        last_price = D(0.0)
    elif order_matches[0]["forward_asset"] == base_asset:
        last_price = calculate_price(
            order_matches[0]["forward_quantity"],
            order_matches[0]["backward_quantity"],
            supplies[order_matches[0]["forward_asset"]][1],
            supplies[order_matches[0]["backward_asset"]][1],
        )
    else:
        last_price = calculate_price(
            order_matches[0]["backward_quantity"],
            order_matches[0]["forward_quantity"],
            supplies[order_matches[0]["backward_asset"]][1],
            supplies[order_matches[0]["forward_asset"]][1],
        )

    trend = 0
    if len(order_matches) == 2:
        # compare against the previous (second-newest) match
        if order_matches[1]["forward_asset"] == base_asset:
            before_last_price = calculate_price(
                order_matches[1]["forward_quantity"],
                order_matches[1]["backward_quantity"],
                supplies[order_matches[1]["forward_asset"]][1],
                supplies[order_matches[1]["backward_asset"]][1],
            )
        else:
            before_last_price = calculate_price(
                order_matches[1]["backward_quantity"],
                order_matches[1]["forward_quantity"],
                supplies[order_matches[1]["backward_asset"]][1],
                supplies[order_matches[1]["forward_asset"]][1],
            )
        if last_price < before_last_price:
            trend = -1
        elif last_price > before_last_price:
            trend = 1

    return D(last_price), trend
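# Worked example (made-up numbers) of the price/trend logic above.
# calculate_price() is defined elsewhere in this module; as called here it
# divides the quote-side quantity by the base-side quantity after normalizing
# each by its divisibility flag. With the two newest XCP/BTC matches (both
# assets divisible):
#   newest:   150 XCP forward, 3.00 BTC backward -> last_price        = 0.0200
#   previous: 100 XCP forward, 1.90 BTC backward -> before_last_price = 0.0190
# last_price > before_last_price, so trend = 1 (rising); -1 falling, 0 flat.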
def parse_base64_feed(base64_feed):
    decoded_feed = base64.b64decode(base64_feed)
    feed = json.loads(decoded_feed)
    if isinstance(feed, dict) and "feed" in feed:
        errors = util.is_valid_json(feed["feed"], config.FEED_SCHEMA)
        if len(errors) > 0:
            raise Exception("Invalid json: {}".format(", ".join(errors)))
        # get broadcast infos
        params = {
            "filters": {"field": "source", "op": "=", "value": feed["feed"]["address"]},
            "order_by": "tx_index",
            "order_dir": "DESC",
            "limit": 1,
        }
        broadcasts = util.call_jsonrpc_api("get_broadcasts", params)["result"]
        if len(broadcasts) == 0:
            raise Exception("invalid feed address")
        complete_feed = {}
        complete_feed["fee_fraction_int"] = broadcasts[0]["fee_fraction_int"]
        complete_feed["source"] = broadcasts[0]["source"]
        complete_feed["locked"] = broadcasts[0]["locked"]
        complete_feed["counters"] = get_feed_counters(broadcasts[0]["source"])
        complete_feed["info_data"] = sanitize_json_data(feed["feed"])
        feed["feed"] = complete_feed
    return feed
def find_feed(db, url_or_address):
    conditions = {
        "$or": [{"source": url_or_address}, {"info_url": url_or_address}],
        "info_status": "valid",
    }
    result = {}
    feeds = db.feeds.find(spec=conditions, fields={"_id": False}, limit=1)
    for feed in feeds:
        if "targets" not in feed["info_data"] or (
            "type" in feed["info_data"] and feed["info_data"]["type"] in ["all", "cfd"]
        ):
            feed["info_data"]["next_broadcast"] = util.next_interval_date(feed["info_data"]["broadcast_date"])
            feed["info_data"]["next_deadline"] = util.next_interval_date(feed["info_data"]["deadline"])
        result = feed
        result["counters"] = get_feed_counters(feed["source"])

    if "counters" not in result:
        params = {
            "filters": {"field": "source", "op": "=", "value": url_or_address},
            "order_by": "tx_index",
            "order_dir": "DESC",
            "limit": 10,
        }
        broadcasts = util.call_jsonrpc_api("get_broadcasts", params)["result"]
        if broadcasts:
            return {"broadcasts": broadcasts, "counters": get_feed_counters(url_or_address)}
    return result
def find_feed(db, url_or_address):
    conditions = {
        '$or': [{'source': url_or_address}, {'info_url': url_or_address}],
        'info_status': 'valid'
    }
    result = {}
    feeds = db.feeds.find(conditions, projection={'_id': False}, limit=1)
    for feed in feeds:
        if 'targets' not in feed['info_data'] or \
           ('type' in feed['info_data'] and feed['info_data']['type'] in ['all', 'cfd']):
            feed['info_data']['next_broadcast'] = util.next_interval_date(feed['info_data']['broadcast_date'])
            feed['info_data']['next_deadline'] = util.next_interval_date(feed['info_data']['deadline'])
        result = feed
        result['counters'] = get_feed_counters(feed['source'])

    if 'counters' not in result:
        params = {
            'filters': {'field': 'source', 'op': '=', 'value': url_or_address},
            'order_by': 'tx_index',
            'order_dir': 'DESC',
            'limit': 10
        }
        broadcasts = util.call_jsonrpc_api('get_broadcasts', params)['result']
        if broadcasts:
            return {'broadcasts': broadcasts, 'counters': get_feed_counters(url_or_address)}
    return result
def parse_base64_feed(base64_feed):
    decoded_feed = base64.b64decode(base64_feed)
    feed = json.loads(decoded_feed)
    if isinstance(feed, dict) and 'feed' in feed:
        errors = util.is_valid_json(feed['feed'], config.FEED_SCHEMA)
        if len(errors) > 0:
            raise Exception("Invalid json: {}".format(", ".join(errors)))
        # get broadcast infos
        params = {
            'filters': {'field': 'source', 'op': '=', 'value': feed['feed']['address']},
            'order_by': 'tx_index',
            'order_dir': 'DESC',
            'limit': 1
        }
        broadcasts = util.call_jsonrpc_api('get_broadcasts', params)['result']
        if len(broadcasts) == 0:
            raise Exception("invalid feed address")
        complete_feed = {}
        complete_feed['fee_fraction_int'] = broadcasts[0]['fee_fraction_int']
        complete_feed['source'] = broadcasts[0]['source']
        complete_feed['locked'] = broadcasts[0]['locked']
        complete_feed['counters'] = get_feed_counters(broadcasts[0]['source'])
        complete_feed['info_data'] = sanitize_json_data(feed['feed'])
        feed['feed'] = complete_feed
    return feed
def find_feed(db, url_or_address):
    conditions = {
        '$or': [{'source': url_or_address}, {'info_url': url_or_address}],
        'info_status': 'valid'
    }
    result = {}
    feeds = db.feeds.find(spec=conditions, fields={'_id': False}, limit=1)
    for feed in feeds:
        if 'targets' not in feed['info_data'] or \
           ('type' in feed['info_data'] and feed['info_data']['type'] in ['all', 'cfd']):
            feed['info_data']['next_broadcast'] = util.next_interval_date(feed['info_data']['broadcast_date'])
            feed['info_data']['next_deadline'] = util.next_interval_date(feed['info_data']['deadline'])
        result = feed
        result['counters'] = get_feed_counters(feed['source'])

    if 'counters' not in result:
        params = {
            'filters': {'field': 'source', 'op': '=', 'value': url_or_address},
            'order_by': 'tx_index',
            'order_dir': 'DESC',
            'limit': 10
        }
        broadcasts = util.call_jsonrpc_api('get_broadcasts', params)['result']
        if broadcasts:
            return {'broadcasts': broadcasts, 'counters': get_feed_counters(url_or_address)}
    return result
def get_asset_info(asset, at_dt=None):
    mongo_db = config.mongo_db
    asset_info = mongo_db.tracked_assets.find_one({'asset': asset})

    if asset not in (config.XCP, config.BTC) and at_dt and asset_info['_at_block_time'] > at_dt:
        #get the asset info at or before the given at_dt datetime
        for e in reversed(asset_info['_history']): #newest to oldest
            if e['_at_block_time'] <= at_dt:
                asset_info = e
                break
        else: #asset was created AFTER at_dt
            asset_info = None
        if asset_info is None:
            return None
        assert asset_info['_at_block_time'] <= at_dt

    #modify some of the properties of the returned asset_info for BTC and XCP
    if asset == config.BTC:
        if at_dt:
            start_block_index, end_block_index = util.get_block_indexes_for_dates(end_dt=at_dt)
            asset_info['total_issued'] = util_bitcoin.get_btc_supply(normalize=False, at_block_index=end_block_index)
            asset_info['total_issued_normalized'] = util_bitcoin.normalize_quantity(asset_info['total_issued'])
        else:
            asset_info['total_issued'] = util_bitcoin.get_btc_supply(normalize=False)
            asset_info['total_issued_normalized'] = util_bitcoin.normalize_quantity(asset_info['total_issued'])
    elif asset == config.XCP:
        #BUG: this does not take end_dt (if specified) into account. however, the deviation won't be too big
        # as XCP doesn't deflate quickly at all, and shouldn't matter that much since there weren't any/much trades
        # before the end of the burn period (which is what is involved with how we use at_dt currently)
        asset_info['total_issued'] = util.call_jsonrpc_api("get_xcp_supply", abort_on_error=True)['result']
        asset_info['total_issued_normalized'] = util_bitcoin.normalize_quantity(asset_info['total_issued'])

    if not asset_info:
        raise Exception("Invalid asset: %s" % asset)
    return asset_info
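# Illustrative helper (not part of the original module): read an asset's
# supply as of a past moment via get_asset_info()'s at_dt rollback through the
# tracked asset's _history entries.
import datetime

def supply_one_week_ago(asset):
    at_dt = datetime.datetime.utcnow() - datetime.timedelta(days=7)
    info = get_asset_info(asset, at_dt=at_dt)
    if info is None:
        return None  # the asset did not exist yet at at_dt
    return info['total_issued_normalized']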
def get_document_for_hash(hash_string='', hash_type=0):
    if hash_string:
        sql = 'SELECT * FROM documents WHERE hash_string = ? and hash_type = ? COLLATE NOCASE'
        params = {'query': sql, 'bindings': (hash_string, hash_type)}
        return util.call_jsonrpc_api('sql', params)['result']
    return {}
def get_pairs_with_orders(addresses=[], max_pairs=12):
    pairs_with_orders = []
    sources = ''
    if len(addresses) > 0:  # an empty 'IN ()' clause is invalid SQL
        sources = '''AND source IN ({})'''.format(','.join(['?' for e in range(0, len(addresses))]))
    sql = '''SELECT (MIN(give_asset, get_asset) || '/' || MAX(give_asset, get_asset)) AS pair,
                    COUNT(*) AS order_count
             FROM orders
             WHERE give_asset != get_asset AND status = ? {}
             GROUP BY pair
             ORDER BY order_count DESC
             LIMIT ?'''.format(sources)
    bindings = ['open'] + addresses + [max_pairs]
    my_pairs = util.call_jsonrpc_api('sql', {'query': sql, 'bindings': bindings})['result']
    for my_pair in my_pairs:
        base_asset, quote_asset = util.assets_to_asset_pair(*tuple(my_pair['pair'].split("/")))
        top_pair = {
            'base_asset': base_asset,
            'quote_asset': quote_asset,
            'my_order_count': my_pair['order_count']
        }
        if my_pair['pair'] == 'WDC/XBJ': # the XBJ/WDC pair is always listed first
            pairs_with_orders.insert(0, top_pair)
        else:
            pairs_with_orders.append(top_pair)
    return pairs_with_orders
def get_pairs_with_orders(addresses=[], max_pairs=12):
    pairs_with_orders = []
    sources = ''
    if len(addresses) > 0:  # an empty 'IN ()' clause is invalid SQL
        sources = '''AND source IN ({})'''.format(','.join(['?' for e in range(0, len(addresses))]))
    sql = '''SELECT (MIN(give_asset, get_asset) || '/' || MAX(give_asset, get_asset)) AS pair,
                    COUNT(*) AS order_count
             FROM orders
             WHERE give_asset != get_asset AND status = ? {}
             GROUP BY pair
             ORDER BY order_count DESC
             LIMIT ?'''.format(sources)
    bindings = ['open'] + addresses + [max_pairs]
    my_pairs = util.call_jsonrpc_api('sql', {'query': sql, 'bindings': bindings})['result']
    for my_pair in my_pairs:
        base_asset, quote_asset = util.assets_to_asset_pair(*tuple(my_pair['pair'].split("/")))
        top_pair = {
            'base_asset': base_asset,
            'quote_asset': quote_asset,
            'my_order_count': my_pair['order_count']
        }
        if my_pair['pair'] == 'DOGE/XDP': # the XDP/DOGE pair is always listed first
            pairs_with_orders.insert(0, top_pair)
        else:
            pairs_with_orders.append(top_pair)
    return pairs_with_orders
def get_pairs_with_orders(addresses=[], max_pairs=12):
    pairs_with_orders = []
    sources = ""
    if len(addresses) > 0:  # an empty 'IN ()' clause is invalid SQL
        sources = """AND source IN ({})""".format(",".join(["?" for e in range(0, len(addresses))]))
    sql = """SELECT (MIN(give_asset, get_asset) || '/' || MAX(give_asset, get_asset)) AS pair,
                    COUNT(*) AS order_count
             FROM orders
             WHERE give_asset != get_asset AND status = ? {}
             GROUP BY pair
             ORDER BY order_count DESC
             LIMIT ?""".format(sources)
    bindings = ["open"] + addresses + [max_pairs]
    my_pairs = util.call_jsonrpc_api("sql", {"query": sql, "bindings": bindings})["result"]
    for my_pair in my_pairs:
        base_asset, quote_asset = util.assets_to_asset_pair(*tuple(my_pair["pair"].split("/")))
        top_pair = {"base_asset": base_asset, "quote_asset": quote_asset, "my_order_count": my_pair["order_count"]}
        if my_pair["pair"] == "DOGE/XDP": # the XDP/DOGE pair is always listed first
            pairs_with_orders.insert(0, top_pair)
        else:
            pairs_with_orders.append(top_pair)
    return pairs_with_orders
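# Hypothetical stand-in for util.assets_to_asset_pair (the real helper lives
# in util and is not reproduced here): it decides which side of a pair is the
# base and which is the quote, with the protocol currencies taking the quote
# side. This sketch only illustrates the ordering the code above relies on,
# not the exact production logic.
def assets_to_asset_pair_sketch(asset1, asset2, quote_priority=('DOGE', 'XDP')):
    for quote in quote_priority:
        if asset1 == quote:
            return asset2, asset1
        if asset2 == quote:
            return asset1, asset2
    # neither side is a protocol currency: order lexicographically
    return (asset1, asset2) if asset1 < asset2 else (asset2, asset1)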
def publish_mempool_tx():
    """fetch new tx from mempool"""
    tx_hashes = []
    mempool_txs = mongo_db.mempool.find(fields={'tx_hash': True})
    for mempool_tx in mempool_txs:
        tx_hashes.append(str(mempool_tx['tx_hash']))

    params = None
    if len(tx_hashes) > 0:
        params = {
            'filters': [
                {'field': 'tx_hash', 'op': 'NOT IN', 'value': tx_hashes},
                {'field': 'category', 'op': 'IN', 'value': ['sends', 'metpays', 'issuances', 'dividends', 'callbacks']}
            ],
            'filterop': 'AND'
        }
    new_txs = util.call_jsonrpc_api("get_mempool", params, abort_on_error=True)

    for new_tx in new_txs['result']:
        tx = {
            'tx_hash': new_tx['tx_hash'],
            'command': new_tx['command'],
            'category': new_tx['category'],
            'bindings': new_tx['bindings'],
            'timestamp': new_tx['timestamp'],
            'viewed_in_block': config.CURRENT_BLOCK_INDEX
        }
        mongo_db.mempool.insert(tx)
        del tx['_id']
        tx['_category'] = tx['category']
        tx['_message_index'] = 'mempool'
        logging.debug("Spotted mempool tx: %s" % tx)
        zmq_publisher_eventfeed.send_json(tx)
def get_document_state_for(addresses=[]):
    if isinstance(addresses, list) and len(addresses) > 0:
        sql = 'SELECT * FROM documents WHERE owner IN ({})'.format(','.join(['?' for e in range(0, len(addresses))]))
        params = {'query': sql, 'bindings': addresses}
        return util.call_jsonrpc_api('sql', params)['result']
    return []
def get_feed_counters(feed_address):
    counters = {}
    sql = 'SELECT COUNT(*) AS bet_count, SUM(wager_quantity) AS wager_quantity, SUM(wager_remaining) AS wager_remaining, status FROM bets '
    sql += 'WHERE feed_address=? GROUP BY status ORDER BY status DESC'
    bindings = [feed_address]
    params = {'query': sql, 'bindings': bindings}
    counters['bets'] = util.call_jsonrpc_api('sql', params)['result']
    return counters
def get_feed_counters(feed_address):
    counters = {}
    sql = "SELECT COUNT(*) AS bet_count, SUM(wager_quantity) AS wager_quantity, SUM(wager_remaining) AS wager_remaining, status FROM bets "
    sql += "WHERE feed_address=? GROUP BY status ORDER BY status DESC"
    bindings = [feed_address]
    params = {"query": sql, "bindings": bindings}
    counters["bets"] = util.call_jsonrpc_api("sql", params)["result"]
    return counters
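# Illustrative result shape (hypothetical feed address and numbers): one row
# per bet status for the feed, matching the columns selected above.
#   get_feed_counters('1FeedAddrXXXXXXXXXXXXXXXXXXXXXXXXX')
#   -> {'bets': [{'bet_count': 4, 'wager_quantity': 400000000,
#                 'wager_remaining': 150000000, 'status': 'open'}, ...]}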
def get_market_orders(asset1, asset2, addresses=[], supplies=None, min_fee_provided=0.95, max_fee_required=0.95):
    base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
    if not supplies:
        supplies = get_assets_supply([asset1, asset2])
    market_orders = []

    sql = '''SELECT orders.*, blocks.block_time FROM orders
             INNER JOIN blocks ON orders.block_index=blocks.block_index
             WHERE status = ? '''
    bindings = ['open']
    if len(addresses) > 0:
        sql += '''AND source IN ({}) '''.format(','.join(['?' for e in range(0, len(addresses))]))
        bindings += addresses
    sql += '''AND give_remaining > 0
              AND give_asset IN (?, ?)
              AND get_asset IN (?, ?)
              ORDER BY tx_index DESC'''
    bindings += [asset1, asset2, asset1, asset2]
    orders = util.call_jsonrpc_api('sql', {'query': sql, 'bindings': bindings})['result']

    for order in orders:
        user_order = {}
        exclude = False
        if order['give_asset'] == 'XTO':
            try:
                fee_provided = order['fee_provided'] / (order['give_quantity'] / 100)
                user_order['fee_provided'] = format(
                    D(order['fee_provided']) / (D(order['give_quantity']) / D(100)), '.2f')
            except Exception as e:
                fee_provided = min_fee_provided - 1  # exclude
            exclude = fee_provided < min_fee_provided
        elif order['get_asset'] == 'XTO':
            try:
                fee_required = order['fee_required'] / (order['get_quantity'] / 100)
                user_order['fee_required'] = format(
                    D(order['fee_required']) / (D(order['get_quantity']) / D(100)), '.2f')
            except Exception as e:
                fee_required = max_fee_required + 1  # exclude
def get_document_for_hash(hash_string='', hash_type=0):
    if hash_string:
        sql = 'SELECT * FROM documents WHERE hash_string = ? and hash_type = ? COLLATE NOCASE'
        params = {'query': sql, 'bindings': (hash_string, hash_type)}
        return util.call_jsonrpc_api('sql', params)['result']
    return {}
def get_document_state_for(addresses=[]):
    if isinstance(addresses, list) and len(addresses) > 0:
        sql = 'SELECT * FROM documents WHERE owner IN ({})'.format(','.join(['?' for e in range(0, len(addresses))]))
        params = {'query': sql, 'bindings': addresses}
        return util.call_jsonrpc_api('sql', params)['result']
    return []
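# The '?' placeholder expansion above recurs throughout this module; a small
# helper (illustrative, not part of the original API) makes the intent explicit:
def sql_in_placeholders(values):
    """Return '?,?,...,?' with one placeholder per value, for a SQL IN (...) clause."""
    return ','.join('?' for _ in values)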
def get_xcp_or_btc_pairs(asset="XDP", exclude_pairs=[], max_pairs=12, from_time=None):
    bindings = []
    sql = """SELECT (CASE WHEN forward_asset = ? THEN backward_asset ELSE forward_asset END) AS base_asset,
                    (CASE WHEN backward_asset = ? THEN backward_asset ELSE forward_asset END) AS quote_asset,
                    (CASE WHEN backward_asset = ? THEN (forward_asset || '/' || backward_asset) ELSE (backward_asset || '/' || forward_asset) END) AS pair,
                    (CASE WHEN forward_asset = ? THEN SUM(backward_quantity) ELSE SUM(forward_quantity) END) AS base_quantity,
                    (CASE WHEN backward_asset = ? THEN SUM(backward_quantity) ELSE SUM(forward_quantity) END) AS quote_quantity """
    if from_time:
        sql += """, block_time """
    sql += """FROM order_matches """
    bindings += [asset, asset, asset, asset, asset]
    if from_time:
        sql += """INNER JOIN blocks ON order_matches.block_index = blocks.block_index """
    if asset == "XDP":
        sql += """WHERE ((forward_asset = ? AND backward_asset != ?) OR (forward_asset != ? AND backward_asset = ?)) """
        bindings += [asset, "DOGE", "DOGE", asset]
    else:
        sql += """WHERE ((forward_asset = ?) OR (backward_asset = ?)) """
        bindings += [asset, asset]
    if len(exclude_pairs) > 0:
        sql += """AND pair NOT IN ({}) """.format(",".join(["?" for e in range(0, len(exclude_pairs))]))
        bindings += exclude_pairs
    if from_time:
        sql += """AND block_time > ? """
        bindings += [from_time]
    sql += """AND forward_asset != backward_asset GROUP BY pair ORDER BY quote_quantity DESC LIMIT ?"""
    bindings += [max_pairs]
    return util.call_jsonrpc_api("sql", {"query": sql, "bindings": bindings})["result"]
def get_xcp_or_btc_pairs(asset='XCP', exclude_pairs=[], max_pairs=12, from_time=None):
    bindings = []
    sql = '''SELECT (CASE WHEN forward_asset = ? THEN backward_asset ELSE forward_asset END) AS base_asset,
                    (CASE WHEN backward_asset = ? THEN backward_asset ELSE forward_asset END) AS quote_asset,
                    (CASE WHEN backward_asset = ? THEN (forward_asset || '/' || backward_asset) ELSE (backward_asset || '/' || forward_asset) END) AS pair,
                    (CASE WHEN forward_asset = ? THEN SUM(backward_quantity) ELSE SUM(forward_quantity) END) AS base_quantity,
                    (CASE WHEN backward_asset = ? THEN SUM(backward_quantity) ELSE SUM(forward_quantity) END) AS quote_quantity '''
    if from_time:
        sql += ''', block_time '''
    sql += '''FROM order_matches '''
    bindings += [asset, asset, asset, asset, asset]
    if from_time:
        sql += '''INNER JOIN blocks ON order_matches.block_index = blocks.block_index '''
    if asset == 'XCP':
        sql += '''WHERE ((forward_asset = ? AND backward_asset != ?) OR (forward_asset != ? AND backward_asset = ?)) '''
        bindings += [asset, 'BTC', 'BTC', asset]
    else:
        sql += '''WHERE ((forward_asset = ?) OR (backward_asset = ?)) '''
        bindings += [asset, asset]
    if len(exclude_pairs) > 0:
        sql += '''AND pair NOT IN ({}) '''.format(','.join(['?' for e in range(0, len(exclude_pairs))]))
        bindings += exclude_pairs
    if from_time:
        sql += '''AND block_time > ? '''
        bindings += [from_time]
    sql += '''AND forward_asset != backward_asset GROUP BY pair ORDER BY quote_quantity DESC LIMIT ?'''
    bindings += [max_pairs]
    return util.call_jsonrpc_api('sql', {'query': sql, 'bindings': bindings})['result']
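# Usage sketch (illustrative): the most active XCP-quoted pairs over the last
# 24 hours. from_time is compared against blocks.block_time, which is stored
# as a unix timestamp.
import time

def top_xcp_pairs_24h(max_pairs=12):
    return get_xcp_or_btc_pairs(asset='XCP', max_pairs=max_pairs,
                                from_time=int(time.time()) - 86400)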
def get_documents_for(addresses=[]):
    if isinstance(addresses, list) and len(addresses) > 0:
        my_addresses = ','.join(['?' for e in range(0, len(addresses))])
        sql = 'SELECT * FROM document_transactions WHERE source IN ({}) OR destination IN ({})'.format(
            my_addresses, my_addresses) # ugh, I miss ruby
        bindings = []
        bindings += addresses + addresses
        params = {'query': sql, 'bindings': bindings}
        return util.call_jsonrpc_api('sql', params)['result']
    return []
def get_feed_counters(feed_address):
    counters = {}
    sql = 'SELECT COUNT(*) AS bet_count, SUM(wager_quantity) AS wager_quantity, SUM(wager_remaining) AS wager_remaining, status FROM bets '
    sql += 'WHERE feed_address=? GROUP BY status ORDER BY status DESC'
    bindings = [feed_address]
    params = {'query': sql, 'bindings': bindings}
    counters['bets'] = util.call_jsonrpc_api('sql', params)['result']
    return counters
def find_bets(bet_type, feed_address, deadline, target_value=None, leverage=5040, limit=50):
    bindings = []
    sql = "SELECT * FROM bets WHERE counterwager_remaining>0 AND "
    sql += "bet_type=? AND feed_address=? AND leverage=? AND deadline=? "
    bindings += [bet_type, feed_address, leverage, deadline]
    if target_value is not None:
        sql += "AND target_value=? "
        bindings.append(target_value)
    sql += "ORDER BY ((counterwager_quantity+0.0)/(wager_quantity+0.0)) ASC LIMIT ?"
    bindings.append(limit)
    params = {"query": sql, "bindings": bindings}
    return util.call_jsonrpc_api("sql", params)["result"]
def get_documents_for(addresses=[]):
    if isinstance(addresses, list) and len(addresses) > 0:
        my_addresses = ','.join(['?' for e in range(0, len(addresses))])
        sql = 'SELECT * FROM document_transactions WHERE source IN ({}) OR destination IN ({})'.format(
            my_addresses, my_addresses) # ugh, I miss ruby
        bindings = []
        bindings += addresses + addresses
        params = {'query': sql, 'bindings': bindings}
        return util.call_jsonrpc_api('sql', params)['result']
    return []
def get_open_rps_count(possible_moves=3, exclude_addresses=[]):
    bindings = ['open', possible_moves]
    sql = 'SELECT wager, COUNT(*) AS game_count FROM rps WHERE status = ? AND possible_moves = ? '
    if isinstance(exclude_addresses, list) and len(exclude_addresses) > 0:
        sql += 'AND source NOT IN ({}) '.format(','.join(['?' for e in range(0, len(exclude_addresses))]))
        bindings += exclude_addresses
    sql += 'GROUP BY wager ORDER BY tx_index DESC'
    params = {'query': sql, 'bindings': bindings}
    return util.call_jsonrpc_api('sql', params)['result']
def find_bets(bet_type, feed_address, deadline, target_value=None, leverage=5040, limit=50):
    bindings = []
    sql = 'SELECT * FROM bets WHERE counterwager_remaining>0 AND '
    sql += 'bet_type=? AND feed_address=? AND leverage=? AND deadline=? '
    bindings += [bet_type, feed_address, leverage, deadline]
    if target_value is not None:
        sql += 'AND target_value=? '
        bindings.append(target_value)
    sql += 'ORDER BY ((counterwager_quantity+0.0)/(wager_quantity+0.0)) ASC LIMIT ?'
    bindings.append(limit)
    params = {'query': sql, 'bindings': bindings}
    return util.call_jsonrpc_api('sql', params)['result']
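# Usage sketch (hypothetical feed address, deadline, and bet type): find open
# bets that could match a new bet. The ORDER BY sorts by the
# counterwager/wager ratio ascending, so the most favorable odds come first.
candidates = find_bets(bet_type=1, feed_address='1FeedAddrXXXXXXXXXXXXXXXXXXXXXXXXX',
                       deadline=1420000000, leverage=5040, limit=10)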
def get_open_rps_count(possible_moves=3, exclude_addresses=[]):
    bindings = ['open', possible_moves]
    sql = 'SELECT wager, COUNT(*) AS game_count FROM rps WHERE status = ? AND possible_moves = ? '
    if isinstance(exclude_addresses, list) and len(exclude_addresses) > 0:
        sql += 'AND source NOT IN ({}) '.format(','.join(['?' for e in range(0, len(exclude_addresses))]))
        bindings += exclude_addresses
    sql += 'GROUP BY wager ORDER BY tx_index DESC'
    params = {'query': sql, 'bindings': bindings}
    return util.call_jsonrpc_api('sql', params)['result']
def find_user_bets(db, addresses, status="open"):
    params = {
        "filters": {"field": "source", "op": "IN", "value": addresses},
        "status": status,
        "order_by": "tx_index",
        "order_dir": "DESC",
        "limit": 100,
    }
    bets = util.call_jsonrpc_api("get_bets", params)["result"]

    sources = {}
    for bet in bets:
        sources[bet["feed_address"]] = True

    return {"bets": bets, "feeds": get_feeds_by_source(db, sources.keys())}
def get_market_orders(asset1, asset2, addresses=[], supplies=None, min_fee_provided=0.95, max_fee_required=0.95):
    base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
    if not supplies:
        supplies = get_assets_supply([asset1, asset2])
    market_orders = []

    sql = """SELECT orders.*, blocks.block_time FROM orders
             INNER JOIN blocks ON orders.block_index=blocks.block_index
             WHERE status = ? """
    bindings = ["open"]
    if len(addresses) > 0:
        sql += """AND source IN ({}) """.format(",".join(["?" for e in range(0, len(addresses))]))
        bindings += addresses
    sql += """AND give_remaining > 0
              AND give_asset IN (?, ?)
              AND get_asset IN (?, ?)
              ORDER BY tx_index DESC"""
    bindings += [asset1, asset2, asset1, asset2]
    orders = util.call_jsonrpc_api("sql", {"query": sql, "bindings": bindings})["result"]

    for order in orders:
        user_order = {}
        exclude = False
        if order["give_asset"] == "DOGE":
            try:
                fee_provided = order["fee_provided"] / (order["give_quantity"] / 100)
                user_order["fee_provided"] = format(
                    D(order["fee_provided"]) / (D(order["give_quantity"]) / D(100)), ".2f")
            except Exception as e:
                fee_provided = min_fee_provided - 1  # exclude
            exclude = fee_provided < min_fee_provided
        elif order["get_asset"] == "DOGE":
            try:
                fee_required = order["fee_required"] / (order["get_quantity"] / 100)
                user_order["fee_required"] = format(
                    D(order["fee_required"]) / (D(order["get_quantity"]) / D(100)), ".2f")
            except Exception as e:
                fee_required = max_fee_required + 1  # exclude
def get_market_orders(asset1, asset2, addresses=[], supplies=None, min_fee_provided=0.95, max_fee_required=0.95):
    base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
    if not supplies:
        supplies = get_assets_supply([asset1, asset2])
    market_orders = []
    buy_orders = []
    sell_orders = []

    sql = '''SELECT orders.*, blocks.block_time FROM orders
             INNER JOIN blocks ON orders.block_index=blocks.block_index
             WHERE status = ? '''
    bindings = ['open']
    if len(addresses) > 0:
        sql += '''AND source IN ({}) '''.format(','.join(['?' for e in range(0, len(addresses))]))
        bindings += addresses
    sql += '''AND give_remaining > 0
              AND give_asset IN (?, ?)
              AND get_asset IN (?, ?)
              ORDER BY tx_index DESC'''
    bindings += [asset1, asset2, asset1, asset2]
    orders = util.call_jsonrpc_api('sql', {'query': sql, 'bindings': bindings})['result']

    for order in orders:
        market_order = {}
        exclude = False
        if order['give_asset'] == 'WDC':
            try:
                fee_provided = order['fee_provided'] / (order['give_quantity'] / 100)
                market_order['fee_provided'] = format(
                    D(order['fee_provided']) / (D(order['give_quantity']) / D(100)), '.2f')
            except Exception as e:
                fee_provided = min_fee_provided - 1  # exclude
            exclude = fee_provided < min_fee_provided
        elif order['get_asset'] == 'WDC':
            try:
                fee_required = order['fee_required'] / (order['get_quantity'] / 100)
                market_order['fee_required'] = format(
                    D(order['fee_required']) / (D(order['get_quantity']) / D(100)), '.2f')
            except Exception as e:
                fee_required = max_fee_required + 1  # exclude
def find_user_bets(db, addresses, status='open'):
    params = {
        'filters': {'field': 'source', 'op': 'IN', 'value': addresses},
        'status': status,
        'order_by': 'tx_index',
        'order_dir': 'DESC',
        'limit': 100
    }
    bets = util.call_jsonrpc_api('get_bets', params)['result']

    sources = {}
    for bet in bets:
        sources[bet['feed_address']] = True

    return {'bets': bets, 'feeds': get_feeds_by_source(db, sources.keys())}
def get_asset_info(asset, at_dt=None):
    mongo_db = config.mongo_db
    asset_info = mongo_db.tracked_assets.find_one({'asset': asset})

    if asset not in (config.XCP, config.BTC) and at_dt and asset_info['_at_block_time'] > at_dt:
        #get the asset info at or before the given at_dt datetime
        for e in reversed(asset_info['_history']): #newest to oldest
            if e['_at_block_time'] <= at_dt:
                asset_info = e
                break
        else: #asset was created AFTER at_dt
            asset_info = None
        if asset_info is None:
            return None
        assert asset_info['_at_block_time'] <= at_dt

    if asset == config.BTC:
        if at_dt:
            start_block_index, end_block_index = util.get_block_indexes_for_dates(end_dt=at_dt)
            asset_info['total_issued'] = util_bitcoin.get_btc_supply(normalize=False, at_block_index=end_block_index)
            asset_info['total_issued_normalized'] = util_bitcoin.normalize_quantity(asset_info['total_issued'])
        else:
            asset_info['total_issued'] = util_bitcoin.get_btc_supply(normalize=False)
            asset_info['total_issued_normalized'] = util_bitcoin.normalize_quantity(asset_info['total_issued'])
    elif asset == config.XCP:
        asset_info['total_issued'] = util.call_jsonrpc_api("get_xcp_supply", abort_on_error=True)['result']
        asset_info['total_issued_normalized'] = util_bitcoin.normalize_quantity(asset_info['total_issued'])

    if not asset_info:
        raise Exception("Invalid asset: %s" % asset)
    return asset_info
def get_pair_price(base_asset, quote_asset, max_block_time=None, supplies=None):
    if not supplies:
        supplies = get_assets_supply([base_asset, quote_asset])

    sql = '''SELECT *, MAX(tx0_index, tx1_index) AS tx_index, blocks.block_time
             FROM order_matches INNER JOIN blocks ON order_matches.block_index = blocks.block_index
             WHERE forward_asset IN (?, ?) AND backward_asset IN (?, ?) '''
    bindings = [base_asset, quote_asset, base_asset, quote_asset]
    if max_block_time:
        sql += '''AND block_time <= ? '''
        bindings += [max_block_time]
    sql += '''ORDER BY tx_index DESC LIMIT 2'''

    order_matches = util.call_jsonrpc_api('sql', {'query': sql, 'bindings': bindings})['result']

    if len(order_matches) == 0:
        last_price = D(0.0)
    elif order_matches[0]['forward_asset'] == base_asset:
        last_price = calculate_price(order_matches[0]['forward_quantity'], order_matches[0]['backward_quantity'],
                                     supplies[order_matches[0]['forward_asset']][1],
                                     supplies[order_matches[0]['backward_asset']][1])
    else:
        last_price = calculate_price(order_matches[0]['backward_quantity'], order_matches[0]['forward_quantity'],
                                     supplies[order_matches[0]['backward_asset']][1],
                                     supplies[order_matches[0]['forward_asset']][1])

    trend = 0
    if len(order_matches) == 2:
        # compare against the previous (second-newest) match
        if order_matches[1]['forward_asset'] == base_asset:
            before_last_price = calculate_price(order_matches[1]['forward_quantity'], order_matches[1]['backward_quantity'],
                                                supplies[order_matches[1]['forward_asset']][1],
                                                supplies[order_matches[1]['backward_asset']][1])
        else:
            before_last_price = calculate_price(order_matches[1]['backward_quantity'], order_matches[1]['forward_quantity'],
                                                supplies[order_matches[1]['backward_asset']][1],
                                                supplies[order_matches[1]['forward_asset']][1])
        if last_price < before_last_price:
            trend = -1
        elif last_price > before_last_price:
            trend = 1

    return D(last_price), trend
def find_user_bets(db, addresses, status='open'):
    params = {
        'filters': {'field': 'source', 'op': 'IN', 'value': addresses},
        'status': status,
        'order_by': 'tx_index',
        'order_dir': 'DESC',
        'limit': 100
    }
    bets = util.call_jsonrpc_api('get_bets', params)['result']

    sources = {}
    for bet in bets:
        sources[bet['feed_address']] = True

    return {'bets': bets, 'feeds': get_feeds_by_source(db, sources.keys())}
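# Usage sketch (hypothetical address): the open bets for a wallet, plus the
# feed definitions they reference, resolved through get_feeds_by_source().
result = find_user_bets(config.mongo_db, ['1WalletAddrXXXXXXXXXXXXXXXXXXXXXXX'], status='open')
for bet in result['bets']:
    print(bet['feed_address'], bet['wager_remaining'])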
def publish_mempool_tx():
    """fetch new tx from mempool"""
    tx_hashes = []
    mempool_txs = mongo_db.mempool.find(fields={"tx_hash": True})
    for mempool_tx in mempool_txs:
        tx_hashes.append(str(mempool_tx["tx_hash"]))

    params = None
    if len(tx_hashes) > 0:
        params = {
            "filters": [
                {"field": "tx_hash", "op": "NOT IN", "value": tx_hashes},
                {"field": "category", "op": "IN", "value": ["sends", "btcpays", "issuances", "dividends", "callbacks"]},
            ],
            "filterop": "AND",
        }
    new_txs = util.call_jsonrpc_api("get_mempool", params, abort_on_error=True)

    for new_tx in new_txs["result"]:
        tx = {
            "tx_hash": new_tx["tx_hash"],
            "command": new_tx["command"],
            "category": new_tx["category"],
            "bindings": new_tx["bindings"],
            "timestamp": new_tx["timestamp"],
            "viewed_in_block": config.CURRENT_BLOCK_INDEX,
        }
        mongo_db.mempool.insert(tx)
        del tx["_id"]
        tx["_category"] = tx["category"]
        tx["_message_index"] = "mempool"
        logging.debug("Spotted mempool tx: %s" % tx)
        zmq_publisher_eventfeed.send_json(tx)
def get_market_trades(asset1, asset2, addresses=[], limit=50, supplies=None):
    limit = min(limit, 100)
    base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
    if not supplies:
        supplies = get_assets_supply([asset1, asset2])
    market_trades = []

    sources = ''
    bindings = ['expired']
    if len(addresses) > 0:
        placeholder = ','.join(['?' for e in range(0, len(addresses))])
        sources = '''AND (tx0_address IN ({}) OR tx1_address IN ({}))'''.format(placeholder, placeholder)
        bindings += addresses + addresses

    sql = '''SELECT order_matches.*, blocks.block_time FROM order_matches
             INNER JOIN blocks ON order_matches.block_index=blocks.block_index
             WHERE status != ? {}
                AND forward_asset IN (?, ?)
                AND backward_asset IN (?, ?)
             ORDER BY block_index DESC
             LIMIT ?'''.format(sources)
    bindings += [asset1, asset2, asset1, asset2, limit]

    order_matches = util.call_jsonrpc_api('sql', {'query': sql, 'bindings': bindings})['result']

    for order_match in order_matches:
        if order_match['tx0_address'] in addresses:
            trade = {}
            trade['match_id'] = order_match['id']
            trade['source'] = order_match['tx0_address']
            trade['countersource'] = order_match['tx1_address']
            trade['block_index'] = order_match['block_index']
            trade['block_time'] = order_match['block_time']
            trade['status'] = order_match['status']
            if order_match['forward_asset'] == base_asset:
                trade['type'] = 'SELL'
                trade['price'] = calculate_price(order_match['forward_quantity'], order_match['backward_quantity'],
                                                 supplies[order_match['forward_asset']][1],
                                                 supplies[order_match['backward_asset']][1], 'SELL')
                trade['amount'] = order_match['forward_quantity']
                trade['total'] = order_match['backward_quantity']
            else:
                trade['type'] = 'BUY'
                trade['price'] = calculate_price(order_match['backward_quantity'], order_match['forward_quantity'],
                                                 supplies[order_match['backward_asset']][1],
                                                 supplies[order_match['forward_asset']][1], 'BUY')
                trade['amount'] = order_match['backward_quantity']
                trade['total'] = order_match['forward_quantity']
            market_trades.append(trade)

        if len(addresses) == 0 or order_match['tx1_address'] in addresses:
            trade = {}
            trade['match_id'] = order_match['id']
            trade['source'] = order_match['tx1_address']
            trade['countersource'] = order_match['tx0_address']
            trade['block_index'] = order_match['block_index']
            trade['block_time'] = order_match['block_time']
            trade['status'] = order_match['status']
            if order_match['backward_asset'] == base_asset:
                trade['type'] = 'SELL'
                trade['price'] = calculate_price(order_match['backward_quantity'], order_match['forward_quantity'],
                                                 supplies[order_match['backward_asset']][1],
                                                 supplies[order_match['forward_asset']][1], 'SELL')
                trade['amount'] = order_match['backward_quantity']
                trade['total'] = order_match['forward_quantity']
            else:
                trade['type'] = 'BUY'
                trade['price'] = calculate_price(order_match['forward_quantity'], order_match['backward_quantity'],
                                                 supplies[order_match['forward_asset']][1],
                                                 supplies[order_match['backward_asset']][1], 'BUY')
                trade['amount'] = order_match['forward_quantity']
                trade['total'] = order_match['backward_quantity']
            market_trades.append(trade)

    return market_trades
def get_market_trades(asset1, asset2, addresses=[], limit=100, supplies=None):
    base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
    if not supplies:
        supplies = get_assets_supply([asset1, asset2])
    market_trades = []

    sources = ''
    bindings = ['expired']
    if len(addresses) > 0:
        placeholder = ','.join(['?' for e in range(0, len(addresses))])
        sources = '''AND (tx0_address IN ({}) OR tx1_address IN ({}))'''.format(placeholder, placeholder)
        bindings += addresses + addresses

    sql = '''SELECT order_matches.*, blocks.block_time FROM order_matches
             INNER JOIN blocks ON order_matches.block_index=blocks.block_index
             WHERE status != ? {}
                AND forward_asset IN (?, ?)
                AND backward_asset IN (?, ?)
             ORDER BY block_index DESC'''.format(sources)
    bindings += [asset1, asset2, asset1, asset2]

    order_matches = util.call_jsonrpc_api('sql', {'query': sql, 'bindings': bindings})['result']

    for order_match in order_matches:
        if order_match['tx0_address'] in addresses:
            trade = {}
            trade['match_id'] = order_match['id']
            trade['source'] = order_match['tx0_address']
            trade['countersource'] = order_match['tx1_address']
            trade['block_index'] = order_match['block_index']
            trade['block_time'] = order_match['block_time']
            trade['status'] = order_match['status']
            if order_match['forward_asset'] == base_asset:
                trade['type'] = 'SELL'
                trade['price'] = format_price(order_match['forward_quantity'], order_match['backward_quantity'],
                                              supplies[order_match['forward_asset']][1],
                                              supplies[order_match['backward_asset']][1])
                trade['amount'] = order_match['forward_quantity']
                trade['total'] = order_match['backward_quantity']
            else:
                trade['type'] = 'BUY'
                trade['price'] = format_price(order_match['backward_quantity'], order_match['forward_quantity'],
                                              supplies[order_match['backward_asset']][1],
                                              supplies[order_match['forward_asset']][1])
                trade['amount'] = order_match['backward_quantity']
                trade['total'] = order_match['forward_quantity']
            market_trades.append(trade)

        if len(addresses) == 0 or order_match['tx1_address'] in addresses:
            trade = {}
            trade['match_id'] = order_match['id']
            trade['source'] = order_match['tx1_address']
            trade['countersource'] = order_match['tx0_address']
            trade['block_index'] = order_match['block_index']
            trade['block_time'] = order_match['block_time']
            trade['status'] = order_match['status']
            if order_match['backward_asset'] == base_asset:
                trade['type'] = 'SELL'
                trade['price'] = format_price(order_match['backward_quantity'], order_match['forward_quantity'],
                                              supplies[order_match['backward_asset']][1],
                                              supplies[order_match['forward_asset']][1])
                trade['amount'] = order_match['backward_quantity']
                trade['total'] = order_match['forward_quantity']
            else:
                trade['type'] = 'BUY'
                trade['price'] = format_price(order_match['forward_quantity'], order_match['backward_quantity'],
                                              supplies[order_match['forward_asset']][1],
                                              supplies[order_match['backward_asset']][1])
                trade['amount'] = order_match['forward_quantity']
                trade['total'] = order_match['backward_quantity']
            market_trades.append(trade)

    return market_trades
def compile_asset_pair_market_info():
    mongo_db = config.mongo_db
    end_dt = datetime.datetime.utcnow()
    start_dt = end_dt - datetime.timedelta(days=1)
    start_block_index, end_block_index = util.get_block_indexes_for_dates(start_dt=start_dt, end_dt=end_dt)
    open_orders = util.call_jsonrpc_api("get_orders", {
        'filters': [
            {'field': 'give_remaining', 'op': '>', 'value': 0},
            {'field': 'get_remaining', 'op': '>', 'value': 0},
            {'field': 'fee_required_remaining', 'op': '>=', 'value': 0},
            {'field': 'fee_provided_remaining', 'op': '>=', 'value': 0},
        ],
        'status': 'open',
        'show_expired': False,
    }, abort_on_error=True)['result']

    pair_data = {}
    asset_info = {}

    def get_price(base_quantity_normalized, quote_quantity_normalized):
        return float(D(quote_quantity_normalized / base_quantity_normalized))

    #COMPOSE order depth, lowest ask, and highest bid column data
    for o in open_orders:
        (base_asset, quote_asset) = util.assets_to_asset_pair(o['give_asset'], o['get_asset'])
        pair = '%s/%s' % (base_asset, quote_asset)

        base_asset_info = asset_info.get(base_asset, mongo_db.tracked_assets.find_one({'asset': base_asset}))
        if base_asset not in asset_info:
            asset_info[base_asset] = base_asset_info
        quote_asset_info = asset_info.get(quote_asset, mongo_db.tracked_assets.find_one({'asset': quote_asset}))
        if quote_asset not in asset_info:
            asset_info[quote_asset] = quote_asset_info

        pair_data.setdefault(pair, {
            'open_orders_count': 0, 'lowest_ask': None, 'highest_bid': None,
            'completed_trades_count': 0, 'vol_base': 0, 'vol_quote': 0})
        pair_data[pair]['open_orders_count'] += 1

        base_quantity_normalized = util_bitcoin.normalize_quantity(
            o['give_quantity'] if base_asset == o['give_asset'] else o['get_quantity'], base_asset_info['divisible'])
        quote_quantity_normalized = util_bitcoin.normalize_quantity(
            o['give_quantity'] if quote_asset == o['give_asset'] else o['get_quantity'], quote_asset_info['divisible'])
        order_price = get_price(base_quantity_normalized, quote_quantity_normalized)

        if base_asset == o['give_asset']: #selling base
            if pair_data[pair]['lowest_ask'] is None or order_price < pair_data[pair]['lowest_ask']:
                pair_data[pair]['lowest_ask'] = order_price
        elif base_asset == o['get_asset']: #buying base
            if pair_data[pair]['highest_bid'] is None or order_price > pair_data[pair]['highest_bid']:
                pair_data[pair]['highest_bid'] = order_price

    trades_data_by_pair = mongo_db.trades.aggregate([
        {"$match": {
            "block_time": {"$gte": start_dt, "$lte": end_dt}
        }},
        {"$project": {
            "base_asset": 1,
            "quote_asset": 1,
            "base_quantity_normalized": 1, #to derive base volume
            "quote_quantity_normalized": 1 #to derive quote volume
        }},
        {"$group": {
            "_id": {"base_asset": "$base_asset", "quote_asset": "$quote_asset"},
            "vol_base": {"$sum": "$base_quantity_normalized"},
            "vol_quote": {"$sum": "$quote_quantity_normalized"},
            "count": {"$sum": 1},
        }}
    ])
    for e in trades_data_by_pair:
        pair = '%s/%s' % (e['_id']['base_asset'], e['_id']['quote_asset'])
        pair_data.setdefault(pair, {'open_orders_count': 0, 'lowest_ask': None, 'highest_bid': None})
        #^ initialize an empty pair in the event there are no open orders for that pair, but there ARE completed trades for it
        pair_data[pair]['completed_trades_count'] = e['count']
        pair_data[pair]['vol_base'] = e['vol_base']
        pair_data[pair]['vol_quote'] = e['vol_quote']

    mps_xcp_btc, xcp_btc_price, btc_xcp_price = get_price_primatives()
    for pair, e in pair_data.items():
        base_asset, quote_asset = pair.split('/')
        _24h_vol_in_btc = None
        _24h_vol_in_xcp = None
        if base_asset == config.XCP:
            _24h_vol_in_xcp = e['vol_base']
            _24h_vol_in_btc = util_bitcoin.round_out(e['vol_base'] * xcp_btc_price) if xcp_btc_price else 0
        elif base_asset == config.BTC:
            _24h_vol_in_xcp = util_bitcoin.round_out(e['vol_base'] * btc_xcp_price) if btc_xcp_price else 0
            _24h_vol_in_btc = e['vol_base']
        else:
            price_summary_in_xcp, price_summary_in_btc, price_in_xcp, price_in_btc, aggregated_price_in_xcp, aggregated_price_in_btc = \
                get_xcp_btc_price_info(base_asset, mps_xcp_btc, xcp_btc_price, btc_xcp_price,
                                       with_last_trades=0, start_dt=start_dt, end_dt=end_dt)
            if price_in_xcp:
                _24h_vol_in_xcp = util_bitcoin.round_out(e['vol_base'] * price_in_xcp)
            if price_in_btc:
                _24h_vol_in_btc = util_bitcoin.round_out(e['vol_base'] * price_in_btc)

            if _24h_vol_in_xcp is None or _24h_vol_in_btc is None:
                price_summary_in_xcp, price_summary_in_btc, price_in_xcp, price_in_btc, aggregated_price_in_xcp, aggregated_price_in_btc = \
                    get_xcp_btc_price_info(quote_asset, mps_xcp_btc, xcp_btc_price, btc_xcp_price,
                                           with_last_trades=0, start_dt=start_dt, end_dt=end_dt)
                if _24h_vol_in_xcp is None and price_in_xcp:
                    _24h_vol_in_xcp = util_bitcoin.round_out(e['vol_quote'] * price_in_xcp)
                if _24h_vol_in_btc is None and price_in_btc:
                    _24h_vol_in_btc = util_bitcoin.round_out(e['vol_quote'] * price_in_btc)
        pair_data[pair]['24h_vol_in_{}'.format(config.XCP.lower())] = _24h_vol_in_xcp #might still be None
        pair_data[pair]['24h_vol_in_{}'.format(config.BTC.lower())] = _24h_vol_in_btc #might still be None

        #get % change stats -- start by getting the first trade directly before the 24h period starts
        prev_trade = mongo_db.trades.find({
            "base_asset": base_asset,
            "quote_asset": quote_asset,
            "block_time": {'$lt': start_dt}}).sort('block_time', pymongo.DESCENDING).limit(1)
        latest_trade = mongo_db.trades.find({
            "base_asset": base_asset,
            "quote_asset": quote_asset}).sort('block_time', pymongo.DESCENDING).limit(1)
        if not prev_trade.count(): #no previous trade before this 24hr period
            pair_data[pair]['24h_pct_change'] = None
        else:
            prev_trade = prev_trade[0]
            latest_trade = latest_trade[0]
            prev_trade_price = get_price(prev_trade['base_quantity_normalized'], prev_trade['quote_quantity_normalized'])
            latest_trade_price = get_price(latest_trade['base_quantity_normalized'], latest_trade['quote_quantity_normalized'])
            pair_data[pair]['24h_pct_change'] = ((latest_trade_price - prev_trade_price) / prev_trade_price) * 100
        pair_data[pair]['last_updated'] = end_dt
        mongo_db.asset_pair_market_info.update(
            {'base_asset': base_asset, 'quote_asset': quote_asset},
            {"$set": pair_data[pair]}, upsert=True)

    #remove any old pairs that were not just updated
    mongo_db.asset_pair_market_info.remove({'last_updated': {'$lt': end_dt}})
    logging.info("Recomposed 24h trade statistics for %i asset pairs: %s" % (
        len(pair_data), ', '.join(list(pair_data.keys()))))
def get_pairs(quote_asset='XLT', exclude_pairs=[], max_pairs=12, from_time=None):
    bindings = []
    sql = '''SELECT (CASE WHEN forward_asset = ? THEN backward_asset ELSE forward_asset END) AS base_asset,
                    (CASE WHEN backward_asset = ? THEN backward_asset ELSE forward_asset END) AS quote_asset,
                    (CASE WHEN backward_asset = ? THEN (forward_asset || '/' || backward_asset) ELSE (backward_asset || '/' || forward_asset) END) AS pair,
                    (CASE WHEN forward_asset = ? THEN backward_quantity ELSE forward_quantity END) AS bq,
                    (CASE WHEN backward_asset = ? THEN backward_quantity ELSE forward_quantity END) AS qq '''
    if from_time:
        sql += ''', block_time '''
    sql += '''FROM order_matches '''
    bindings += [quote_asset, quote_asset, quote_asset, quote_asset, quote_asset]
    if from_time:
        sql += '''INNER JOIN blocks ON order_matches.block_index = blocks.block_index '''

    priority_quote_assets = []
    for priority_quote_asset in config.QUOTE_ASSETS:
        if priority_quote_asset != quote_asset:
            priority_quote_assets.append(priority_quote_asset)
        else:
            break

    if len(priority_quote_assets) > 0:
        asset_bindings = ','.join(['?' for e in range(0, len(priority_quote_assets))])
        sql += '''WHERE ((forward_asset = ? AND backward_asset NOT IN ({}))
                  OR (forward_asset NOT IN ({}) AND backward_asset = ?)) '''.format(asset_bindings, asset_bindings)
        bindings += [quote_asset] + priority_quote_assets + priority_quote_assets + [quote_asset]
    else:
        sql += '''WHERE ((forward_asset = ?) OR (backward_asset = ?)) '''
        bindings += [quote_asset, quote_asset]

    if len(exclude_pairs) > 0:
        sql += '''AND pair NOT IN ({}) '''.format(','.join(['?' for e in range(0, len(exclude_pairs))]))
        bindings += exclude_pairs
    if from_time:
        sql += '''AND block_time > ? '''
        bindings += [from_time]
    sql += '''AND forward_asset != backward_asset AND status = ?'''
    bindings += ['completed', max_pairs]
    sql = '''SELECT base_asset, quote_asset, pair, SUM(bq) AS base_quantity, SUM(qq) AS quote_quantity
             FROM ({}) GROUP BY pair ORDER BY quote_quantity DESC LIMIT ?'''.format(sql)
    return util.call_jsonrpc_api('sql', {'query': sql, 'bindings': bindings})['result']
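# Note on the query shape used here (and in get_xcp_or_btc_pairs above, in the
# variants that wrap a subquery): the inner SELECT emits one row per order
# match, with per-match base/quote quantities (bq/qq); the outer SELECT then
# groups those rows by pair and sums them, so the LIMIT applies to distinct
# pairs rather than to individual matches.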
def compile_asset_pair_market_info(): """Compiles the pair-level statistics that show on the View Prices page of counterwallet, for instance""" #loop through all open orders, and compile a listing of pairs, with a count of open orders for each pair mongo_db = config.mongo_db end_dt = datetime.datetime.utcnow() start_dt = end_dt - datetime.timedelta(days=1) start_block_index, end_block_index = util.get_block_indexes_for_dates(start_dt=start_dt, end_dt=end_dt) open_orders = util.call_jsonrpc_api("get_orders", { 'filters': [ {'field': 'give_remaining', 'op': '>', 'value': 0}, {'field': 'get_remaining', 'op': '>', 'value': 0}, {'field': 'fee_required_remaining', 'op': '>=', 'value': 0}, {'field': 'fee_provided_remaining', 'op': '>=', 'value': 0}, ], 'status': 'open', 'show_expired': False, }, abort_on_error=True)['result'] pair_data = {} asset_info = {} def get_price(base_quantity_normalized, quote_quantity_normalized): return float(D(quote_quantity_normalized / base_quantity_normalized )) #COMPOSE order depth, lowest ask, and highest bid column data for o in open_orders: (base_asset, quote_asset) = util.assets_to_asset_pair(o['give_asset'], o['get_asset']) pair = '%s/%s' % (base_asset, quote_asset) base_asset_info = asset_info.get(base_asset, mongo_db.tracked_assets.find_one({ 'asset': base_asset })) if base_asset not in asset_info: asset_info[base_asset] = base_asset_info quote_asset_info = asset_info.get(quote_asset, mongo_db.tracked_assets.find_one({ 'asset': quote_asset })) if quote_asset not in asset_info: asset_info[quote_asset] = quote_asset_info pair_data.setdefault(pair, {'open_orders_count': 0, 'lowest_ask': None, 'highest_bid': None, 'completed_trades_count': 0, 'vol_base': 0, 'vol_quote': 0}) #^ highest ask = open order selling base, highest bid = open order buying base #^ we also initialize completed_trades_count, vol_base, vol_quote because every pair inited here may # not have cooresponding data out of the trades_data_by_pair aggregation below pair_data[pair]['open_orders_count'] += 1 base_quantity_normalized = util_bitcoin.normalize_quantity(o['give_quantity'] if base_asset == o['give_asset'] else o['get_quantity'], base_asset_info['divisible']) quote_quantity_normalized = util_bitcoin.normalize_quantity(o['give_quantity'] if quote_asset == o['give_asset'] else o['get_quantity'], quote_asset_info['divisible']) order_price = get_price(base_quantity_normalized, quote_quantity_normalized) if base_asset == o['give_asset']: #selling base if pair_data[pair]['lowest_ask'] is None or order_price < pair_data[pair]['lowest_ask']: pair_data[pair]['lowest_ask'] = order_price elif base_asset == o['get_asset']: #buying base if pair_data[pair]['highest_bid'] is None or order_price > pair_data[pair]['highest_bid']: pair_data[pair]['highest_bid'] = order_price #COMPOSE volume data (in XCP and BTC), and % change data #loop through all trade volume over the past 24h, and match that to the open orders trades_data_by_pair = mongo_db.trades.aggregate([ {"$match": { "block_time": {"$gte": start_dt, "$lte": end_dt } } }, {"$project": { "base_asset": 1, "quote_asset": 1, "base_quantity_normalized": 1, #to derive base volume "quote_quantity_normalized": 1 #to derive quote volume }}, {"$group": { "_id": {"base_asset": "$base_asset", "quote_asset": "$quote_asset"}, "vol_base": {"$sum": "$base_quantity_normalized"}, "vol_quote": {"$sum": "$quote_quantity_normalized"}, "count": {"$sum": 1}, }} ]) trades_data_by_pair = [] if not trades_data_by_pair['ok'] else trades_data_by_pair['result'] for e in 
trades_data_by_pair: pair = '%s/%s' % (e['_id']['base_asset'], e['_id']['quote_asset']) pair_data.setdefault(pair, {'open_orders_count': 0, 'lowest_ask': None, 'highest_bid': None}) #^ initialize an empty pair in the event there are no open orders for that pair, but there ARE completed trades for it pair_data[pair]['completed_trades_count'] = e['count'] pair_data[pair]['vol_base'] = e['vol_base'] pair_data[pair]['vol_quote'] = e['vol_quote'] #compose price data, relative to BTC and XCP mps_xcp_btc, xcp_btc_price, btc_xcp_price = get_price_primatives() for pair, e in pair_data.iteritems(): base_asset, quote_asset = pair.split('/') _24h_vol_in_btc = None _24h_vol_in_xcp = None #derive asset price data, expressed in BTC and XCP, for the given volumes if base_asset == config.XCP: _24h_vol_in_xcp = e['vol_base'] _24h_vol_in_btc = util_bitcoin.round_out(e['vol_base'] * xcp_btc_price) if xcp_btc_price else 0 elif base_asset == config.BTC: _24h_vol_in_xcp = util_bitcoin.round_out(e['vol_base'] * btc_xcp_price) if btc_xcp_price else 0 _24h_vol_in_btc = e['vol_base'] else: #base is not XCP or BTC price_summary_in_xcp, price_summary_in_btc, price_in_xcp, price_in_btc, aggregated_price_in_xcp, aggregated_price_in_btc = \ get_xcp_btc_price_info(base_asset, mps_xcp_btc, xcp_btc_price, btc_xcp_price, with_last_trades=0, start_dt=start_dt, end_dt=end_dt) if price_in_xcp: _24h_vol_in_xcp = util_bitcoin.round_out(e['vol_base'] * price_in_xcp) if price_in_btc: _24h_vol_in_btc = util_bitcoin.round_out(e['vol_base'] * price_in_btc) if _24h_vol_in_xcp is None or _24h_vol_in_btc is None: #the base asset didn't have price data against BTC or XCP, or both...try against the quote asset instead price_summary_in_xcp, price_summary_in_btc, price_in_xcp, price_in_btc, aggregated_price_in_xcp, aggregated_price_in_btc = \ get_xcp_btc_price_info(quote_asset, mps_xcp_btc, xcp_btc_price, btc_xcp_price, with_last_trades=0, start_dt=start_dt, end_dt=end_dt) if _24h_vol_in_xcp is None and price_in_xcp: _24h_vol_in_xcp = util_bitcoin.round_out(e['vol_quote'] * price_in_xcp) if _24h_vol_in_btc is None and price_in_btc: _24h_vol_in_btc = util_bitcoin.round_out(e['vol_quote'] * price_in_btc) pair_data[pair]['24h_vol_in_{}'.format(config.XCP.lower())] = _24h_vol_in_xcp #might still be None pair_data[pair]['24h_vol_in_{}'.format(config.BTC.lower())] = _24h_vol_in_btc #might still be None #get % change stats -- start by getting the first trade directly before the 24h period starts prev_trade = mongo_db.trades.find({ "base_asset": base_asset, "quote_asset": quote_asset, "block_time": {'$lt': start_dt}}).sort('block_time', pymongo.DESCENDING).limit(1) latest_trade = mongo_db.trades.find({ "base_asset": base_asset, "quote_asset": quote_asset}).sort('block_time', pymongo.DESCENDING).limit(1) if not prev_trade.count(): #no previous trade before this 24hr period pair_data[pair]['24h_pct_change'] = None else: prev_trade = prev_trade[0] latest_trade = latest_trade[0] prev_trade_price = get_price(prev_trade['base_quantity_normalized'], prev_trade['quote_quantity_normalized']) latest_trade_price = get_price(latest_trade['base_quantity_normalized'], latest_trade['quote_quantity_normalized']) pair_data[pair]['24h_pct_change'] = ((latest_trade_price - prev_trade_price) / prev_trade_price) * 100 pair_data[pair]['last_updated'] = end_dt #print "PRODUCED", pair, pair_data[pair] mongo_db.asset_pair_market_info.update( {'base_asset': base_asset, 'quote_asset': quote_asset}, {"$set": pair_data[pair]}, upsert=True) #remove any old pairs that were not 
just updated mongo_db.asset_pair_market_info.remove({'last_updated': {'$lt': end_dt}}) logging.info("Recomposed 24h trade statistics for %i asset pairs: %s" % (len(pair_data), ', '.join(pair_data.keys())))
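# Illustrative sketch (not part of the module): the 24h_pct_change figure above compares
# the latest trade inside the 24h window against the last trade before it, using the same
# quote-units-per-base-unit price convention as get_price(). All quantities below are
# hypothetical.
def _example_24h_pct_change():
    from decimal import Decimal as D
    def price(base_qty, quote_qty):  # quote units paid per base unit
        return float(D(str(quote_qty)) / D(str(base_qty)))
    prev_trade_price = price(100, 250)    # last trade before the 24h window
    latest_trade_price = price(100, 275)  # latest trade inside the window
    return ((latest_trade_price - prev_trade_price) / prev_trade_price) * 100

assert round(_example_24h_pct_change(), 8) == 10.0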
def get_escrowed_balances(addresses):
    addresses_holder = ','.join(['?' for e in range(0, len(addresses))])

    #escrowed in open orders (the base chain asset, SFR, is never escrowed on the give side)
    sql = '''SELECT (source || '_' || give_asset) AS source_asset, source AS address, give_asset AS asset, SUM(give_remaining) AS quantity
             FROM orders
             WHERE source IN ({}) AND status = ? AND give_asset != ?
             GROUP BY source_asset'''.format(addresses_holder)
    bindings = addresses + ['open', 'SFR']
    results = util.call_jsonrpc_api("sql", {'query': sql, 'bindings': bindings}, abort_on_error=True)['result']

    #escrowed in pending order matches (forward side)
    sql = '''SELECT (tx0_address || '_' || forward_asset) AS source_asset, tx0_address AS address, forward_asset AS asset, SUM(forward_quantity) AS quantity
             FROM order_matches
             WHERE tx0_address IN ({}) AND forward_asset != ? AND status = ?
             GROUP BY source_asset'''.format(addresses_holder)
    bindings = addresses + ['SFR', 'pending']
    results += util.call_jsonrpc_api("sql", {'query': sql, 'bindings': bindings}, abort_on_error=True)['result']

    #escrowed in pending order matches (backward side)
    sql = '''SELECT (tx1_address || '_' || backward_asset) AS source_asset, tx1_address AS address, backward_asset AS asset, SUM(backward_quantity) AS quantity
             FROM order_matches
             WHERE tx1_address IN ({}) AND backward_asset != ? AND status = ?
             GROUP BY source_asset'''.format(addresses_holder)
    bindings = addresses + ['SFR', 'pending']
    results += util.call_jsonrpc_api("sql", {'query': sql, 'bindings': bindings}, abort_on_error=True)['result']

    #escrowed in open bets
    sql = '''SELECT source AS address, '{}' AS asset, SUM(wager_remaining) AS quantity
             FROM bets
             WHERE source IN ({}) AND status = ?
             GROUP BY address'''.format(config.XCP, addresses_holder)
    bindings = addresses + ['open']
    results += util.call_jsonrpc_api("sql", {'query': sql, 'bindings': bindings}, abort_on_error=True)['result']

    #escrowed in pending bet matches (both sides)
    sql = '''SELECT tx0_address AS address, '{}' AS asset, SUM(forward_quantity) AS quantity
             FROM bet_matches
             WHERE tx0_address IN ({}) AND status = ?
             GROUP BY address'''.format(config.XCP, addresses_holder)
    bindings = addresses + ['pending']
    results += util.call_jsonrpc_api("sql", {'query': sql, 'bindings': bindings}, abort_on_error=True)['result']

    sql = '''SELECT tx1_address AS address, '{}' AS asset, SUM(backward_quantity) AS quantity
             FROM bet_matches
             WHERE tx1_address IN ({}) AND status = ?
             GROUP BY address'''.format(config.XCP, addresses_holder)
    bindings = addresses + ['pending']
    results += util.call_jsonrpc_api("sql", {'query': sql, 'bindings': bindings}, abort_on_error=True)['result']

    #escrowed in open rps games
    sql = '''SELECT source AS address, '{}' AS asset, SUM(wager) AS quantity
             FROM rps
             WHERE source IN ({}) AND status = ?
             GROUP BY address'''.format(config.XCP, addresses_holder)
    bindings = addresses + ['open']
    results += util.call_jsonrpc_api("sql", {'query': sql, 'bindings': bindings}, abort_on_error=True)['result']

    #escrowed in unresolved rps matches (both sides)
    sql = '''SELECT tx0_address AS address, '{}' AS asset, SUM(wager) AS quantity
             FROM rps_matches
             WHERE tx0_address IN ({}) AND status IN (?, ?, ?)
             GROUP BY address'''.format(config.XCP, addresses_holder)
    bindings = addresses + ['pending', 'pending and resolved', 'resolved and pending']
    results += util.call_jsonrpc_api("sql", {'query': sql, 'bindings': bindings}, abort_on_error=True)['result']

    sql = '''SELECT tx1_address AS address, '{}' AS asset, SUM(wager) AS quantity
             FROM rps_matches
             WHERE tx1_address IN ({}) AND status IN (?, ?, ?)
             GROUP BY address'''.format(config.XCP, addresses_holder)
    bindings = addresses + ['pending', 'pending and resolved', 'resolved and pending']
    results += util.call_jsonrpc_api("sql", {'query': sql, 'bindings': bindings}, abort_on_error=True)['result']

    #fold all escrow rows into {address: {asset: quantity}}
    escrowed_balances = {}
    for order in results:
        if order['address'] not in escrowed_balances:
            escrowed_balances[order['address']] = {}
        if order['asset'] not in escrowed_balances[order['address']]:
            escrowed_balances[order['address']][order['asset']] = 0
        escrowed_balances[order['address']][order['asset']] += order['quantity']
    return escrowed_balances
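# Usage sketch for get_escrowed_balances(): every SQL result row above shares the
# (address, asset, quantity) shape, so the final fold is a plain nested-dict sum.
# A self-contained rerun of that fold on made-up rows (addresses/assets hypothetical;
# quantities are raw, non-normalized units, exactly as the SQL SUM()s return them):
def _example_escrow_fold():
    rows = [{'address': '1AddrAAA', 'asset': 'XCP', 'quantity': 100000000},
            {'address': '1AddrAAA', 'asset': 'XCP', 'quantity': 50000000},
            {'address': '1AddrAAA', 'asset': 'PIZZA', 'quantity': 40}]
    escrowed = {}
    for r in rows:
        escrowed.setdefault(r['address'], {}).setdefault(r['asset'], 0)
        escrowed[r['address']][r['asset']] += r['quantity']
    return escrowed

assert _example_escrow_fold() == {'1AddrAAA': {'XCP': 150000000, 'PIZZA': 40}}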
def process_cpd_blockfeed(): LATEST_BLOCK_INIT = { 'block_index': config.BLOCK_FIRST, 'block_time': None, 'block_hash': None } mongo_db = config.mongo_db def blow_away_db(): mongo_db.processed_blocks.drop() mongo_db.tracked_assets.drop() mongo_db.trades.drop() mongo_db.balance_changes.drop() mongo_db.asset_market_info.drop() mongo_db.asset_marketcap_history.drop() mongo_db.pair_market_info.drop() mongo_db.btc_open_orders.drop() mongo_db.asset_extended_info.drop() mongo_db.transaction_stats.drop() mongo_db.feeds.drop() mongo_db.wallet_stats.drop() mongo_db.wallet_messages.drop() mongo_db.app_config.update( {}, { 'db_version': config.DB_VERSION, 'running_testnet': config.TESTNET, 'counterpartyd_db_version_major': None, 'counterpartyd_db_version_minor': None, 'counterpartyd_running_testnet': None, 'last_block_assets_compiled': config. BLOCK_FIRST, #for asset data compilation in events.py (resets on reparse as well) }, upsert=True) app_config = mongo_db.app_config.find()[0] for asset in [config.XCP, config.BTC]: base_asset = { 'asset': asset, 'owner': None, 'divisible': True, 'locked': False, 'total_issued': None, '_at_block': config.BLOCK_FIRST, #the block ID this asset is current for '_history': [] #to allow for block rollbacks } mongo_db.tracked_assets.insert(base_asset) mongo_db.wallet_messages.insert({ '_id': 0, 'when': calendar.timegm(time.gmtime()), 'message': None, }) #reinitialize some internal counters config.CURRENT_BLOCK_INDEX = 0 config.LAST_MESSAGE_INDEX = -1 config.cw_last_message_seq = 0 return app_config def prune_my_stale_blocks(max_block_index): assert isinstance(max_block_index, int) if max_block_index <= config.BLOCK_FIRST: max_block_index = config.BLOCK_FIRST + 1 if not mongo_db.processed_blocks.find_one( {"block_index": max_block_index}): raise Exception( "Can't roll back to specified block index: %i doesn't exist in database" % max_block_index) logging.warn("Pruning to block %i ..." % (max_block_index)) mongo_db.processed_blocks.remove( {"block_index": { "$gt": max_block_index }}) mongo_db.balance_changes.remove( {"block_index": { "$gt": max_block_index }}) mongo_db.trades.remove({"block_index": {"$gt": max_block_index}}) mongo_db.asset_marketcap_history.remove( {"block_index": { "$gt": max_block_index }}) mongo_db.transaction_stats.remove( {"block_index": { "$gt": max_block_index }}) #to roll back the state of the tracked asset, dive into the history object for each asset that has # been updated on or after the block that we are pruning back to assets_to_prune = mongo_db.tracked_assets.find( {'_at_block': { "$gt": max_block_index }}) for asset in assets_to_prune: logging.info( "Pruning asset %s (last modified @ block %i, pruning to state at block %i)" % (asset['asset'], asset['_at_block'], max_block_index)) prev_ver = None while len(asset['_history']): prev_ver = asset['_history'].pop() if prev_ver['_at_block'] <= max_block_index: break if prev_ver: if prev_ver['_at_block'] > max_block_index: #even the first history version is newer than max_block_index. #in this case, just remove the asset tracking record itself mongo_db.tracked_assets.remove({'asset': asset['asset']}) else: #if here, we were able to find a previous version that was saved at or before max_block_index # (which should be prev_ver ... 
restore the asset's values to that prior version
                    prev_ver['_id'] = asset['_id']
                    prev_ver['_history'] = asset['_history']
                    mongo_db.tracked_assets.save(prev_ver)
        config.LAST_MESSAGE_INDEX = -1
        config.CAUGHT_UP = False
        latest_block = mongo_db.processed_blocks.find_one(
            {"block_index": max_block_index})
        return latest_block

    def publish_mempool_tx():
        """fetch new tx from mempool"""
        tx_hashes = []
        mempool_txs = mongo_db.mempool.find(projection={'tx_hash': True})
        for mempool_tx in mempool_txs:
            tx_hashes.append(str(mempool_tx['tx_hash']))
        params = None
        if len(tx_hashes) > 0:
            params = {
                'filters': [
                    {'field': 'tx_hash', 'op': 'NOT IN', 'value': tx_hashes},
                    {'field': 'category', 'op': 'IN',
                     'value': ['sends', 'btcpays', 'issuances', 'dividends']}
                ],
                'filterop': 'AND'
            }
        new_txs = util.call_jsonrpc_api("get_mempool", params, abort_on_error=True)
        for new_tx in new_txs['result']:
            tx = {
                'tx_hash': new_tx['tx_hash'],
                'command': new_tx['command'],
                'category': new_tx['category'],
                'bindings': new_tx['bindings'],
                'timestamp': new_tx['timestamp'],
                'viewed_in_block': config.CURRENT_BLOCK_INDEX
            }
            mongo_db.mempool.insert(tx)
            del tx['_id']
            tx['_category'] = tx['category']
            tx['_message_index'] = 'mempool'
            logging.debug("Spotted mempool tx: %s" % tx)
            util.store_wallet_message(tx, json.loads(tx['bindings']), decorate=False)

    def clean_mempool_tx():
        """clean mempool transactions older than MAX_REORG_NUM_BLOCKS blocks"""
        mongo_db.mempool.remove({
            "viewed_in_block": {
                "$lt": config.CURRENT_BLOCK_INDEX - config.MAX_REORG_NUM_BLOCKS
            }
        })

    config.CURRENT_BLOCK_INDEX = 0
    config.LAST_MESSAGE_INDEX = -1
    config.BLOCKCHAIN_SERVICE_LAST_BLOCK = 0  #simply for printing/alerting purposes
    config.CAUGHT_UP_STARTED_EVENTS = False
    #^ set after we are caught up and start up the recurring events that depend on us being caught up with the blockchain

    #grab our stored preferences, and rebuild the database if necessary
    app_config = mongo_db.app_config.find()
    assert app_config.count() in [0, 1]
    if (app_config.count() == 0 or config.REPARSE_FORCED
            or app_config[0]['db_version'] != config.DB_VERSION
            or app_config[0]['running_testnet'] != config.TESTNET):
        if app_config.count():
            logging.warn(
                "energyblockd database version UPDATED (from %i to %i) or testnet setting changed (from %s to %s), or REINIT forced (%s). REBUILDING FROM SCRATCH ..."
                % (app_config[0]['db_version'], config.DB_VERSION,
                   app_config[0]['running_testnet'], config.TESTNET,
                   config.REPARSE_FORCED))
        else:
            logging.warn(
                "energyblockd database app_config collection doesn't exist. BUILDING FROM SCRATCH...")
        app_config = blow_away_db()
        my_latest_block = LATEST_BLOCK_INIT
    else:
        app_config = app_config[0]
        #get the last processed block out of mongo
        my_latest_block = mongo_db.processed_blocks.find_one(
            sort=[("block_index", pymongo.DESCENDING)])
        if my_latest_block:
            my_latest_block = prune_my_stale_blocks(my_latest_block['block_index'])
        else:
            my_latest_block = LATEST_BLOCK_INIT  #fresh database: start from the first block

    while True:
        try:
            running_info = util.call_jsonrpc_api("get_running_info", abort_on_error=True)
            if 'result' not in running_info:
                raise AssertionError("Could not contact energypartyd")
            running_info = running_info['result']
        except Exception as e:
            logging.warn(str(e) + " -- Waiting 30 seconds before trying again...")
            time.sleep(30)
            continue
        if running_info['last_message_index'] == -1:
            logging.warn(
                "No last_message_index returned. Waiting until energypartyd has messages..."
) time.sleep(30) continue wipeState = False updatePrefs = False if app_config['counterpartyd_db_version_major'] is None \ or app_config['counterpartyd_db_version_minor'] is None \ or app_config['counterpartyd_running_testnet'] is None: updatePrefs = True elif running_info['version_major'] != app_config[ 'counterpartyd_db_version_major']: logging.warn( "energypartyd MAJOR DB version change (we built from %s, energypartyd is at %s). Wiping our state data." % (app_config['counterpartyd_db_version_major'], running_info['version_major'])) wipeState = True updatePrefs = True elif running_info['version_minor'] != app_config[ 'counterpartyd_db_version_minor']: logging.warn( "energypartyd MINOR DB version change (we built from %s.%s, energypartyd is at %s.%s). Wiping our state data." % (app_config['counterpartyd_db_version_major'], app_config['counterpartyd_db_version_minor'], running_info['version_major'], running_info['version_minor'])) wipeState = True updatePrefs = True elif running_info.get( 'running_testnet', False) != app_config['counterpartyd_running_testnet']: logging.warn( "energypartyd testnet setting change (from %s to %s). Wiping our state data." % (app_config['counterpartyd_running_testnet'], running_info['running_testnet'])) wipeState = True updatePrefs = True if wipeState: app_config = blow_away_db() if updatePrefs: app_config['counterpartyd_db_version_major'] = running_info[ 'version_major'] app_config['counterpartyd_db_version_minor'] = running_info[ 'version_minor'] app_config['counterpartyd_running_testnet'] = running_info[ 'running_testnet'] mongo_db.app_config.update({}, app_config) #reset my latest block record my_latest_block = LATEST_BLOCK_INIT config.CAUGHT_UP = False #You've Come a Long Way, Baby last_processed_block = running_info['last_block'] if last_processed_block['block_index'] is None: logging.warn( "energypartyd has no last processed block (probably is reparsing). Waiting 5 seconds before trying again..." ) time.sleep(5) continue if my_latest_block['block_index'] < last_processed_block['block_index']: #need to catch up config.CAUGHT_UP = False cur_block_index = my_latest_block['block_index'] + 1 #get the blocktime for the next block we have to process try: cur_block = util.call_jsonrpc_api( "get_block_info", {'block_index': cur_block_index}, abort_on_error=True)['result'] except Exception as e: logging.warn( str(e) + " Waiting 5 seconds before trying again...") time.sleep(5) continue cur_block['block_time_obj'] = datetime.datetime.utcfromtimestamp( cur_block['block_time']) cur_block['block_time_str'] = cur_block[ 'block_time_obj'].isoformat() try: block_data = util.call_jsonrpc_api( "get_messages", {'block_index': cur_block_index}, abort_on_error=True)['result'] except Exception as e: logging.warn( str(e) + " Waiting 15 seconds before trying again...") time.sleep(15) continue #parse out response (list of txns, ordered as they appeared in the block) for msg in block_data: msg_data = json.loads(msg['bindings']) if msg['message_index'] != config.LAST_MESSAGE_INDEX + 1 and config.LAST_MESSAGE_INDEX != -1: logging.error( "BUG: MESSAGE RECEIVED NOT WHAT WE EXPECTED. EXPECTED: %s, GOT: %s: %s (ALL MSGS IN get_messages PAYLOAD: %s)..." % (config.LAST_MESSAGE_INDEX + 1, msg['message_index'], msg, [m['message_index'] for m in block_data])) my_latest_block = prune_my_stale_blocks( cur_block_index - config.MAX_FORCED_REORG_NUM_BLOCKS) break #sys.exit(1) #FOR NOW if msg['message_index'] <= config.LAST_MESSAGE_INDEX: logging.warn("BUG: IGNORED old RAW message %s: %s ..." 
% (msg['message_index'], msg)) continue logging.info("Received message %s: %s ..." % (msg['message_index'], msg)) status = msg_data.get('status', 'valid').lower() if status.startswith('invalid'): if last_processed_block['block_index'] - my_latest_block[ 'block_index'] < config.MAX_REORG_NUM_BLOCKS: util.store_wallet_message(msg, msg_data) config.LAST_MESSAGE_INDEX = msg['message_index'] continue #track message types, for compiling of statistics if msg['command'] == 'insert' \ and msg['category'] not in ["debits", "credits", "order_matches", "bet_matches", "order_expirations", "bet_expirations", "order_match_expirations", "bet_match_expirations", "bet_match_resolutions"]: try: mongo_db.transaction_stats.insert({ 'block_index': cur_block_index, 'block_time': cur_block['block_time_obj'], 'category': msg['category'] }) except pymongo.errors.DuplicateKeyError as e: logging.exception(e) #HANDLE REORGS if msg['command'] == 'reorg': logging.warn("Blockchain reorginization at block %s" % msg_data['block_index']) #prune back to and including the specified message_index my_latest_block = prune_my_stale_blocks( msg_data['block_index'] - 1) config.CURRENT_BLOCK_INDEX = msg_data['block_index'] - 1 running_info = util.call_jsonrpc_api( "get_running_info", abort_on_error=True)['result'] config.LAST_MESSAGE_INDEX = running_info[ 'last_message_index'] if last_processed_block['block_index'] - my_latest_block[ 'block_index'] < config.MAX_REORG_NUM_BLOCKS: msg_data[ '_last_message_index'] = config.LAST_MESSAGE_INDEX util.store_wallet_message(msg, msg_data) event = util.decorate_message_for_feed( msg, msg_data=msg_data) break #break out of inner loop #track assets if msg['category'] == 'issuances': assets.parse_issuance(mongo_db, msg_data, cur_block_index, cur_block) #track balance changes for each address bal_change = None if msg['category'] in [ 'credits', 'debits', ]: actionName = 'credit' if msg[ 'category'] == 'credits' else 'debit' address = msg_data['address'] asset_info = mongo_db.tracked_assets.find_one( {'asset': msg_data['asset']}) if asset_info is None: logging.warn( "Credit/debit of %s where asset ('%s') does not exist. Ignoring..." 
% (msg_data['quantity'], msg_data['asset'])) continue quantity = msg_data['quantity'] if msg[ 'category'] == 'credits' else -msg_data['quantity'] quantity_normalized = util_bitcoin.normalize_quantity( quantity, asset_info['divisible']) #look up the previous balance to go off of last_bal_change = mongo_db.balance_changes.find_one( { 'address': address, 'asset': asset_info['asset'] }, sort=[("block_index", pymongo.DESCENDING), ("_id", pymongo.DESCENDING)]) if last_bal_change \ and last_bal_change['block_index'] == cur_block_index: last_bal_change['quantity'] += quantity last_bal_change[ 'quantity_normalized'] += quantity_normalized last_bal_change['new_balance'] += quantity last_bal_change[ 'new_balance_normalized'] += quantity_normalized mongo_db.balance_changes.save(last_bal_change) logging.info( "Procesed %s bal change (UPDATED) from tx %s :: %s" % (actionName, msg['message_index'], last_bal_change)) bal_change = last_bal_change else: #new balance change record for this block bal_change = { 'address': address, 'asset': asset_info['asset'], 'block_index': cur_block_index, 'block_time': cur_block['block_time_obj'], 'quantity': quantity, 'quantity_normalized': quantity_normalized, 'new_balance': last_bal_change['new_balance'] + quantity if last_bal_change else quantity, 'new_balance_normalized': last_bal_change['new_balance_normalized'] + quantity_normalized if last_bal_change else quantity_normalized, } mongo_db.balance_changes.insert(bal_change) logging.info( "Procesed %s bal change from tx %s :: %s" % (actionName, msg['message_index'], bal_change)) #book trades if (msg['category'] == 'order_matches' and ((msg['command'] == 'update' and msg_data['status'] == 'completed') or ('forward_asset' in msg_data and msg_data['forward_asset'] != config.BTC and msg_data['backward_asset'] != config.BTC))): if msg['command'] == 'update' and msg_data[ 'status'] == 'completed': tx0_hash, tx1_hash = msg_data[ 'order_match_id'][:64], msg_data['order_match_id'][ 64:] order_match = util.call_jsonrpc_api( "get_order_matches", { 'filters': [{ 'field': 'tx0_hash', 'op': '==', 'value': tx0_hash }, { 'field': 'tx1_hash', 'op': '==', 'value': tx1_hash }] }, abort_on_error=False)['result'][0] else: assert msg_data['status'] == 'completed' order_match = msg_data forward_asset_info = mongo_db.tracked_assets.find_one( {'asset': order_match['forward_asset']}) backward_asset_info = mongo_db.tracked_assets.find_one( {'asset': order_match['backward_asset']}) assert forward_asset_info and backward_asset_info base_asset, quote_asset = util.assets_to_asset_pair( order_match['forward_asset'], order_match['backward_asset']) if (order_match['forward_asset'] == config.BTC and order_match['forward_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF) \ or (order_match['backward_asset'] == config.BTC and order_match['backward_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF): logging.debug( "Order match %s ignored due to %s under dust limit." 
% (order_match['tx0_hash'] + order_match['tx1_hash'], config.BTC)) continue #take divisible trade quantities to floating point forward_quantity = util_bitcoin.normalize_quantity( order_match['forward_quantity'], forward_asset_info['divisible']) backward_quantity = util_bitcoin.normalize_quantity( order_match['backward_quantity'], backward_asset_info['divisible']) #compose trade trade = { 'block_index': cur_block_index, 'block_time': cur_block['block_time_obj'], 'message_index': msg['message_index'], #secondary temporaral ordering off of when 'order_match_id': order_match['tx0_hash'] + '_' + order_match['tx1_hash'], 'order_match_tx0_index': order_match['tx0_index'], 'order_match_tx1_index': order_match['tx1_index'], 'order_match_tx0_address': order_match['tx0_address'], 'order_match_tx1_address': order_match['tx1_address'], 'base_asset': base_asset, 'quote_asset': quote_asset, 'base_quantity': order_match['forward_quantity'] if order_match['forward_asset'] == base_asset else order_match['backward_quantity'], 'quote_quantity': order_match['backward_quantity'] if order_match['forward_asset'] == base_asset else order_match['forward_quantity'], 'base_quantity_normalized': forward_quantity if order_match['forward_asset'] == base_asset else backward_quantity, 'quote_quantity_normalized': backward_quantity if order_match['forward_asset'] == base_asset else forward_quantity, } d = D(trade['quote_quantity_normalized']) / D( trade['base_quantity_normalized']) d = d.quantize(EIGHT_PLACES, rounding=decimal.ROUND_HALF_EVEN, context=decimal.Context(prec=20)) trade['unit_price'] = float(d) d = D(trade['base_quantity_normalized']) / D( trade['quote_quantity_normalized']) d = d.quantize(EIGHT_PLACES, rounding=decimal.ROUND_HALF_EVEN, context=decimal.Context(prec=20)) trade['unit_price_inverse'] = float(d) mongo_db.trades.insert(trade) logging.info("Procesed Trade from tx %s :: %s" % (msg['message_index'], trade)) #broadcast if msg['category'] == 'broadcasts': betting.parse_broadcast(mongo_db, msg_data) if last_processed_block['block_index'] - my_latest_block[ 'block_index'] < config.MAX_REORG_NUM_BLOCKS: #send out the message to listening clients util.store_wallet_message(msg, msg_data) #this is the last processed message index config.LAST_MESSAGE_INDEX = msg['message_index'] new_block = { 'block_index': cur_block_index, 'block_time': cur_block['block_time_obj'], 'block_hash': cur_block['block_hash'], } mongo_db.processed_blocks.insert(new_block) my_latest_block = new_block config.CURRENT_BLOCK_INDEX = cur_block_index if config.BLOCKCHAIN_SERVICE_LAST_BLOCK == 0 or config.BLOCKCHAIN_SERVICE_LAST_BLOCK - config.CURRENT_BLOCK_INDEX < config.MAX_REORG_NUM_BLOCKS: try: block_height_response = blockchain.getinfo() except: block_height_response = None config.BLOCKCHAIN_SERVICE_LAST_BLOCK = block_height_response[ 'info']['blocks'] if block_height_response else 0 logging.info( "Block: %i (message_index height=%s) (blockchain latest block=%s)" % (config.CURRENT_BLOCK_INDEX, config.LAST_MESSAGE_INDEX if config.LAST_MESSAGE_INDEX != -1 else '???', config.BLOCKCHAIN_SERVICE_LAST_BLOCK if config.BLOCKCHAIN_SERVICE_LAST_BLOCK else '???')) clean_mempool_tx() elif my_latest_block['block_index'] > last_processed_block[ 'block_index']: logging.error( "Very odd: Ahead of energypartyd with block indexes! Pruning back %s blocks to be safe." 
% config.MAX_REORG_NUM_BLOCKS) my_latest_block = prune_my_stale_blocks( last_processed_block['block_index'] - config.MAX_REORG_NUM_BLOCKS) else: config.CAUGHT_UP = running_info['db_caught_up'] if config.LAST_MESSAGE_INDEX == -1 or config.CURRENT_BLOCK_INDEX == 0: if config.LAST_MESSAGE_INDEX == -1: config.LAST_MESSAGE_INDEX = running_info[ 'last_message_index'] if config.CURRENT_BLOCK_INDEX == 0: config.CURRENT_BLOCK_INDEX = running_info['last_block'][ 'block_index'] logging.info( "Detected blocks caught up on startup. Setting last message idx to %s, current block index to %s ..." % (config.LAST_MESSAGE_INDEX, config.CURRENT_BLOCK_INDEX)) if config.CAUGHT_UP and not config.CAUGHT_UP_STARTED_EVENTS: logging.debug( "Starting event timer: compile_asset_pair_market_info") gevent.spawn(events.compile_asset_pair_market_info) logging.debug( "Starting event timer: compile_asset_market_info") gevent.spawn(events.compile_asset_market_info) logging.debug( "Starting event timer: compile_extended_asset_info") gevent.spawn(events.compile_extended_asset_info) logging.debug( "Starting event timer: compile_extended_feed_info") gevent.spawn(events.compile_extended_feed_info) config.CAUGHT_UP_STARTED_EVENTS = True publish_mempool_tx() time.sleep(30)
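# Hedged sketch of the asset-rollback rule inside prune_my_stale_blocks() above: pop
# _history newest-to-oldest until a version saved at or before the prune point is found;
# if even the oldest version is newer, the asset did not exist at that block and its
# tracking record is dropped. Toy data below, standalone for illustration only.
def _example_asset_rollback(history, max_block_index):
    prev_ver = None
    while history:
        prev_ver = history.pop()
        if prev_ver['_at_block'] <= max_block_index:
            return prev_ver  #restore this version
    return None              #asset was created after max_block_index: remove it

assert _example_asset_rollback([{'_at_block': 5}, {'_at_block': 9}], 7) == {'_at_block': 5}
assert _example_asset_rollback([{'_at_block': 8}, {'_at_block': 9}], 7) is None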
def process_cpd_blockfeed(zmq_publisher_eventfeed): LATEST_BLOCK_INIT = { 'block_index': config.BLOCK_FIRST, 'block_time': None, 'block_hash': None } mongo_db = config.mongo_db blocks_to_insert = [] def blow_away_db(): """boom! blow away all applicable collections in mongo""" mongo_db.processed_blocks.drop() mongo_db.tracked_assets.drop() mongo_db.trades.drop() mongo_db.balance_changes.drop() mongo_db.asset_market_info.drop() mongo_db.asset_marketcap_history.drop() mongo_db.pair_market_info.drop() mongo_db.btc_open_orders.drop() mongo_db.asset_extended_info.drop() mongo_db.transaction_stats.drop() mongo_db.feeds.drop() mongo_db.wallet_stats.drop() #create/update default app_config object mongo_db.app_config.update( {}, { 'db_version': config.DB_VERSION, #counterblockd database version 'running_testnet': config.TESTNET, 'counterpartyd_db_version_major': None, 'counterpartyd_db_version_minor': None, 'counterpartyd_running_testnet': None, 'last_block_assets_compiled': config. BLOCK_FIRST, #for asset data compilation in events.py (resets on reparse as well) }, upsert=True) app_config = mongo_db.app_config.find()[0] #DO NOT DELETE preferences and chat_handles and chat_history #create XCP and BTC assets in tracked_assets for asset in [config.XCP, config.BTC]: base_asset = { 'asset': asset, 'owner': None, 'divisible': True, 'locked': False, 'total_issued': None, '_at_block': config.BLOCK_FIRST, #the block ID this asset is current for '_history': [] #to allow for block rollbacks } mongo_db.tracked_assets.insert(base_asset) #reinitialize some internal counters config.CURRENT_BLOCK_INDEX = 0 config.LAST_MESSAGE_INDEX = -1 return app_config def prune_my_stale_blocks(max_block_index): """called if there are any records for blocks higher than this in the database? If so, they were impartially created and we should get rid of them NOTE: after calling this function, you should always trigger a "continue" statement to reiterate the processing loop (which will get a new last_processed_block from counterpartyd and resume as appropriate) """ logging.warn("Pruning to block %i ..." % (max_block_index)) mongo_db.processed_blocks.remove( {"block_index": { "$gt": max_block_index }}) mongo_db.balance_changes.remove( {"block_index": { "$gt": max_block_index }}) mongo_db.trades.remove({"block_index": {"$gt": max_block_index}}) mongo_db.asset_marketcap_history.remove( {"block_index": { "$gt": max_block_index }}) mongo_db.transaction_stats.remove( {"block_index": { "$gt": max_block_index }}) #to roll back the state of the tracked asset, dive into the history object for each asset that has # been updated on or after the block that we are pruning back to assets_to_prune = mongo_db.tracked_assets.find( {'_at_block': { "$gt": max_block_index }}) for asset in assets_to_prune: logging.info( "Pruning asset %s (last modified @ block %i, pruning to state at block %i)" % (asset['asset'], asset['_at_block'], max_block_index)) prev_ver = None while len(asset['_history']): prev_ver = asset['_history'].pop() if prev_ver['_at_block'] <= max_block_index: break if prev_ver: if prev_ver['_at_block'] > max_block_index: #even the first history version is newer than max_block_index. #in this case, just remove the asset tracking record itself mongo_db.tracked_assets.remove({'asset': asset['asset']}) else: #if here, we were able to find a previous version that was saved at or before max_block_index # (which should be prev_ver ... 
restore asset's values to its values prev_ver['_id'] = asset['_id'] prev_ver['_history'] = asset['_history'] mongo_db.tracked_assets.save(prev_ver) config.LAST_MESSAGE_INDEX = -1 config.CAUGHT_UP = False util.blockinfo_cache.clear() latest_block = mongo_db.processed_blocks.find_one( {"block_index": max_block_index}) or LATEST_BLOCK_INIT return latest_block def publish_mempool_tx(): """fetch new tx from mempool""" tx_hashes = [] mempool_txs = mongo_db.mempool.find(fields={'tx_hash': True}) for mempool_tx in mempool_txs: tx_hashes.append(str(mempool_tx['tx_hash'])) params = None if len(tx_hashes) > 0: params = { 'filters': [{ 'field': 'tx_hash', 'op': 'NOT IN', 'value': tx_hashes }, { 'field': 'category', 'op': 'IN', 'value': [ 'sends', 'btcpays', 'issuances', 'dividends', 'callbacks' ] }], 'filterop': 'AND' } new_txs = util.call_jsonrpc_api("get_mempool", params, abort_on_error=True) for new_tx in new_txs['result']: tx = { 'tx_hash': new_tx['tx_hash'], 'command': new_tx['command'], 'category': new_tx['category'], 'bindings': new_tx['bindings'], 'timestamp': new_tx['timestamp'], 'viewed_in_block': config.CURRENT_BLOCK_INDEX } mongo_db.mempool.insert(tx) del (tx['_id']) tx['_category'] = tx['category'] tx['_message_index'] = 'mempool' logging.debug("Spotted mempool tx: %s" % tx) zmq_publisher_eventfeed.send_json(tx) def clean_mempool_tx(): """clean mempool transactions older than MAX_REORG_NUM_BLOCKS blocks""" mongo_db.mempool.remove({ "viewed_in_block": { "$lt": config.CURRENT_BLOCK_INDEX - config.MAX_REORG_NUM_BLOCKS } }) config.CURRENT_BLOCK_INDEX = 0 #initialize (last processed block index -- i.e. currently active block) config.LAST_MESSAGE_INDEX = -1 #initialize (last processed message index) config.BLOCKCHAIN_SERVICE_LAST_BLOCK = 0 #simply for printing/alerting purposes config.CAUGHT_UP_STARTED_EVENTS = False #^ set after we are caught up and start up the recurring events that depend on us being caught up with the blockchain #grab our stored preferences, and rebuild the database if necessary app_config = mongo_db.app_config.find() assert app_config.count() in [0, 1] if (app_config.count() == 0 or config.REPARSE_FORCED or app_config[0]['db_version'] != config.DB_VERSION or app_config[0]['running_testnet'] != config.TESTNET): if app_config.count(): logging.warn( "counterblockd database version UPDATED (from %i to %i) or testnet setting changed (from %s to %s), or REINIT forced (%s). REBUILDING FROM SCRATCH ..." % (app_config[0]['db_version'], config.DB_VERSION, app_config[0]['running_testnet'], config.TESTNET, config.REPARSE_FORCED)) else: logging.warn( "counterblockd database app_config collection doesn't exist. BUILDING FROM SCRATCH..." 
) app_config = blow_away_db() my_latest_block = LATEST_BLOCK_INIT else: app_config = app_config[0] #get the last processed block out of mongo my_latest_block = mongo_db.processed_blocks.find_one( sort=[("block_index", pymongo.DESCENDING)]) or LATEST_BLOCK_INIT #remove any data we have for blocks higher than this (would happen if counterblockd or mongo died # or errored out while processing a block) my_latest_block = prune_my_stale_blocks(my_latest_block['block_index']) #avoid contacting counterpartyd (on reparse, to speed up) autopilot = False autopilot_runner = 0 #start polling counterpartyd for new blocks while True: if not autopilot or autopilot_runner == 0: try: running_info = util.call_jsonrpc_api("get_running_info", abort_on_error=True) if 'result' not in running_info: raise AssertionError("Could not contact counterpartyd") running_info = running_info['result'] except Exception, e: logging.warn( str(e) + " -- Waiting 3 seconds before trying again...") time.sleep(3) continue if running_info[ 'last_message_index'] == -1: #last_message_index not set yet (due to no messages in counterpartyd DB yet) logging.warn( "No last_message_index returned. Waiting until counterpartyd has messages..." ) time.sleep(10) continue #wipe our state data if necessary, if counterpartyd has moved on to a new DB version wipeState = False updatePrefs = False if app_config['counterpartyd_db_version_major'] is None \ or app_config['counterpartyd_db_version_minor'] is None \ or app_config['counterpartyd_running_testnet'] is None: updatePrefs = True elif running_info['version_major'] != app_config[ 'counterpartyd_db_version_major']: logging.warn( "counterpartyd MAJOR DB version change (we built from %s, counterpartyd is at %s). Wiping our state data." % (app_config['counterpartyd_db_version_major'], running_info['version_major'])) wipeState = True updatePrefs = True elif running_info['version_minor'] != app_config[ 'counterpartyd_db_version_minor']: logging.warn( "counterpartyd MINOR DB version change (we built from %s.%s, counterpartyd is at %s.%s). Wiping our state data." % (app_config['counterpartyd_db_version_major'], app_config['counterpartyd_db_version_minor'], running_info['version_major'], running_info['version_minor'])) wipeState = True updatePrefs = True elif running_info.get( 'running_testnet', False) != app_config['counterpartyd_running_testnet']: logging.warn( "counterpartyd testnet setting change (from %s to %s). Wiping our state data." % (app_config['counterpartyd_running_testnet'], running_info['running_testnet'])) wipeState = True updatePrefs = True if wipeState: app_config = blow_away_db() if updatePrefs: app_config['counterpartyd_db_version_major'] = running_info[ 'version_major'] app_config['counterpartyd_db_version_minor'] = running_info[ 'version_minor'] app_config['counterpartyd_running_testnet'] = running_info[ 'running_testnet'] mongo_db.app_config.update({}, app_config) #reset my latest block record my_latest_block = LATEST_BLOCK_INIT config.CAUGHT_UP = False #You've Come a Long Way, Baby #work up to what block counterpartyd is at last_processed_block = running_info['last_block'] if last_processed_block['block_index'] is None: logging.warn( "counterpartyd has no last processed block (probably is reparsing). Waiting 3 seconds before trying again..." 
) time.sleep(3) continue if my_latest_block['block_index'] < last_processed_block['block_index']: #need to catch up config.CAUGHT_UP = False if last_processed_block['block_index'] - my_latest_block[ 'block_index'] > 500: #we are safely far from the tip, switch to bulk-everything autopilot = True if autopilot_runner == 0: autopilot_runner = 500 autopilot_runner -= 1 else: autopilot = False cur_block_index = my_latest_block['block_index'] + 1 try: cur_block = util.get_block_info_cached( cur_block_index, min( 200, last_processed_block['block_index'] - my_latest_block['block_index'])) block_data = cur_block['_messages'] except Exception, e: logging.warn( str(e) + " Waiting 3 seconds before trying again...") time.sleep(3) continue cur_block['block_time_obj'] = datetime.datetime.utcfromtimestamp( cur_block['block_time']) cur_block['block_time_str'] = cur_block[ 'block_time_obj'].isoformat() # clean api cache if last_processed_block[ 'block_index'] - cur_block_index <= config.MAX_REORG_NUM_BLOCKS: #only when we are near the tip util.clean_block_cache(cur_block_index) #parse out response (list of txns, ordered as they appeared in the block) for msg in block_data: msg_data = json.loads(msg['bindings']) if msg['message_index'] != config.LAST_MESSAGE_INDEX + 1 and config.LAST_MESSAGE_INDEX != -1: logging.error( "BUG: MESSAGE RECEIVED NOT WHAT WE EXPECTED. EXPECTED: %s, GOT: %s: %s (ALL MSGS IN get_messages PAYLOAD: %s)..." % (config.LAST_MESSAGE_INDEX + 1, msg['message_index'], msg, [m['message_index'] for m in block_data])) # we are likely cojones deep in desync, enforcing deep reorg my_latest_block = prune_my_stale_blocks( cur_block_index - config.MAX_FORCED_REORG_NUM_BLOCKS) break #sys.exit(1) #FOR NOW #BUG: sometimes counterpartyd seems to return OLD messages out of the message feed. deal with those #TODO unreachable now, delete? if msg['message_index'] <= config.LAST_MESSAGE_INDEX: logging.warn("BUG: IGNORED old RAW message %s: %s ..." % (msg['message_index'], msg)) continue logging.info("Received message %s: %s ..." 
% (msg['message_index'], msg)) #don't process invalid messages, but do forward them along to clients status = msg_data.get('status', 'valid').lower() if status.startswith('invalid'): #(but don't forward along while we're catching up) if last_processed_block['block_index'] - my_latest_block[ 'block_index'] < config.MAX_REORG_NUM_BLOCKS: event = util.decorate_message_for_feed( msg, msg_data=msg_data) zmq_publisher_eventfeed.send_json(event) config.LAST_MESSAGE_INDEX = msg['message_index'] continue #track message types, for compiling of statistics if msg['command'] == 'insert' \ and msg['category'] not in ["debits", "credits", "order_matches", "bet_matches", "order_expirations", "bet_expirations", "order_match_expirations", "bet_match_expirations", "rps_matches", "rps_expirations", "rps_match_expirations", "bet_match_resolutions"]: mongo_db.transaction_stats.insert({ 'block_index': cur_block_index, 'block_time': cur_block['block_time_obj'], 'message_index': msg['message_index'], 'category': msg['category'] }) #HANDLE REORGS if msg['command'] == 'reorg': logging.warn("Blockchain reorginization at block %s" % msg_data['block_index']) #prune back to and including the specified message_index my_latest_block = prune_my_stale_blocks( msg_data['block_index'] - 1) config.CURRENT_BLOCK_INDEX = msg_data['block_index'] - 1 #for the current last_message_index (which could have gone down after the reorg), query counterpartyd running_info = util.call_jsonrpc_api( "get_running_info", abort_on_error=True)['result'] config.LAST_MESSAGE_INDEX = running_info[ 'last_message_index'] #send out the message to listening clients (but don't forward along while we're catching up) if last_processed_block['block_index'] - my_latest_block[ 'block_index'] < config.MAX_REORG_NUM_BLOCKS: msg_data[ '_last_message_index'] = config.LAST_MESSAGE_INDEX event = util.decorate_message_for_feed( msg, msg_data=msg_data) zmq_publisher_eventfeed.send_json(event) break #break out of inner loop #track assets if msg['category'] == 'issuances': assets.parse_issuance(mongo_db, msg_data, cur_block_index, cur_block) #track balance changes for each address bal_change = None if msg['category'] in [ 'credits', 'debits', ]: actionName = 'credit' if msg[ 'category'] == 'credits' else 'debit' address = msg_data['address'] asset_info = mongo_db.tracked_assets.find_one( {'asset': msg_data['asset']}) if asset_info is None: logging.warn( "Credit/debit of %s where asset ('%s') does not exist. Ignoring..." 
% (msg_data['quantity'], msg_data['asset'])) config.LAST_MESSAGE_INDEX = msg['message_index'] continue quantity = msg_data['quantity'] if msg[ 'category'] == 'credits' else -msg_data['quantity'] quantity_normalized = util_bitcoin.normalize_quantity( quantity, asset_info['divisible']) #look up the previous balance to go off of last_bal_change = mongo_db.balance_changes.find_one( { 'address': address, 'asset': asset_info['asset'] }, sort=[("block_index", pymongo.DESCENDING), ("_id", pymongo.DESCENDING)]) if last_bal_change \ and last_bal_change['block_index'] == cur_block_index: #modify this record, as we want at most one entry per block index for each (address, asset) pair last_bal_change['quantity'] += quantity last_bal_change[ 'quantity_normalized'] += quantity_normalized last_bal_change['new_balance'] += quantity last_bal_change[ 'new_balance_normalized'] += quantity_normalized mongo_db.balance_changes.save(last_bal_change) logging.info( "Procesed %s bal change (UPDATED) from tx %s :: %s" % (actionName, msg['message_index'], last_bal_change)) bal_change = last_bal_change else: #new balance change record for this block bal_change = { 'address': address, 'asset': asset_info['asset'], 'block_index': cur_block_index, 'block_time': cur_block['block_time_obj'], 'quantity': quantity, 'quantity_normalized': quantity_normalized, 'new_balance': last_bal_change['new_balance'] + quantity if last_bal_change else quantity, 'new_balance_normalized': last_bal_change['new_balance_normalized'] + quantity_normalized if last_bal_change else quantity_normalized, } mongo_db.balance_changes.insert(bal_change) logging.info( "Procesed %s bal change from tx %s :: %s" % (actionName, msg['message_index'], bal_change)) #book trades if (msg['category'] == 'order_matches' and (( msg['command'] == 'update' and msg_data['status'] == 'completed' ) #for a trade with BTC involved, but that is settled (completed) or ('forward_asset' in msg_data and msg_data['forward_asset'] != config.BTC and msg_data['backward_asset'] != config.BTC)) ): #or for a trade without BTC on either end if msg['command'] == 'update' and msg_data[ 'status'] == 'completed': #an order is being updated to a completed status (i.e. a BTCpay has completed) tx0_hash, tx1_hash = msg_data[ 'order_match_id'][:64], msg_data['order_match_id'][ 64:] #get the order_match this btcpay settles order_match = util.call_jsonrpc_api( "get_order_matches", { 'filters': [{ 'field': 'tx0_hash', 'op': '==', 'value': tx0_hash }, { 'field': 'tx1_hash', 'op': '==', 'value': tx1_hash }] }, abort_on_error=True)['result'][0] else: assert msg_data[ 'status'] == 'completed' #should not enter a pending state for non BTC matches order_match = msg_data forward_asset_info = mongo_db.tracked_assets.find_one( {'asset': order_match['forward_asset']}) backward_asset_info = mongo_db.tracked_assets.find_one( {'asset': order_match['backward_asset']}) assert forward_asset_info and backward_asset_info base_asset, quote_asset = util.assets_to_asset_pair( order_match['forward_asset'], order_match['backward_asset']) #don't create trade records from order matches with BTC that are under the dust limit if (order_match['forward_asset'] == config.BTC and order_match['forward_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF) \ or (order_match['backward_asset'] == config.BTC and order_match['backward_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF): logging.debug( "Order match %s ignored due to %s under dust limit." 
% (order_match['tx0_hash'] + order_match['tx1_hash'], config.BTC)) config.LAST_MESSAGE_INDEX = msg['message_index'] continue #take divisible trade quantities to floating point forward_quantity = util_bitcoin.normalize_quantity( order_match['forward_quantity'], forward_asset_info['divisible']) backward_quantity = util_bitcoin.normalize_quantity( order_match['backward_quantity'], backward_asset_info['divisible']) #compose trade trade = { 'block_index': cur_block_index, 'block_time': cur_block['block_time_obj'], 'message_index': msg['message_index'], #secondary temporaral ordering off of when 'order_match_id': order_match['tx0_hash'] + order_match['tx1_hash'], 'order_match_tx0_index': order_match['tx0_index'], 'order_match_tx1_index': order_match['tx1_index'], 'order_match_tx0_address': order_match['tx0_address'], 'order_match_tx1_address': order_match['tx1_address'], 'base_asset': base_asset, 'quote_asset': quote_asset, 'base_quantity': order_match['forward_quantity'] if order_match['forward_asset'] == base_asset else order_match['backward_quantity'], 'quote_quantity': order_match['backward_quantity'] if order_match['forward_asset'] == base_asset else order_match['forward_quantity'], 'base_quantity_normalized': forward_quantity if order_match['forward_asset'] == base_asset else backward_quantity, 'quote_quantity_normalized': backward_quantity if order_match['forward_asset'] == base_asset else forward_quantity, } trade['unit_price'] = float( (D(trade['quote_quantity_normalized']) / D(trade['base_quantity_normalized'])).quantize( D('.00000000'), rounding=decimal.ROUND_HALF_EVEN)) trade['unit_price_inverse'] = float( (D(trade['base_quantity_normalized']) / D(trade['quote_quantity_normalized'])).quantize( D('.00000000'), rounding=decimal.ROUND_HALF_EVEN)) mongo_db.trades.insert(trade) logging.info("Procesed Trade from tx %s :: %s" % (msg['message_index'], trade)) #broadcast if msg['category'] == 'broadcasts': betting.parse_broadcast(mongo_db, msg_data) #if we're catching up beyond MAX_REORG_NUM_BLOCKS blocks out, make sure not to send out any socket.io # events, as to not flood on a resync (as we may give a 525 to kick the logged in clients out, but we # can't guarantee that the socket.io connection will always be severed as well??) if last_processed_block['block_index'] - my_latest_block[ 'block_index'] < config.MAX_REORG_NUM_BLOCKS: #send out the message to listening clients event = util.decorate_message_for_feed(msg, msg_data=msg_data) zmq_publisher_eventfeed.send_json(event) #this is the last processed message index config.LAST_MESSAGE_INDEX = msg['message_index'] else: #block successfully processed, track this in our DB new_block = { 'block_index': cur_block_index, 'block_time': cur_block['block_time_obj'], 'block_hash': cur_block['block_hash'], } blocks_to_insert.append(new_block) if last_processed_block[ 'block_index'] - cur_block_index > 1000: #reparsing, do bulk inserts if len(blocks_to_insert) >= 1000: mongo_db.processed_blocks.insert(blocks_to_insert) blocks_to_insert[:] = [] else: mongo_db.processed_blocks.insert(blocks_to_insert) blocks_to_insert[:] = [] my_latest_block = new_block config.CURRENT_BLOCK_INDEX = cur_block_index #get the current blockchain service block if config.BLOCKCHAIN_SERVICE_LAST_BLOCK == 0 or config.BLOCKCHAIN_SERVICE_LAST_BLOCK - config.CURRENT_BLOCK_INDEX < config.MAX_REORG_NUM_BLOCKS: #update as CURRENT_BLOCK_INDEX catches up with BLOCKCHAIN_SERVICE_LAST_BLOCK and/or surpasses it (i.e. 
if blockchain service gets behind for some reason) try: block_height_response = blockchain.getinfo() except: block_height_response = None config.BLOCKCHAIN_SERVICE_LAST_BLOCK = block_height_response[ 'info']['blocks'] if block_height_response else 0 logging.info( "Block: %i (message_index height=%s) (blockchain latest block=%s)" % (config.CURRENT_BLOCK_INDEX, config.LAST_MESSAGE_INDEX if config.LAST_MESSAGE_INDEX != -1 else '???', config.BLOCKCHAIN_SERVICE_LAST_BLOCK if config.BLOCKCHAIN_SERVICE_LAST_BLOCK else '???')) if last_processed_block[ 'block_index'] - cur_block_index < config.MAX_REORG_NUM_BLOCKS: #only when we are near the tip clean_mempool_tx()
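# Illustration (threshold values taken from the code above) of the bulk-insert batching:
# when reparsing far behind the tip (more than 1000 blocks), processed blocks are
# buffered and flushed 1000 at a time; near the tip the buffer is flushed on every block
# so processed_blocks never lags the live chain.
def _example_should_flush(blocks_behind, buffered_count):
    if blocks_behind > 1000:
        return buffered_count >= 1000  #reparse: flush only full batches
    return True                        #near tip: flush every block

assert _example_should_flush(5000, 999) is False
assert _example_should_flush(5000, 1000) is True
assert _example_should_flush(2, 1) is True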
def search_raw_transactions(address):
    result = util.call_jsonrpc_api('search_raw_transactions', {'address': address})
    return result['result']
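# Usage sketch (hypothetical address): search_raw_transactions() is a thin proxy that
# forwards the address to the backend's search_raw_transactions API method and unwraps
# the JSON-RPC envelope, returning the raw transaction list directly:
#
#   for tx in search_raw_transactions('1CounterpartyXXXXXXXXXXXXXXXUWLpVr'):
#       logging.debug("raw tx: %s" % tx)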
def get_market_orders(asset1, asset2, addresses=[], supplies=None, min_fee_provided=0.95, max_fee_required=0.95): base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2) if not supplies: supplies = get_assets_supply([asset1, asset2]) market_orders = [] sql = '''SELECT orders.*, blocks.block_time FROM orders INNER JOIN blocks ON orders.block_index=blocks.block_index WHERE status = ? ''' bindings = ['open'] if len(addresses) > 0: sql += '''AND source IN ({}) '''.format(','.join( ['?' for e in range(0, len(addresses))])) bindings += addresses sql += '''AND give_remaining > 0 AND give_asset IN (?, ?) AND get_asset IN (?, ?) ORDER BY tx_index DESC''' bindings += [asset1, asset2, asset1, asset2] orders = util.call_jsonrpc_api('sql', { 'query': sql, 'bindings': bindings })['result'] for order in orders: user_order = {} exclude = False if order['give_asset'] == 'PEPE': try: fee_provided = order['fee_provided'] / ( order['give_quantity'] / 100) user_order['fee_provided'] = format( D(order['fee_provided']) / (D(order['give_quantity']) / D(100)), '.2f') except Exception as e: fee_provided = min_fee_provided - 1 # exclude exclude = fee_provided < min_fee_provided elif order['get_asset'] == 'PEPE': try: fee_required = order['fee_required'] / (order['get_quantity'] / 100) user_order['fee_required'] = format( D(order['fee_required']) / (D(order['get_quantity']) / D(100)), '.2f') except Exception as e: fee_required = max_fee_required + 1 # exclude exclude = fee_required > max_fee_required if not exclude: if order['give_asset'] == base_asset: price = calculate_price(order['give_quantity'], order['get_quantity'], supplies[order['give_asset']][1], supplies[order['get_asset']][1]) user_order['type'] = 'SELL' user_order['amount'] = order['give_remaining'] user_order['total'] = int(order['give_remaining'] * price) else: price = calculate_price(order['get_quantity'], order['give_quantity'], supplies[order['get_asset']][1], supplies[order['give_asset']][1]) user_order['type'] = 'BUY' user_order['total'] = order['give_remaining'] user_order['amount'] = int(order['give_remaining'] / price) user_order['price'] = format(price, '.8f') if len(addresses) == 0 and len(market_orders) > 0: previous_order = market_orders[-1] if previous_order['type'] == user_order[ 'type'] and previous_order['price'] == user_order[ 'price']: market_orders[-1]['amount'] += user_order['amount'] market_orders[-1]['total'] += user_order['total'] exclude = True if len(addresses) > 0: completed = format( ((D(order['give_quantity']) - D(order['give_remaining'])) / D(order['give_quantity'])) * D(100), '.2f') user_order['completion'] = "{}%".format(completed) user_order['tx_index'] = order['tx_index'] user_order['tx_hash'] = order['tx_hash'] user_order['source'] = order['source'] user_order['block_index'] = order['block_index'] user_order['block_time'] = order['block_time'] if not exclude: market_orders.append(user_order) return market_orders
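# Hedged sketch of the BUY/SELL composition in get_market_orders() above, with made-up
# quantities. calculate_price() is defined elsewhere in this module; a simplified
# stand-in that ignores the divisibility flags is used here for illustration only.
def _example_order_book_entry():
    from decimal import Decimal as D
    def toy_price(base_qty, quote_qty):  #stand-in for calculate_price()
        return float(D(quote_qty) / D(base_qty))
    order = {'give_quantity': 1000, 'get_quantity': 2000, 'give_remaining': 600}
    #give side is the base asset, so this is a SELL: amount is the remaining give,
    #total is amount * price
    price = toy_price(order['give_quantity'], order['get_quantity'])
    return {'type': 'SELL', 'amount': order['give_remaining'],
            'total': int(order['give_remaining'] * price), 'price': format(price, '.8f')}

assert _example_order_book_entry() == {'type': 'SELL', 'amount': 600, 'total': 1200,
                                       'price': '2.00000000'}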
def compile_asset_pair_market_info(): """Compiles the pair-level statistics that show on the View Prices page of litetokenswallet, for instance""" #loop through all open orders, and compile a listing of pairs, with a count of open orders for each pair mongo_db = config.mongo_db end_dt = datetime.datetime.utcnow() start_dt = end_dt - datetime.timedelta(days=1) start_block_index, end_block_index = util.get_block_indexes_for_dates(start_dt=start_dt, end_dt=end_dt) open_orders = util.call_jsonrpc_api("get_orders", { 'filters': [ {'field': 'give_remaining', 'op': '>', 'value': 0}, {'field': 'get_remaining', 'op': '>', 'value': 0}, {'field': 'fee_required_remaining', 'op': '>=', 'value': 0}, {'field': 'fee_provided_remaining', 'op': '>=', 'value': 0}, ], 'status': 'open', 'show_expired': False, }, abort_on_error=True)['result'] pair_data = {} asset_info = {} def get_price(base_quantity_normalized, quote_quantity_normalized): return float(D(quote_quantity_normalized / base_quantity_normalized )) #COMPOSE order depth, lowest ask, and highest bid column data for o in open_orders: (base_asset, quote_asset) = util.assets_to_asset_pair(o['give_asset'], o['get_asset']) pair = '%s/%s' % (base_asset, quote_asset) base_asset_info = asset_info.get(base_asset, mongo_db.tracked_assets.find_one({ 'asset': base_asset })) if base_asset not in asset_info: asset_info[base_asset] = base_asset_info quote_asset_info = asset_info.get(quote_asset, mongo_db.tracked_assets.find_one({ 'asset': quote_asset })) if quote_asset not in asset_info: asset_info[quote_asset] = quote_asset_info pair_data.setdefault(pair, {'open_orders_count': 0, 'lowest_ask': None, 'highest_bid': None, 'completed_trades_count': 0, 'vol_base': 0, 'vol_quote': 0}) #^ highest ask = open order selling base, highest bid = open order buying base #^ we also initialize completed_trades_count, vol_base, vol_quote because every pair inited here may # not have cooresponding data out of the trades_data_by_pair aggregation below pair_data[pair]['open_orders_count'] += 1 base_quantity_normalized = util_litecoin.normalize_quantity(o['give_quantity'] if base_asset == o['give_asset'] else o['get_quantity'], base_asset_info['divisible']) quote_quantity_normalized = util_litecoin.normalize_quantity(o['give_quantity'] if quote_asset == o['give_asset'] else o['get_quantity'], quote_asset_info['divisible']) order_price = get_price(base_quantity_normalized, quote_quantity_normalized) if base_asset == o['give_asset']: #selling base if pair_data[pair]['lowest_ask'] is None or order_price < pair_data[pair]['lowest_ask']: pair_data[pair]['lowest_ask'] = order_price elif base_asset == o['get_asset']: #buying base if pair_data[pair]['highest_bid'] is None or order_price > pair_data[pair]['highest_bid']: pair_data[pair]['highest_bid'] = order_price #COMPOSE volume data (in XLT and LTC), and % change data #loop through all trade volume over the past 24h, and match that to the open orders trades_data_by_pair = mongo_db.trades.aggregate([ {"$match": { "block_time": {"$gte": start_dt, "$lte": end_dt } } }, {"$project": { "base_asset": 1, "quote_asset": 1, "base_quantity_normalized": 1, #to derive base volume "quote_quantity_normalized": 1 #to derive quote volume }}, {"$group": { "_id": {"base_asset": "$base_asset", "quote_asset": "$quote_asset"}, "vol_base": {"$sum": "$base_quantity_normalized"}, "vol_quote": {"$sum": "$quote_quantity_normalized"}, "count": {"$sum": 1}, }} ]) trades_data_by_pair = [] if not trades_data_by_pair['ok'] else trades_data_by_pair['result'] for e in 
trades_data_by_pair:
        pair = '%s/%s' % (e['_id']['base_asset'], e['_id']['quote_asset'])
        pair_data.setdefault(pair, {'open_orders_count': 0, 'lowest_ask': None, 'highest_bid': None})
        #^ initialize an empty pair in the event there are no open orders for that pair, but there ARE completed trades for it
        pair_data[pair]['completed_trades_count'] = e['count']
        pair_data[pair]['vol_base'] = e['vol_base']
        pair_data[pair]['vol_quote'] = e['vol_quote']

    #compose price data, relative to LTC and XLT
    mps_xlt_ltc, xlt_ltc_price, ltc_xlt_price = get_price_primatives()
    for pair, e in pair_data.iteritems():
        base_asset, quote_asset = pair.split('/')
        _24h_vol_in_ltc = None
        _24h_vol_in_xlt = None

        #derive asset price data, expressed in LTC and XLT, for the given volumes
        if base_asset == config.XLT:
            _24h_vol_in_xlt = e['vol_base']
            _24h_vol_in_ltc = util_litecoin.round_out(e['vol_base'] * xlt_ltc_price) if xlt_ltc_price else 0
        elif base_asset == config.LTC:
            _24h_vol_in_xlt = util_litecoin.round_out(e['vol_base'] * ltc_xlt_price) if ltc_xlt_price else 0
            _24h_vol_in_ltc = e['vol_base']
        else: #base is not XLT or LTC
            price_summary_in_xlt, price_summary_in_ltc, price_in_xlt, price_in_ltc, aggregated_price_in_xlt, aggregated_price_in_ltc = \
                get_xlt_ltc_price_info(base_asset, mps_xlt_ltc, xlt_ltc_price, ltc_xlt_price, with_last_trades=0, start_dt=start_dt, end_dt=end_dt)
            if price_in_xlt:
                _24h_vol_in_xlt = util_litecoin.round_out(e['vol_base'] * price_in_xlt)
            if price_in_ltc:
                _24h_vol_in_ltc = util_litecoin.round_out(e['vol_base'] * price_in_ltc)

            if _24h_vol_in_xlt is None or _24h_vol_in_ltc is None:
                #the base asset didn't have price data against LTC or XLT, or both...try against the quote asset instead
                price_summary_in_xlt, price_summary_in_ltc, price_in_xlt, price_in_ltc, aggregated_price_in_xlt, aggregated_price_in_ltc = \
                    get_xlt_ltc_price_info(quote_asset, mps_xlt_ltc, xlt_ltc_price, ltc_xlt_price, with_last_trades=0, start_dt=start_dt, end_dt=end_dt)
                if _24h_vol_in_xlt is None and price_in_xlt:
                    _24h_vol_in_xlt = util_litecoin.round_out(e['vol_quote'] * price_in_xlt)
                if _24h_vol_in_ltc is None and price_in_ltc:
                    _24h_vol_in_ltc = util_litecoin.round_out(e['vol_quote'] * price_in_ltc)

        pair_data[pair]['24h_vol_in_{}'.format(config.XLT.lower())] = _24h_vol_in_xlt #might still be None
        pair_data[pair]['24h_vol_in_{}'.format(config.LTC.lower())] = _24h_vol_in_ltc #might still be None

        #get % change stats -- start by getting the first trade directly before the 24h period starts
        prev_trade = mongo_db.trades.find({
            "base_asset": base_asset,
            "quote_asset": quote_asset,
            "block_time": {'$lt': start_dt}}).sort('block_time', pymongo.DESCENDING).limit(1)
        latest_trade = mongo_db.trades.find({
            "base_asset": base_asset,
            "quote_asset": quote_asset}).sort('block_time', pymongo.DESCENDING).limit(1)
        if not prev_trade.count(): #no previous trade before this 24hr period
            pair_data[pair]['24h_pct_change'] = None
        else:
            prev_trade = prev_trade[0]
            latest_trade = latest_trade[0]
            prev_trade_price = get_price(prev_trade['base_quantity_normalized'], prev_trade['quote_quantity_normalized'])
            latest_trade_price = get_price(latest_trade['base_quantity_normalized'], latest_trade['quote_quantity_normalized'])
            pair_data[pair]['24h_pct_change'] = ((latest_trade_price - prev_trade_price) / prev_trade_price) * 100
        pair_data[pair]['last_updated'] = end_dt
        #print "PRODUCED", pair, pair_data[pair]
        mongo_db.asset_pair_market_info.update(
            {'base_asset': base_asset, 'quote_asset': quote_asset}, {"$set": pair_data[pair]}, upsert=True)

    #remove any old pairs that were not just updated
    mongo_db.asset_pair_market_info.remove({'last_updated': {'$lt': end_dt}})
    logging.info("Recomposed 24h trade statistics for %i asset pairs: %s" % (len(pair_data), ', '.join(pair_data.keys())))
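#A minimal sketch (not part of the original module) of the 24h percent-change math used
# above. The prices are hypothetical stand-ins for what get_price() would derive from the
# normalized base/quote quantities of the two trades.
def _example_24h_pct_change():
    prev_trade_price = 0.0250   #hypothetical price of the last trade before the 24h window
    latest_trade_price = 0.0275 #hypothetical price of the most recent trade
    pct_change = ((latest_trade_price - prev_trade_price) / prev_trade_price) * 100
    assert round(pct_change, 2) == 10.0 #i.e. a 10% rise over the period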
def get_pairs(quote_asset='XBJ', exclude_pairs=[], max_pairs=12, from_time=None):
    bindings = []
    sql = '''SELECT (CASE WHEN forward_asset = ? THEN backward_asset ELSE forward_asset END) AS base_asset,
                    (CASE WHEN backward_asset = ? THEN backward_asset ELSE forward_asset END) AS quote_asset,
                    (CASE WHEN backward_asset = ? THEN (forward_asset || '/' || backward_asset)
                          ELSE (backward_asset || '/' || forward_asset) END) AS pair,
                    (CASE WHEN forward_asset = ? THEN backward_quantity ELSE forward_quantity END) AS bq,
                    (CASE WHEN backward_asset = ? THEN backward_quantity ELSE forward_quantity END) AS qq '''
    if from_time:
        sql += ''', block_time '''
    sql += '''FROM order_matches '''
    bindings += [quote_asset, quote_asset, quote_asset, quote_asset, quote_asset]
    if from_time:
        sql += '''INNER JOIN blocks ON order_matches.block_index = blocks.block_index '''

    #quote assets with a higher priority than ours must not appear as the counter asset
    priority_quote_assets = []
    for priority_quote_asset in config.QUOTE_ASSETS:
        if priority_quote_asset != quote_asset:
            priority_quote_assets.append(priority_quote_asset)
        else:
            break

    if len(priority_quote_assets) > 0:
        asset_bindings = ','.join(['?' for e in range(0, len(priority_quote_assets))])
        sql += '''WHERE ((forward_asset = ? AND backward_asset NOT IN ({}))
                      OR (forward_asset NOT IN ({}) AND backward_asset = ?)) '''.format(asset_bindings, asset_bindings)
        bindings += [quote_asset] + priority_quote_assets + priority_quote_assets + [quote_asset]
    else:
        sql += '''WHERE ((forward_asset = ?) OR (backward_asset = ?)) '''
        bindings += [quote_asset, quote_asset]

    if len(exclude_pairs) > 0:
        sql += '''AND pair NOT IN ({}) '''.format(','.join(['?' for e in range(0, len(exclude_pairs))]))
        bindings += exclude_pairs

    if from_time:
        sql += '''AND block_time > ? '''
        bindings += [from_time]

    sql += '''AND forward_asset != backward_asset
              AND status = ?'''
    bindings += ['completed', max_pairs]

    #wrap the inner query to aggregate and rank the pairs; the trailing LIMIT ? consumes max_pairs
    sql = '''SELECT base_asset, quote_asset, pair, SUM(bq) AS base_quantity, SUM(qq) AS quote_quantity
             FROM ({})
             GROUP BY pair
             ORDER BY quote_quantity DESC
             LIMIT ?'''.format(sql)

    return util.call_jsonrpc_api('sql', {'query': sql, 'bindings': bindings})['result']
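#Hypothetical invocation of get_pairs() (not in the original source), illustrating the
# parameters: the 5 most-traded XBJ-quoted pairs, skipping one pair, limited to order
# matches from roughly the last 24 hours. from_time is assumed to be a unix timestamp,
# to match the block_time comparison in the SQL above; 'FOOBAR/XBJ' is a placeholder.
import time
pairs = get_pairs(quote_asset='XBJ', exclude_pairs=['FOOBAR/XBJ'], max_pairs=5,
    from_time=int(time.time()) - 86400)
for p in pairs:
    print p['pair'], p['base_quantity'], p['quote_quantity']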
def get_user_rps(addresses):
    games = []

    #open games started by one of the given addresses
    filters = [('status', '=', 'open'), ('source', 'IN', addresses)]
    rpss = util.call_jsonrpc_api('get_rps', {'filters': filters})['result']
    for rps in rpss:
        games.append({
            'block_index': rps['block_index'],
            'address': rps['source'],
            'tx_hash': rps['tx_hash'],
            'wager': rps['wager'],
            'move': 0,
            'counter_move': 0,
            'status': 'open',
            'possible_moves': rps['possible_moves'],
            'expiration': rps['expire_index']
        })

    #matched games involving one of the given addresses
    filters = [('tx0_address', 'IN', addresses), ('tx1_address', 'IN', addresses)]
    valid_status = [
        'pending', 'resolved and pending', 'pending and resolved',
        'concluded: first player wins', 'concluded: second player wins',
        'concluded: tie'
    ]
    params = {
        'filters': filters,
        'filterop': 'OR',
        'status': valid_status,
        'order_by': 'block_index',
        'order_dir': 'DESC'
    }
    rps_matches = util.call_jsonrpc_api('get_rps_matches', params)['result']

    resolved_bindings = []
    match_games = {}
    for rps_match in rps_matches:
        if rps_match['status'] == 'concluded: tie':
            status = 'tie'
        elif rps_match['status'] in ['resolved and pending', 'pending and resolved']:
            status = 'resolved'
        else:
            status = 'pending'

        if rps_match['tx0_address'] in addresses:
            txn = 0
            if rps_match['status'] == 'concluded: first player wins':
                status = 'win'
            elif rps_match['status'] == 'concluded: second player wins':
                status = 'lose'
            match_games[rps_match['tx0_address'] + "_" + rps_match['id']] = {
                'block_index': rps_match['tx0_block_index'],
                'address': rps_match['tx0_address'],
                'tx_hash': rps_match['tx0_hash'],
                'wager': rps_match['wager'],
                'move': 0,
                'counter_move': 0,
                'status': 'pending' if status == 'resolved' else status,
                'possible_moves': rps_match['possible_moves'],
                'expiration': rps_match['match_expire_index']
            }

        if rps_match['tx1_address'] in addresses:
            txn = 1
            if rps_match['status'] == 'concluded: second player wins':
                status = 'win'
            elif rps_match['status'] == 'concluded: first player wins':
                status = 'lose'
            match_games[rps_match['tx1_address'] + "_" + rps_match['id']] = {
                'block_index': rps_match['tx1_block_index'],
                'address': rps_match['tx1_address'],
                'tx_hash': rps_match['tx1_hash'],
                'wager': rps_match['wager'],
                'move': 0,
                'counter_move': 0,
                'status': 'pending' if status == 'resolved' else status,
                'possible_moves': rps_match['possible_moves'],
                'expiration': rps_match['match_expire_index']
            }

        if status != 'pending':
            resolved_bindings.append(rps_match['id'])

    #fill in the actual moves for any resolved matches
    if len(resolved_bindings) > 0:
        filters = [('rps_match_id', 'IN', resolved_bindings)]
        params = {
            'filters': filters,
            'status': 'valid',
            'order_by': 'block_index',
            'order_dir': 'DESC'
        }
        rpsresolves = util.call_jsonrpc_api('get_rpsresolves', params)['result']
        for rpsresolve in rpsresolves:
            rps_match_id = rpsresolve['rps_match_id']
            game_key = rpsresolve['source'] + '_' + rps_match_id
            if game_key in match_games:
                match_games[game_key]['move'] = rpsresolve['move']
                for countergame_key in match_games:
                    if countergame_key != game_key and countergame_key.split('_')[1] == rps_match_id:
                        match_games[countergame_key]['counter_move'] = rpsresolve['move']
                        break

    for match_games_key in match_games:
        games.append(match_games[match_games_key])
    return games
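#Hypothetical usage sketch (not in the original source): tally a wallet's RPS games by
# status. The addresses are placeholders, not real ones.
games = get_user_rps(['ADDRESS_1', 'ADDRESS_2'])
by_status = {}
for game in games:
    by_status[game['status']] = by_status.get(game['status'], 0) + 1
print by_status #e.g. {'open': 1, 'win': 2, 'pending': 1}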
def process_cpd_blockfeed(zmq_publisher_eventfeed):
    LATEST_BLOCK_INIT = {'block_index': config.BLOCK_FIRST, 'block_time': None, 'block_hash': None}
    mongo_db = config.mongo_db

    def blow_away_db():
        """boom! blow away all applicable collections in mongo"""
        mongo_db.processed_blocks.drop()
        mongo_db.tracked_assets.drop()
        mongo_db.trades.drop()
        mongo_db.balance_changes.drop()
        mongo_db.asset_market_info.drop()
        mongo_db.asset_marketcap_history.drop()
        mongo_db.pair_market_info.drop()
        mongo_db.btc_open_orders.drop()
        mongo_db.asset_extended_info.drop()
        mongo_db.transaction_stats.drop()
        mongo_db.feeds.drop()
        mongo_db.wallet_stats.drop()

        #create/update default app_config object
        mongo_db.app_config.update({}, {
            'db_version': config.DB_VERSION, #counterblockd database version
            'running_testnet': config.TESTNET,
            'clearinghoused_db_version_major': None,
            'clearinghoused_db_version_minor': None,
            'clearinghoused_running_testnet': None,
            'last_block_assets_compiled': config.BLOCK_FIRST, #for asset data compilation in events.py (resets on reparse as well)
        }, upsert=True)
        app_config = mongo_db.app_config.find()[0]
        #DO NOT DELETE preferences and chat_handles and chat_history

        #create XCP and BTC assets in tracked_assets
        for asset in [config.XCP, config.BTC]:
            base_asset = {
                'asset': asset,
                'owner': None,
                'divisible': True,
                'locked': False,
                'total_issued': None,
                '_at_block': config.BLOCK_FIRST, #the block ID this asset is current for
                '_history': [] #to allow for block rollbacks
            }
            mongo_db.tracked_assets.insert(base_asset)

        #reinitialize some internal counters
        config.CURRENT_BLOCK_INDEX = 0
        config.LAST_MESSAGE_INDEX = -1

        return app_config

    def prune_my_stale_blocks(max_block_index):
        """Called when the database may contain records for blocks higher than max_block_index.
        If so, they were partially created and we should get rid of them.

        NOTE: after calling this function, you should always trigger a "continue" statement to re-enter the
        processing loop (which will get a new last_processed_block from clearinghoused and resume as appropriate)
        """
        logging.warn("Pruning to block %i ..." % (max_block_index))
        mongo_db.processed_blocks.remove({"block_index": {"$gt": max_block_index}})
        mongo_db.balance_changes.remove({"block_index": {"$gt": max_block_index}})
        mongo_db.trades.remove({"block_index": {"$gt": max_block_index}})
        mongo_db.asset_marketcap_history.remove({"block_index": {"$gt": max_block_index}})
        mongo_db.transaction_stats.remove({"block_index": {"$gt": max_block_index}})

        #to roll back the state of the tracked asset, dive into the history object for each asset that has
        # been updated on or after the block that we are pruning back to
        assets_to_prune = mongo_db.tracked_assets.find({'_at_block': {"$gt": max_block_index}})
        for asset in assets_to_prune:
            logging.info("Pruning asset %s (last modified @ block %i, pruning to state at block %i)" % (
                asset['asset'], asset['_at_block'], max_block_index))
            prev_ver = None
            while len(asset['_history']):
                prev_ver = asset['_history'].pop()
                if prev_ver['_at_block'] <= max_block_index:
                    break
            if prev_ver:
                if prev_ver['_at_block'] > max_block_index:
                    #even the first history version is newer than max_block_index.
                    #in this case, just remove the asset tracking record itself
                    mongo_db.tracked_assets.remove({'asset': asset['asset']})
                else:
                    #if here, we were able to find a previous version that was saved at or before max_block_index
                    # (which should be prev_ver) ... restore the asset's values to that version's values
                    prev_ver['_id'] = asset['_id']
                    prev_ver['_history'] = asset['_history']
                    mongo_db.tracked_assets.save(prev_ver)

        config.CAUGHT_UP = False
        latest_block = mongo_db.processed_blocks.find_one({"block_index": max_block_index}) or LATEST_BLOCK_INIT
        return latest_block

    def publish_mempool_tx():
        """fetch new tx from mempool"""
        tx_hashes = []
        mempool_txs = mongo_db.mempool.find(fields={'tx_hash': True})
        for mempool_tx in mempool_txs:
            tx_hashes.append(str(mempool_tx['tx_hash']))

        params = None
        if len(tx_hashes) > 0:
            params = {
                'filters': [
                    {'field': 'tx_hash', 'op': 'NOT IN', 'value': tx_hashes},
                    {'field': 'category', 'op': 'IN', 'value': ['sends', 'btcpays', 'issuances', 'dividends', 'callbacks']}
                ],
                'filterop': 'AND'
            }
        new_txs = util.call_jsonrpc_api("get_mempool", params, abort_on_error=True)

        for new_tx in new_txs['result']:
            tx = {
                'tx_hash': new_tx['tx_hash'],
                'command': new_tx['command'],
                'category': new_tx['category'],
                'bindings': new_tx['bindings'],
                'timestamp': new_tx['timestamp'],
                'viewed_in_block': config.CURRENT_BLOCK_INDEX
            }
            mongo_db.mempool.insert(tx)
            del(tx['_id'])
            tx['_category'] = tx['category']
            tx['_message_index'] = 'mempool'
            logging.debug("Spotted mempool tx: %s" % tx)
            zmq_publisher_eventfeed.send_json(tx)

    def clean_mempool_tx():
        """clean mempool transactions older than MAX_REORG_NUM_BLOCKS blocks"""
        mongo_db.mempool.remove({"viewed_in_block": {"$lt": config.CURRENT_BLOCK_INDEX - config.MAX_REORG_NUM_BLOCKS}})

    config.CURRENT_BLOCK_INDEX = 0 #initialize (last processed block index -- i.e. currently active block)
    config.LAST_MESSAGE_INDEX = -1 #initialize (last processed message index)
    config.BLOCKCHAIN_SERVICE_LAST_BLOCK = 0 #simply for printing/alerting purposes
    config.CAUGHT_UP_STARTED_EVENTS = False
    #^ set after we are caught up and start up the recurring events that depend on us being caught up with the blockchain

    #grab our stored preferences, and rebuild the database if necessary
    app_config = mongo_db.app_config.find()
    assert app_config.count() in [0, 1]
    if (app_config.count() == 0
        or config.REPARSE_FORCED
        or app_config[0]['db_version'] != config.DB_VERSION
        or app_config[0]['running_testnet'] != config.TESTNET):
        if app_config.count():
            logging.warn("clearblockd database version UPDATED (from %i to %i) or testnet setting changed (from %s to %s), or REINIT forced (%s). REBUILDING FROM SCRATCH ..." % (
                app_config[0]['db_version'], config.DB_VERSION, app_config[0]['running_testnet'], config.TESTNET, config.REPARSE_FORCED))
        else:
            logging.warn("clearblockd database app_config collection doesn't exist. BUILDING FROM SCRATCH...")
        app_config = blow_away_db()
        my_latest_block = LATEST_BLOCK_INIT
    else:
        app_config = app_config[0]
        #get the last processed block out of mongo
        my_latest_block = mongo_db.processed_blocks.find_one(sort=[("block_index", pymongo.DESCENDING)]) or LATEST_BLOCK_INIT
        #remove any data we have for blocks higher than this (would happen if counterblockd or mongo died
        # or errored out while processing a block)
        my_latest_block = prune_my_stale_blocks(my_latest_block['block_index'])

    #start polling clearinghoused for new blocks
    while True:
        try:
            running_info = util.call_jsonrpc_api("get_running_info", abort_on_error=True)
            if 'result' not in running_info:
                raise AssertionError("Could not contact clearinghoused")
            running_info = running_info['result']
        except Exception, e:
            logging.warn(str(e) + " -- Waiting 3 seconds before trying again...")
            time.sleep(3)
            continue

        if running_info['last_message_index'] == -1:
            #last_message_index not set yet (due to no messages in clearinghoused DB yet)
            logging.warn("No last_message_index returned. Waiting until clearinghoused has messages...")
            time.sleep(10)
            continue

        #wipe our state data if necessary, if clearinghoused has moved on to a new DB version
        wipeState = False
        updatePrefs = False
        if app_config['clearinghoused_db_version_major'] is None \
           or app_config['clearinghoused_db_version_minor'] is None \
           or app_config['clearinghoused_running_testnet'] is None:
            updatePrefs = True
        elif running_info['version_major'] != app_config['clearinghoused_db_version_major']:
            logging.warn("clearinghoused MAJOR DB version change (we built from %s, clearinghoused is at %s). Wiping our state data." % (
                app_config['clearinghoused_db_version_major'], running_info['version_major']))
            wipeState = True
            updatePrefs = True
        elif running_info['version_minor'] != app_config['clearinghoused_db_version_minor']:
            logging.warn("clearinghoused MINOR DB version change (we built from %s.%s, clearinghoused is at %s.%s). Wiping our state data." % (
                app_config['clearinghoused_db_version_major'], app_config['clearinghoused_db_version_minor'],
                running_info['version_major'], running_info['version_minor']))
            wipeState = True
            updatePrefs = True
        elif running_info.get('running_testnet', False) != app_config['clearinghoused_running_testnet']:
            logging.warn("clearinghoused testnet setting change (from %s to %s). Wiping our state data." % (
                app_config['clearinghoused_running_testnet'], running_info['running_testnet']))
            wipeState = True
            updatePrefs = True

        if wipeState:
            app_config = blow_away_db()
        if updatePrefs:
            app_config['clearinghoused_db_version_major'] = running_info['version_major']
            app_config['clearinghoused_db_version_minor'] = running_info['version_minor']
            app_config['clearinghoused_running_testnet'] = running_info['running_testnet']
            mongo_db.app_config.update({}, app_config)
            #reset my latest block record
            my_latest_block = LATEST_BLOCK_INIT
            config.CAUGHT_UP = False #You've Come a Long Way, Baby

        #work up to what block clearinghoused is at
        last_processed_block = running_info['last_block']
        if last_processed_block['block_index'] is None:
            logging.warn("clearinghoused has no last processed block (probably is reparsing). Waiting 3 seconds before trying again...")
            time.sleep(3)
            continue

        if my_latest_block['block_index'] < last_processed_block['block_index']:
            #need to catch up
            config.CAUGHT_UP = False

            cur_block_index = my_latest_block['block_index'] + 1
            #get the blocktime for the next block we have to process
            try:
                cur_block = util.call_jsonrpc_api("get_block_info",
                    {'block_index': cur_block_index}, abort_on_error=True)['result']
            except Exception, e:
                logging.warn(str(e) + " Waiting 3 seconds before trying again...")
                time.sleep(3)
                continue
            cur_block['block_time_obj'] = datetime.datetime.utcfromtimestamp(cur_block['block_time'])
            cur_block['block_time_str'] = cur_block['block_time_obj'].isoformat()

            try:
                block_data = util.call_jsonrpc_api("get_messages",
                    {'block_index': cur_block_index}, abort_on_error=True)['result']
            except Exception, e:
                logging.warn(str(e) + " Waiting 5 seconds before trying again...")
                time.sleep(5)
                continue
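#A hypothetical way to wire up and start the block feed (not in the original source).
# process_cpd_blockfeed() only needs an object exposing send_json(), which a standard
# pyzmq PUB socket provides; the bind endpoint below is a placeholder.
import zmq

zmq_context = zmq.Context()
zmq_publisher_eventfeed = zmq_context.socket(zmq.PUB)
zmq_publisher_eventfeed.bind('tcp://127.0.0.1:4001') #placeholder endpoint
process_cpd_blockfeed(zmq_publisher_eventfeed) #runs the polling loop indefinitely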
def get_unspent_txouts(address, return_confirmed=False):
    result = util.call_jsonrpc_api('get_unspent_txouts', {'address': address, 'return_confirmed': return_confirmed})
    return result['result']
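#Hypothetical usage (not in the original source): sum the unspent outputs for a
# placeholder address. The 'amount' field name is an assumption about the shape of the
# backend's get_unspent_txouts response.
txouts = get_unspent_txouts('PLACEHOLDER_ADDRESS', return_confirmed=True)
total = sum(txout.get('amount', 0) for txout in txouts)
print "total spendable:", total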