def rpc_loop(ncurses_q, json_q):
    """Worker loop: service UI requests and periodically poll bitcoind.

    ncurses_q -- queue consumed by the UI thread (receives RPC result dicts).
    json_q    -- queue of request dicts from the UI, either
                 {'blockheight': n} or {'txid': h}.

    Credentials are read from bitcoind-ncurses.conf ([rpc] section).
    """
    config = ConfigParser.ConfigParser()
    config.read('bitcoind-ncurses.conf')
    rpcuser = config.get('rpc', 'rpcuser')
    rpcpassword = config.get('rpc', 'rpcpassword')
    rpcip = config.get('rpc', 'rpcip')
    rpcport = config.get('rpc', 'rpcport')
    rpcurl = "http://" + rpcuser + ":" + rpcpassword + "@" + rpcip + ":" + rpcport
    rpchandle = AuthServiceProxy(rpcurl, None, 500)

    last_blockcount = 0  # ensures block info is updated initially
    last_update = time.time() - 2

    while 1:
        try:
            s = json_q.get(False)
        except:  # queue empty (Queue.Empty) -- nothing requested this cycle
            s = {}

        if 'blockheight' in s:
            blockhash = rpchandle.getblockhash(s['blockheight'])
            blockinfo = rpchandle.getblock(blockhash)
            ncurses_q.put(blockinfo)
            # BUG FIX: previously assigned the undefined name `cur_blockcount`
            # (NameError if a request arrived before the first periodic poll);
            # remember the height we just displayed instead.
            last_blockcount = s['blockheight']
        elif 'txid' in s:
            raw_tx = rpchandle.getrawtransaction(s['txid'])
            decoded_tx = rpchandle.decoderawtransaction(raw_tx)
            ncurses_q.put(decoded_tx)

        if (time.time() - last_update) > 2:
            info = rpchandle.getinfo()
            ncurses_q.put(info)
            nettotals = rpchandle.getnettotals()
            ncurses_q.put(nettotals)
            walletinfo = rpchandle.getwalletinfo()
            ncurses_q.put(walletinfo)

            cur_blockcount = info['blocks']
            if (cur_blockcount != last_blockcount):  # minimise RPC calls
                #if (last_blockcount == 0):
                lastblocktime = {'lastblocktime': time.time()}
                ncurses_q.put(lastblocktime)
                blockhash = rpchandle.getblockhash(cur_blockcount)
                blockinfo = rpchandle.getblock(blockhash)
                ncurses_q.put(blockinfo)
                last_blockcount = cur_blockcount
            last_update = time.time()
        time.sleep(0.5)  # minimise RPC calls
def handle_block(height):
    """Scan one block for CoinJoin-like transactions.

    Returns (height, cjs, txids): cjs is an (n, 9) feature matrix with one
    row per candidate transaction, txids the matching transaction ids.
    """
    rpc_connection = AuthServiceProxy(
        "http://%s:%[email protected]:8332" % (rpc_user, rpc_password))
    # NOTE(review): `hash` shadows the builtin of the same name
    hash = rpc_connection.getblockhash(height)
    # verbosity=2: block with fully decoded transactions
    block = rpc_connection.getblock(hash, 2)
    cjs = np.zeros((0, 9))  # feature rows accumulate here
    txids = []
    for tx in block['tx']:
        # frequency of each distinct output value in this transaction
        out_values = Counter([o['value'] for o in tx['vout']])
        m_c = out_values.most_common()
        # CoinJoin heuristic: a positive output value repeated more than once
        candidates = filter(lambda m: m[1] > 1, m_c)
        candidates = list(filter(lambda m: m[0] > 0, candidates))
        if len(candidates) == 0:
            continue
        cj = candidates[0]  # (value, count) of the most repeated output value
        addresses = [o['scriptPubKey']['addresses'] for o in tx['vout']
                     if 'addresses' in o['scriptPubKey'].keys()]
        addresses = [item for sublist in addresses for item in sublist]
        # presumably the Wasabi coordinator address -- TODO confirm
        is_wasabi = wasabi_address in addresses
        has_op_return = any([out['scriptPubKey']['type'] == 'nulldata'
                             for out in tx['vout']])
        # max/min over the Counter iterate its keys, i.e. distinct output values
        features = [height, len(tx['vin']), len(
            tx['vout']), cj[0], cj[1], max(out_values), min(out_values),
            has_op_return, is_wasabi]
        cjs = np.vstack((cjs, features))
        txids.append(tx['txid'])
    # logging.info("processed {}, {} cjs".format(height, len(txids)))
    return height, cjs, txids
def do_RPC(env, send_resp):
    """WSGI handler: serve a small bitcoind-style RPC API.

    Commands answerable from cached chain state are served locally;
    everything else is proxied to the configured bitcoind node.
    Unknown commands return the initial empty list.
    """
    # query string is parsed but discarded (_); args are the path components
    # after the first two; cur is a cursor from the shared DB pool
    _, args, cur = urlparse.parse_qs(
        env['QUERY_STRING']), env['PATH_INFO'].split(
        '/')[2:], sqc.dbpool.get().cursor()
    send_resp('200 OK', [('Content-Type', 'application/json')])
    result = []
    if args[0] == "getblockcount":
        # answered from cached state -- no RPC round trip
        result = json.dumps(sqc.cfg['block'])
    elif args[0] == "getinfo":
        result = json.dumps({
            'blocks': sqc.cfg['block'],
            'difficulty': bits2diff(gethdr(sqc.cfg['block'], sqc.cfg, 'bits'))
        })
    elif args[0] == "getdifficulty":
        result = json.dumps(
            bits2diff(gethdr(sqc.cfg['block'], sqc.cfg, 'bits')))
    else:
        # proxy the remaining commands to bitcoind
        rpc = AuthServiceProxy(sqc.cfg['rpc'])
        if args[0] == "getblock":
            result = json.dumps(rpc.getblock(args[1]), cls=btcEncoder)
        elif args[0] == "getblockhash":
            result = json.dumps(rpc.getblockhash(int(args[1])))
        elif args[0] == "getrawtransaction":
            result = json.dumps(rpc.getrawtransaction(args[1], 1),
                                cls=btcEncoder)
        elif args[0] == "gettxout":
            # served from the local DB cursor, not the node
            result = json.dumps(rpcTxOut(cur, args[1], args[2]))
        elif args[0] == "getmempoolinfo":
            result = json.dumps(rpc.getmempoolinfo(), cls=btcEncoder)
        elif args[0] == "getrawmempool":
            result = json.dumps(rpc.getrawmempool(False), cls=btcEncoder)
    return result
def query_transactions(ticker=None):
    """Import new wallet deposits for one currency, or fan out over all.

    Without a ticker: schedule one task per known Currency and return.
    With a ticker: process every incoming wallet transaction since the
    currency's last scanned block, then re-check unprocessed transactions.
    """
    if not ticker:
        # fan-out: queue a task for each configured currency
        for cur in Currency.objects.all():
            query_transactions.delay(cur.ticker)
        return

    currency = Currency.objects.select_for_update().get(ticker=ticker)
    rpc = AuthServiceProxy(currency.api_url)
    tip = rpc.getblockcount()

    seen = []
    since_hash = rpc.getblockhash(currency.last_block)
    for entry in rpc.listsinceblock(since_hash)['transactions']:
        if entry['txid'] in seen:
            continue
        if entry['category'] not in ('receive', 'generate', 'immature'):
            continue
        process_deposite_transaction(entry, ticker)
        seen.append(entry['txid'])

    currency.last_block = tip
    currency.save()

    # revisit anything still flagged unprocessed for this currency
    for pending in Transaction.objects.filter(processed=False, currency=currency):
        query_transaction(ticker, pending.txid)
def getBlock(i, ctx):
    """Return the decoded block at height *i* using the bitcoin client
    settings stored on the click context object."""
    cfg = ctx.obj['bitcoin_client']
    url = 'http://{}:{}@{}:{}'.format(
        cfg['user'], cfg['pwd'], cfg['host'], cfg['port'])
    proxy = AuthServiceProxy(url)
    return proxy.getblock(proxy.getblockhash(i))
def do_RPC(env, send_resp):
    """WSGI handler: answer a handful of bitcoind-style RPC commands.

    Cached-state commands are served locally; the rest are proxied to the
    configured node. Unknown commands yield an empty list.
    """
    get = urlparse.parse_qs(env["QUERY_STRING"])
    args = env["PATH_INFO"].split("/")[2:]
    cur = sqc.dbpool.get().cursor()
    send_resp("200 OK", [("Content-Type", "application/json")])

    command = args[0]

    # served from cached chain state
    if command == "getblockcount":
        return json.dumps(sqc.cfg["block"])
    if command == "getinfo":
        difficulty = bits2diff(gethdr(sqc.cfg["block"], "bits", sqc.cfg["path"]))
        return json.dumps({"blocks": sqc.cfg["block"], "difficulty": difficulty})
    if command == "getdifficulty":
        return json.dumps(bits2diff(gethdr(sqc.cfg["block"], "bits", sqc.cfg["path"])))

    # proxied to the node
    rpc = AuthServiceProxy(sqc.cfg["rpc"])
    if command == "getblock":
        return json.dumps(rpc.getblock(args[1]), cls=btcEncoder)
    if command == "getblockhash":
        return json.dumps(rpc.getblockhash(int(args[1])))
    if command == "getrawtransaction":
        return json.dumps(rpc.getrawtransaction(args[1], 1), cls=btcEncoder)
    if command == "gettxout":
        # answered from the local database cursor
        return json.dumps(rpcTxOut(cur, args[1], args[2]))
    if command == "getmempoolinfo":
        return json.dumps(rpc.getmempoolinfo(), cls=btcEncoder)
    if command == "getrawmempool":
        return json.dumps(rpc.getrawmempool(False), cls=btcEncoder)
    return []
def get(self, height=None):
    """Write the block hash for *height* to the response; on any failure
    write the leading part of the error text instead."""
    try:
        proxy = AuthServiceProxy(rpc_url)
        self.write(proxy.getblockhash(int(height)))
    except Exception as err:
        print('Error', err)
        self.write(str(err).split(':')[0])
def main():
    """Demo: embed a file's SHA-256 + a URL into testnet OP_RETURN outputs,
    mine a block, then decode the data back out of the mined transactions."""
    rpc_user = '******'
    rpc_password = '******'
    # testnet node on localhost (port 18332)
    rpc = AuthServiceProxy(f'http://{rpc_user}:{rpc_password}@127.0.0.1:18332/')
    #rpc = AuthServiceProxy(f'http://{rpc_user}:{rpc_password}@172.17.0.2:18332/')
    print(rpc.getinfo())
    best_block_hash = rpc.getbestblockhash()
    print(rpc.getblock(best_block_hash))
    blhash = rpc.getblockhash(0)  # blhash is the block's hash string
    bl = rpc.getblock(blhash)  # bl is the block info
    print(bl)
    dummy_address = '2MudgRfNaaw96kqAWziZ5JGsPbo2pzQp7Jy'
    change_address = '2NAVrak22jX3DQyDqnoqdm5ZTak1RgXWPzo'
    filename = 'mark_token.btc.json'
    url = 'https://drive.google.com/file/d/1ZR6Q5sCM_acUpPy7s3d9GJH8I2Plh4FI/view?usp=sharing'
    with open(filename, 'rb') as f:
        data2 = f.read()
    hashdata = hashlib.sha256(data2).hexdigest()
    js = {'file_hash': hashdata, 'url': url}
    data = json.dumps(js).encode("UTF-8")
    # carve the payload into <=80-byte chunks (OP_RETURN data limit) and
    # publish one transaction per chunk
    # NOTE(review): original indentation was lost; the per-chunk tx creation
    # is assumed to be inside this loop -- confirm against upstream
    while True:
        if len(data) >= 80:
            buffer = data[:80]
            data = data[80:]
        elif len(data) == 0:
            break
        else:
            buffer = data
            data = b''
        first_unspent = rpc.listunspent()[0]
        txid = first_unspent['txid']
        vout = first_unspent['vout']
        input_amount = first_unspent['amount']
        SATOSHI = Decimal("0.00000001")
        # everything minus a 0.005 fee and one satoshi goes back as change
        change_amount = input_amount - Decimal("0.005") - SATOSHI
        tx = rpc.createrawtransaction(
            [{"txid": txid, "vout": vout}],
            [{change_address: change_amount},
             {'data': hexlify(buffer).decode('utf-8')},
             ])
        tx = rpc.signrawtransactionwithwallet(tx)['hex']
        rpc.sendrawtransaction(tx)
    # mine one block so the published transactions confirm
    block_hash = rpc.generatetoaddress(1, change_address)[0]
    block = rpc.getblock(block_hash)
    txs = block['tx'][1:]  # skip the coinbase transaction
    print(f'# of txs: {len(txs)}')
    pprint(txs)
    for tx_hash in txs:
        raw_tx = rpc.gettransaction(tx_hash)['hex']
        decoded_tx = rpc.decoderawtransaction(raw_tx)
        # pprint(decoded_tx)
        print(decoded_tx['vout'][1]['scriptPubKey']['asm'])
def main():
    """Poll the coin daemon forever and emit collectd-style metric lines.

    Each cycle: reconnect, print network byte totals, connection count,
    mempool stats, a fee estimate, the latest block's size/height/difficulty,
    and the network hashrate; then flush stdout and sleep `interval` seconds.
    """
    while True:
        try:
            access = AuthServiceProxy(authserv)

            function = access.getnettotals()
            funcname = str("getnettotals")
            for subkey in ['totalbytesrecv', 'totalbytessent']:
                value = function[subkey]
                printValue("counter", funcname, funcname, subkey, value)

            function = access.getnetworkinfo()
            funcname = str("getnetworkinfo")
            subkey = str("connections")
            value = function[subkey]
            printValue("gauge", subkey, funcname, subkey, value)

            function = access.getmempoolinfo()
            funcname = str("getmempoolinfo")
            for subkey in ['size', 'bytes']:
                value = function[subkey]
                # without this it will appear "stacked" in CGP Panel
                funccat = (str(funcname) + "_" + str(subkey))
                printValue("gauge", funccat, funcname, subkey, value)

            # since 0.12 estimatefee 1 fails. Use estimatefee 2 instead.
            # see https://github.com/bitcoin/bitcoin/issues/7545
            function = access.estimatefee(2)
            funcname = str("estimatefee")
            value = function
            printValue("gauge", funcname, funcname, funcname, value)

            # get size, height, diff of the last block
            blockcount = access.getblockcount()
            blockhash = access.getblockhash(blockcount)
            function = access.getblock(blockhash)
            funcname = str("getblock")
            for subkey in ['size', 'height', 'difficulty']:
                funccat = (str(funcname) + "_" + str(subkey))
                value = function[subkey]
                printValue("gauge", funccat, funcname, subkey, value)

            # network hashrate
            function = access.getnetworkhashps()
            funcname = str("getnetworkhashps")
            value = function
            printValue("gauge", funcname, funcname, funcname, value)
        except Exception:
            # BUG FIX: was a bare `except:`, which also swallowed SystemExit
            # and KeyboardInterrupt, making the poller impossible to stop
            pass
        sys.stdout.flush()
        time.sleep(interval)
def search(search_term):
    """Classify a search term as a block height, address, block hash or txid.

    Returns a dict with 'type' ('block', 'addr', 'tx', 'not_found' or
    'error') and 'id' (or 'error_message' on failure).
    """
    # BUG FIX: initialise results before the try block; previously, if
    # credential lookup or AuthServiceProxy construction raised, the outer
    # except handler hit a NameError on `results`.
    results = {}
    results["type"] = "not_found"
    results["id"] = ""
    try:
        rpc_user = get_bitcoin_rpc_username()
        rpc_pass = get_bitcoin_rpc_password()
        rpc_connection = AuthServiceProxy(
            "http://%s:%[email protected]:8332" % (rpc_user, rpc_pass), timeout=10)
        search_term = search_term.strip()
        # Try to get a block (by height)
        try:
            if (search_term.isdigit()):
                blockhash = rpc_connection.getblockhash(int(search_term))
                results["type"] = "block"
                results["id"] = blockhash
                return results
        except JSONRPCException as e:
            pass
        # Try to get address
        try:
            if is_bitcoin_address(search_term):
                results["type"] = "addr"
                results["id"] = search_term
                return results
        except:
            pass
        # Try to get a block (by hash)
        try:
            block = rpc_connection.getblock(search_term)
            results["type"] = "block"
            results["id"] = search_term
            return results
        except JSONRPCException as e:
            pass
        # Try to get a transaction
        try:
            rawtx = rpc_connection.getrawtransaction(search_term)
            results["type"] = "tx"
            results["id"] = search_term
            return results
        except JSONRPCException as e:
            pass
    except Exception as e:
        results["type"] = "error"
        results["error_message"] = str(e)
    return results
def getLastBlock(cfg):
    """Return (height, hash) of a block trailing 60 behind the node's tip.

    Retries forever -- until the module-level `done` flag is set -- while the
    RPC endpoint is unreachable. (Python 2 code: note the `except E, e`
    syntax.)
    """
    blk = 0
    while not done:  # `done` is a module-level shutdown flag set elsewhere
        try:
            # this tries to talk to bitcoind despite it being comatose
            rpc = AuthServiceProxy(cfg['rpc'], timeout=120)
            if blk == 0:
                blkinfo = rpc.getblockchaininfo()
                blk = blkinfo['blocks'] - 60
            blkhash = rpc.getblockhash(blk)
            # trailing by 60 to avoid reorg problems
            return blk, blkhash
        except Exception, e:
            log('Blkdat rpc ' + str(e) + ' trying again')
            sleep(5)
def blockchain_scrape(start):
    """Sample every 100th block from `start` to the chain tip, appending
    coinbase info (is_coinbase flag, total reward, timestamp, height) to the
    module-level result lists.
    """
    # total number of blocks mined so far (chain tip height)
    tip_raw = requests.get(api_1 + "blocks/tip/height")
    tip = tip_raw.json()
    # PERF FIX: the RPC connection is loop-invariant -- create it once
    # instead of once per sampled block
    access = AuthServiceProxy("http://%s:%[email protected]:8332" %
                              ("{USER}", "{PASSWORD}"))
    for height in range(start, tip, 100):
        # block hash via local node RPC
        block_hash = access.getblockhash(height)
        # first txid in this block (the coinbase) via the HTTP explorer API
        txid_raw = requests.get(api_1 + "block/" + block_hash + "/txids")
        txids = txid_raw.json()
        txid_1 = txids[0]
        # full info for that transaction
        tx_info_raw = requests.get(api_1 + "tx/" + txid_1)
        tx_info = tx_info_raw.json()
        # confirm the transaction really is the coinbase
        vin = tx_info["vin"]
        is_coinbase = vin[0]["is_coinbase"]
        coinbase_list.append(is_coinbase)
        # total value of the coinbase outputs: there are occasionally several
        # output addresses, so sum them all for the full block reward
        vout = tx_info["vout"]
        v_list = [out["value"] for out in vout]
        total_value = sum(v_list)
        value_list.append(total_value)
        # block timestamp
        status = tx_info["status"]
        timestamp = status["block_time"]
        timestamp_list.append(timestamp)
        block_height_list.append(height)
        print(height, end=' ')
def getblockhash(height):
    """Print the hash and contents of the block at `height`, plus a decoded
    sample transaction, and return nothing (demo/debug helper).

    The node URL is read from config.yml under bitcoind.url.
    """
    import json  # local import: used only for pretty-printing below

    with open("config.yml", 'r') as ymlfile:
        # BUG FIX: yaml.load without an explicit Loader is deprecated and
        # unsafe on untrusted input -- use safe_load
        cfg = yaml.safe_load(ymlfile)
    url = cfg['bitcoind']['url']
    rpc_connection = AuthServiceProxy(url)
    blockhash = rpc_connection.getblockhash(height)
    print("blockhash of height " + str(height) + " = " + blockhash)
    print(rpc_connection.getblock(blockhash))
    print(height)
    #print(rpc_connection.getrawtransaction(''))
    rawtransaction = rpc_connection.getrawtransaction(
        '75a98ce35b869772adbf643b3f8acadfa5b46b4cd8bfef26f9e079c517018285')
    print(rawtransaction)
    tx = Transaction.from_hex(rawtransaction)
    # BUG FIX: the original called tx.json().dumps(parsed, ...) where
    # `parsed` was undefined; dump the parsed transaction itself instead
    print(json.dumps(tx.json(), indent=4, sort_keys=True))
class BitcoinCLI:
    """Thin convenience wrapper around a bitcoind JSON-RPC endpoint."""

    def __init__(self):
        self.rpc_connection = AuthServiceProxy(config.endpoint)

    def get_best_block_hash(self):
        """Hash of the current chain tip."""
        return self.rpc_connection.getbestblockhash()

    def get_block_count(self):
        """Current block height."""
        return self.rpc_connection.getblockcount()

    def get_best_block(self):
        """Decoded block dict for the current chain tip."""
        return self.rpc_connection.getblock(self.rpc_connection.getbestblockhash())

    def get_block_hash(self, height):
        """Hash of the block at the given height."""
        return self.rpc_connection.getblockhash(height)

    def get_block(self, hash):
        """Decoded block dict for the given hash."""
        return self.rpc_connection.getblock(hash)

    def get_transaction(self, hash):
        """Wallet transaction details for the given txid."""
        return self.rpc_connection.gettransaction(hash)

    def get_txn_list_from_block(self, hash):
        """Txid list of a block; raises KeyError if the block has no 'tx'."""
        block = self.get_block(hash)
        if 'tx' in block:
            return block['tx']
        else:
            raise KeyError('Block {0} has no attribute tx'.format(hash))

    def get_raw_transaction(self, tx_id):
        """Decoded (verbosity 1) transaction for the given txid."""
        out = self.rpc_connection.getrawtransaction(tx_id, 1)
        return out

    def decoderawtransaction(self, tx_id):
        """Decode a raw transaction hex string.

        BUG FIX: the original passed the undefined name `raw`; the argument
        (a raw hex string, despite the parameter name) is what gets decoded.
        """
        out = self.rpc_connection.decoderawtransaction(tx_id)
        return out

    def get_tx_outputs(self, tx_id):
        """Output values (as floats) of the given transaction."""
        tx = self.rpc_connection.getrawtransaction(tx_id, 1)
        outputs = [float(i['value']) for i in tx['vout']]
        return outputs

    def get_tx_details(self, tx_id):
        """Output values of the given transaction.

        NOTE(review): currently identical to get_tx_outputs -- kept for
        backward compatibility with existing callers.
        """
        tx = self.rpc_connection.getrawtransaction(tx_id, 1)
        outputs = [float(i['value']) for i in tx['vout']]
        return outputs
class ChainData:
    """Read-only view of chain data via the configured node's JSON-RPC API."""

    RPC_USER = RPC_NODE["user"]
    RPC_PASSWORD = RPC_NODE["password"]
    RPC_SERVER = RPC_NODE["server"]
    RPC_PORT = RPC_NODE["port"]
    MIN_BLOCK = CHAIN["start_block"]
    MIN_CONFIRMS = CHAIN["confirms"]
    COIN = CHAIN["name"]

    def __init__(self):
        self.rpc_conn = AuthServiceProxy(
            "http://%s:%s@%s:%s" % (self.RPC_USER, self.RPC_PASSWORD,
                                    self.RPC_SERVER, self.RPC_PORT))

    def get_blocks(self):
        """Block count reported by getblockchaininfo."""
        return self.rpc_conn.getblockchaininfo()["blocks"]

    def get_headers(self):
        """Header count reported by getblockchaininfo."""
        return self.rpc_conn.getblockchaininfo()["headers"]

    def get_blockhash(self, block):
        """Hash of the block at the given height."""
        return self.rpc_conn.getblockhash(block)

    def _get_blocktransactions(self, block):
        """Return (decoded transactions, blockhash) for a block height."""
        blockhash = self.get_blockhash(block)
        details = self.rpc_conn.getblock(blockhash, 2)
        return details["tx"], blockhash

    def getblock_out_balances(self, block):
        """Collect (address, value) pairs for every addressed output in the
        block; returns (pairs, blockhash). Multi-address outputs are logged
        but only their first address is recorded."""
        transactions, blockhash = self._get_blocktransactions(block)
        balances = []
        for transaction in transactions:
            for output in transaction['vout']:
                spk = output.get("scriptPubKey")
                if spk and spk.get("addresses"):
                    balances.append(
                        (output["scriptPubKey"]["addresses"][0],
                         output["value"]))
                    if len(output["scriptPubKey"]["addresses"]) > 1:
                        logger.error(
                            "More than one address detected! block %s, addresses: %s"
                            % (block, output["scriptPubKey"]["addresses"]))
        return balances, blockhash
def get_block_txs(height):
    """ Method used to get tx hashes from block
    Args: block height (int)
    Returns: List of transaction ids (array)
    """
    url = ("http://%s:%[email protected]:%s/") % (
        config['RPC_USER'], config['RPC_PASS'], config['RPC_PORT'])
    rpc = AuthServiceProxy(url)
    return rpc.getblock(rpc.getblockhash(height))['tx']
def query_transactions(ticker=None):
    """Import new wallet deposits for a currency, or fan out over all.

    Without a ticker: schedules itself once per Currency and returns.
    With a ticker: processes incoming transactions since the currency's
    last scanned block, then re-checks unprocessed Transaction rows.
    """
    # logger.info("Execute Query Transactions")
    if not ticker:
        #logger.warning("No ticker found. Performing Currency lookup.")
        for c in Currency.objects.all():
            #logger.info("Deffered Query Transactions: {}".format(c.ticker))
            query_transactions.delay(c.ticker)
        return
    # logger.info("Ticker found: {}".format(ticker))
    # row lock so concurrent workers don't double-process a currency
    currency = Currency.objects.select_for_update().get(ticker=ticker)
    coin = AuthServiceProxy(currency.api_url)
    # logger.info("RPC Call to {}".format(currency.api_url))
    current_block = coin.getblockcount()
    # logger.info("Current Block: {}".format(current_block))
    processed_transactions = []
    block_hash = coin.getblockhash(currency.last_block)
    # logger.info("Block Hash: {}".format(block_hash))
    blocklist = coin.listsinceblock(block_hash)
    # logger.info("Block List since {0} : {1}".format(block_hash, blocklist))
    transactions = blocklist['transactions']
    # logger.info("Transactions: {}".format(transactions))
    for tx in transactions:
        if tx['txid'] in processed_transactions:
            continue
        # only incoming categories are deposits
        if tx['category'] not in ('receive', 'generate', 'immature'):
            continue
        # logger.info("Processing Transactions: {}".format(tx))
        process_deposite_transaction(tx, ticker)
        processed_transactions.append(tx['txid'])
    currency.last_block = current_block
    # log.info("Latest block: {}".format(current_block))
    currency.save()
    for tx in Transaction.objects.filter(processed=False, currency=currency):
        # logger.info("Querying Tx: {0} -> {1}".format(ticker, tx.txid))
        query_transaction(ticker, tx.txid)
class ChainData:
    """Walks block outputs and records each output address in the DB."""

    RPC_USER = RPC_NODE["user"]
    RPC_PASSWORD = RPC_NODE["password"]
    RPC_SERVER = RPC_NODE["server"]
    RPC_PORT = RPC_NODE["port"]

    def __init__(self):
        self.db1 = db.DataBase()
        self.rpc_conn = AuthServiceProxy(
            "http://%s:%s@%s:%s" % (self.RPC_USER, self.RPC_PASSWORD,
                                    self.RPC_SERVER, self.RPC_PORT))

    def get_blocks(self):
        """Block count reported by getblockchaininfo."""
        return self.rpc_conn.getblockchaininfo()["blocks"]

    def get_headers(self):
        """Header count reported by getblockchaininfo."""
        return self.rpc_conn.getblockchaininfo()["headers"]

    def get_blockhash(self, block):
        """Hash of the block at the given height."""
        return self.rpc_conn.getblockhash(block)

    def _get_blocktransactions(self, block):
        """Return (decoded transactions, blockhash) for a block height."""
        blockhash = self.get_blockhash(block)
        details = self.rpc_conn.getblock(blockhash, 2)
        return details["tx"], blockhash

    def getblock_out_addresses(self, block):
        """Add every output address in the block to the unique-address DB."""
        transactions, blockhash = self._get_blocktransactions(block)
        count = 0
        for transaction in transactions:
            for output in transaction['vout']:
                spk = output.get("scriptPubKey")
                if spk and spk.get("addresses"):
                    for address in output["scriptPubKey"]["addresses"]:
                        count += 1
                        self.db1.add_unique(address)
def main(height, best):
    """Index blocks [height, best) into Elasticsearch. (Python 2 code.)

    On RPC failure the connection is re-created after a short pause and the
    same height retried; the height advances only when txs_to_es reports
    success (returns 1).
    """
    rpc_connection = AuthServiceProxy(
        'http://%s:%s@%s:%d' % (rpc_user, rpc_password, rpc_ip, rpc_port))
    es = Elasticsearch("%s:%d" % (es_ip, es_port))
    while height < best:
        print 'height: %d' % height
        try:
            block_hash = rpc_connection.getblockhash(height)
            # verbosity 2: block with fully decoded transactions
            block = rpc_connection.getblock(block_hash, 2)
        except Exception as e:
            print e
            time.sleep(3)
            # reconnect: AuthServiceProxy connections can go stale
            rpc_connection = AuthServiceProxy(
                'http://%s:%s@%s:%d' % (rpc_user, rpc_password, rpc_ip, rpc_port))
        else:
            txs = block['tx']
            txs_result = txs_to_es(rpc_connection, es, txs, block)
            if txs_result == 1:
                height += 1
class Coind:
    """Minimal JSON-RPC client wrapper for the local coin daemon."""

    def __init__(self):
        # connect to the local coin daemon.
        self.access = AuthServiceProxy(
            "http://%s:%[email protected]:12341" % ('RPCuser', 'RPCpassword'))

    def getblockhash(self, idx):
        """Hash of the block at height `idx`."""
        return self.access.getblockhash(idx)

    def getblock(self, b_hash):
        """Block dict for `b_hash`: hash, height, tx list, difficulty,
        merkleroot, time, nonce, bits, size, confirmations, prev/next hash."""
        return self.access.getblock(b_hash)

    def gettransaction(self, t_hash):
        """Transaction dict for `t_hash`, or False if the lookup fails."""
        try:
            result = self.access.gettransaction(t_hash)
        except:
            return (False)
        return (result)

    def getblockcount(self):
        """Current block height."""
        return self.access.getblockcount()
class Coind:
    """Minimal JSON-RPC client wrapper for the local coin daemon."""

    def __init__(self):
        # connect to the local coin daemon.
        self.access = AuthServiceProxy("http://%s:%[email protected]:12341"%('RPCuser', 'RPCpassword'))

    def getblockhash(self, idx):
        """Return the hash of the block at height `idx`."""
        b_hash = self.access.getblockhash(idx)
        return(b_hash)

    def getblock(self, b_hash):
        """Return the daemon's block dict for `b_hash`.

        Example keys: merkleroot, nonce, previousblockhash, hash, version,
        tx (list of txids), height, difficulty (Decimal), nextblockhash,
        confirmations, time, bits, size.
        """
        block = self.access.getblock(b_hash)
        return(block)

    def gettransaction(self, t_hash):
        """Return the transaction dict, or False if the lookup fails."""
        try:
            trans = self.access.gettransaction(t_hash)
        except:
            return(False)
        return(trans)

    def getblockcount(self):
        """Return the current block height."""
        return(self.access.getblockcount())
# Walk the twister blockchain from the last saved block, collecting every
# registered username into a pickled MyDb. (Python 2 code: cPickle,
# dict.has_key, print statement.)

class User:
    # per-username profile placeholder, filled in later
    avatar = ""
    fullname = ""
    location = ""
    updateTime = 0

class MyDb:
    # persisted scan state: hash of the last block processed
    lastBlockHash = 0

try:
    # resume from a previously pickled database
    db = cPickle.load(open(dbFileName))
    nextHash = db.lastBlockHash
except:
    # no/unreadable state file: start fresh from the genesis block
    db = MyDb()
    db.usernames = {}
    nextHash = twister.getblockhash(0)

while True:
    block = twister.getblock(nextHash)
    db.lastBlockHash = block["hash"]
    # progress indicator (carriage return keeps it on one line)
    print str(block["height"]) + "\r",
    usernames = block["usernames"]
    for u in usernames:
        if not db.usernames.has_key(u):
            db.usernames[u] = User()
    if block.has_key("nextblockhash"):
        nextHash = block["nextblockhash"]
    else:
        # reached the chain tip
        break

now = time.time()
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException

# rpc_user and rpc_password are set in the bitcoin.conf file
rpc_btc = AuthServiceProxy("http://%s:%[email protected]:8332"%('rpc_user', 'rpc_password'))

# Dump per-block stats (height, reward, hash, size, tx count, ...) as CSV-ish
# lines. BUG FIX: the file was opened 'ab+' (binary) but written with str,
# which raises TypeError on Python 3 -- open in text append mode instead
# (which also creates the file if missing, so no pre-made blank file needed).
# The `with` block replaces the explicit close() and guarantees it on error.
with open('blockchain.txt', 'a+') as blockchain:
    # blocks 1..416569, one at a time
    for i in range(1, 416570, 1):
        get_block_hash = rpc_btc.getblockhash(i)
        block = rpc_btc.getblock(get_block_hash)
        # decode the coinbase (first tx of the block)
        coinbase = rpc_btc.decoderawtransaction(rpc_btc.getrawtransaction(block['tx'][0]))
        value = coinbase['vout'][0]['value']  # this gets total block reward
        print(i)
        blockchain.write(str(block['height'])+', '+str(value)+', '+str(block['hash'])+', '+str(block['size'])+', '+str(len(block['tx']))+', '+str(block['version'])+', '+str(block['merkleroot'])+', '+str(block['time'])+', '+str(block['nonce'])+', '+str(block['bits'])+', '+str(block['difficulty'])+', '+str(block['chainwork'])+'\n')

print('done')
# Walk the commentchain blockchain from the last saved block, collecting
# every registered username into a pickled MyDb. (Python 2 code: cPickle,
# dict.has_key, print statement.)

class User:
    # per-username profile placeholder, filled in later
    avatar = ""
    fullname = ""
    location = ""
    updateTime = 0

class MyDb:
    # persisted scan state: hash of the last block processed
    lastBlockHash = 0

try:
    # resume from a previously pickled database
    db = cPickle.load(open(dbFileName))
    nextHash = db.lastBlockHash
except:
    # no/unreadable state file: start fresh from the genesis block
    db = MyDb()
    db.usernames = {}
    nextHash = commentchain.getblockhash(0)

while True:
    block = commentchain.getblock(nextHash)
    db.lastBlockHash = block["hash"]
    # progress indicator (carriage return keeps it on one line)
    print str(block["height"]) + "\r",
    usernames = block["usernames"]
    for u in usernames:
        if not db.usernames.has_key(u):
            db.usernames[u] = User()
    if block.has_key("nextblockhash"):
        nextHash = block["nextblockhash"]
    else:
        # reached the chain tip
        break

now = time.time()
class SearchTransaction():
    """Scans qtum blocks for calls to known smart contracts and dispatches
    each decoded call to the matching handler method."""

    def __init__(self, from_block=0, qtum_host=qtum_host_def):
        self.qtum = AuthServiceProxy(qtum_host)
        self.from_block = from_block  # height to start scanning from
        self.client = self.conection_stor()  # storage-service client (blocks until reachable)
        self.balance = ClientBalance(balance_server)

    def abi_to_params(self, abi, output_types):
        """Decode a hex-encoded ABI argument blob into `output_types` values."""
        decode_hex = codecs.getdecoder("hex_codec")
        encode_hex = codecs.getencoder("hex_codec")  # NOTE(review): unused
        data = decode_hex(abi)[0]
        return decode_abi(output_types, data)

    def search_transaction(self, txid, address_smart_contract, vouts):
        """Inspect a transaction's vouts for contract calls we understand.

        `signatures` maps 4-byte function selectors to
        [format template, ABI argument types, bound handler method].
        """
        signatures = {'8c3ce5d7': ['makeCid({0[0]}, {0[1]}, {0[2]}, {0[3]}, {0[4]})',
                                   ['string', 'address', 'string', 'uint256', 'uint256'],
                                   self.new_cid],
                      '65d72416': ['newOffer({0[0]}, {0[1]}, {0[2]}, {0[3]}, {0[4]})',
                                   ['uint256', 'address', 'uint256', 'uint256', 'string'],
                                   self.new_offer],
                      '715c084b': ['sellContent({0[0]}, {0[1]}, {0[2]})',
                                   ['uint256', 'address', 'string', 'string'],
                                   self.confirm_balance],
                      'bbfd5e53': ['changeOwner({0[0]}, {0[1]}, {0[2]})',
                                   ['uint256', 'address', 'string', 'string'],
                                   self.confirm_balance],
                      '41309af4': ["newReview({0[0]}, {0[1]}, {0[2]}, {0[3]})",
                                   ["uint256", "address", "string"],
                                   self.update_review],
                      'a9059cbb': ["",
                                   ['address', 'uint'],
                                   self.balance_put]
                      }
        list_data = []
        # hex_block = self.qtum.getrawtransaction(txid)
        # decode_block = self.qtum.decoderawtransaction(hex_block)
        # pprint(decode_block)
        # vouts = decode_block["vout"]
        for vout in vouts:
            script_pub_key = vout["scriptPubKey"]
            types = script_pub_key["type"]
            if types == "call":  # contract-call output
                asm = script_pub_key["asm"]
                asm_split = asm.split()
                asm_data = asm_split[3]  # call payload: selector + ABI args
                smart_contr_address = asm_split[4]
                if smart_contr_address in address_smart_contract:
                    hex_address = asm_data[:8]  # 4-byte function selector
                    data = asm_data[8:]
                    signatures_list = signatures[hex_address]
                    signatures_list_type = signatures_list[1]
                    # signatures_list_text = signatures_list[0]
                    # print(data, signatures_list_type)
                    try:
                        decode = self.abi_to_params(data, signatures_list_type)
                        new_decode = self.change_decode(signatures_list_type, decode)
                        data_write = [txid] + new_decode
                        method = signatures_list[2]
                        method_call = method(data_write)  # dispatch to handler
                        # print(map(signatures_list[2], data))
                        # print(data)
                        # decode_string = signatures_list_text.format(new_decode)
                        # print(decode_string)
                        # NOTE(review): this extends list_data with the hex
                        # string's characters, not the decoded values --
                        # verify this is the intended return payload
                        list_data += data
                    except Exception as e:
                        print(e)
        return list_data

    def change_decode(self, signatures_list_type, decode):
        """Normalise decoded ABI values in place: strip the 2-char prefix
        from the first address, bytes->str for the first string."""
        decode = list(decode)
        if "address" in signatures_list_type:
            index_adr = signatures_list_type.index("address")
            decode_index_adr = decode[index_adr]
            new_adr = decode_index_adr[2:]
            decode[index_adr] = new_adr
        if "string" in signatures_list_type:
            index_str = signatures_list_type.index("string")
            decode_index_str = decode[index_str]
            new_str = decode_index_str.decode()
            decode[index_str] = new_str
        return decode

    def block_hash_num(self, block=None):
        # get block hash (defaults to self.from_block); None on RPC failure
        try:
            if not block:
                block = self.from_block
            block_hash = self.qtum.getblockhash(block)
            return block_hash
        except:
            pass

    def get_transaction_in_block(self, block_hash=None):
        # get list transaction in block; None on RPC failure
        try:
            if not block_hash:
                block_hash = self.block_hash_num()
            block = self.qtum.getblock(block_hash)
            list_tx = block["tx"]
            return list_tx
        except:
            pass

    def get_raw_transaction(self, transaction_blocks=None):
        # get raw transaction for each txid; each entry is [raw_or_wallet_tx, txid]
        try:
            if not transaction_blocks:
                transaction_blocks = self.get_transaction_in_block()
            transaction_list = []
            for transaction_block in transaction_blocks:
                try:
                    transaction_data = [self.qtum.getrawtransaction(transaction_block)]
                    transaction_data += [transaction_block]
                    transaction_list += [transaction_data]
                except JSONRPCException:
                    # not in the raw index -- fall back to the wallet view
                    try:
                        transaction_data = [self.qtum.gettransaction(transaction_block)]
                        transaction_data += [transaction_block]
                        transaction_list += [transaction_data]
                    except JSONRPCException:
                        pass
                        # print(transaction_block)
            return transaction_list
        except:
            pass

    def decode_raw_transaction(self, address_smart_contract, encoded_datas=None):
        # decode raw transaction and feed each one's vouts to search_transaction
        try:
            if not encoded_datas:
                try:
                    encoded_datas = self.get_raw_transaction()
                except:
                    pass
            for encoded_data in encoded_datas:
                try:
                    # encoded_data = encoded_datas[2]
                    # print(encoded_data)
                    transaction_data = self.qtum.decoderawtransaction(encoded_data[0])
                    # print(transaction_data)
                    # vin = transaction_data["vin"]
                    # print(vin)
                    txid = encoded_data[1]
                    vout = transaction_data["vout"]
                    # print(vout)
                    result = self.search_transaction(txid, address_smart_contract, vout)
                except:
                    pass
        except:
            pass

    def balance_put(self, data):
        """Handler for transfer(address,uint): credit the PUT balance."""
        txid = data[0]
        address = data[1]
        address = Qtum.hex_to_qtum_address(address, mainnet=False)
        ammount = data[2]
        check = self.balance.get_balance(address, "PUT")
        # a list result signals an existing balance record -- TODO confirm
        if type(check) == list:
            update_data_1 = self.balance.inc_balance(address, ammount, "PUT")

    def new_cid(self, data):
        """Handler for makeCid: mark the user's content as updated."""
        tx_hash = data[0]
        cid = data[1]
        result = self.client.update_users_content(txid=tx_hash)

    def new_offer(self, data):
        """Handler for newOffer: record the offer and send a mail confirmation."""
        txid = data[0]
        cid = data[1]
        address = data[2]
        offer_type = data[3]
        price = data[4]
        self.client.update_offer(txid)
        mail = self.client.mailed_confirm(cid=cid, buyer_address=address,
                                          offer_type=offer_type, price=price)

    def confirm_balance(self, data):
        """Handler for sellContent / changeOwner: confirm the buyer's balance."""
        tx_hash = data[0]
        cid = data[1]
        address = data[2]
        result = self.balance.confirm_balance(txid=tx_hash, cid=cid,
                                              buyer_address=address)

    def update_review(self, data):
        """Handler for newReview."""
        txid = data[0]
        self.client.update_review(txid)

    def conection_stor(self):
        # retry until the storage service accepts a connection
        while True:
            try:
                cli = ClientStorge(storghost)
                return cli
            except:
                sleep(1)

    def run(self, from_i, address_smart_contract):
        """Follow the chain from height `from_i`, scanning each new block as
        it appears (polls once per second when caught up)."""
        while True:
            getlastblock = self.qtum.getblockcount()
            # print(getlastblock)
            if getlastblock >= from_i:
                pars = SearchTransaction(from_i)
                result = pars.decode_raw_transaction(address_smart_contract)
                print(from_i)
                from_i += 1
            else:
                sleep(1)
class RpcClient:
    """Wrapper around the PIVX wallet's JSON-RPC interface.

    Every method logs failures via printException and (except where noted)
    returns None on error rather than raising, so callers must handle
    missing results. "Request-sent" errors are treated the same as others
    here (NOTE(review): both branches of the if/else pairs below are
    identical -- possibly leftover from a removed distinction).
    """

    def __init__(self):
        # connection parameters come from the RPC config file
        self.rpc_ip, self.rpc_port, self.rpc_user, self.rpc_passwd = readRPCfile()
        rpc_url = "http://%s:%s@%s:%d" % (self.rpc_user, self.rpc_passwd,
                                          self.rpc_ip, self.rpc_port)
        try:
            self.conn = AuthServiceProxy(rpc_url, timeout=8)
        except JSONRPCException as e:
            err_msg = 'remote or local PIVX-cli running?'
            printException(getCallerName(), getFunctionName(), err_msg, e)
        except Exception as e:
            err_msg = 'remote or local PIVX-cli running?'
            printException(getCallerName(), getFunctionName(), err_msg, e)

    def decodeRawTransaction(self, rawTx):
        """Decode a raw transaction hex string."""
        try:
            return self.conn.decoderawtransaction(rawTx)
        except Exception as e:
            err_msg = 'error in decodeRawTransaction'
            printException(getCallerName(), getFunctionName(), err_msg, e.args)

    def getAddressUtxos(self, addresses):
        """UTXOs for the given addresses; unlike the others, re-raises on error."""
        try:
            return self.conn.getaddressutxos({'addresses': addresses})
        except Exception as e:
            err_msg = "error in getAddressUtxos"
            if str(e.args[0]) != "Request-sent":
                printException(getCallerName(), getFunctionName(), err_msg, e.args)
            else:
                printException(getCallerName(), getFunctionName(), err_msg, e.args)
            raise e

    def getBlockCount(self):
        """Current block height, or None on failure."""
        try:
            n = self.conn.getblockcount()
            return n
        except Exception as e:
            err_msg = 'remote or local PIVX-cli running?'
            if str(e.args[0]) != "Request-sent":
                printException(getCallerName(), getFunctionName(), err_msg, e.args)
            else:
                printException(getCallerName(), getFunctionName(), err_msg, e.args)

    def getBlockHash(self, blockNum):
        """Hash of the block at the given height, or None on failure."""
        try:
            h = self.conn.getblockhash(blockNum)
            return h
        except Exception as e:
            err_msg = 'remote or local PIVX-cli running?'
            if str(e.args[0]) != "Request-sent":
                printException(getCallerName(), getFunctionName(), err_msg, e.args)
            else:
                printException(getCallerName(), getFunctionName(), err_msg, e.args)

    def getFeePerKb(self):
        """Fee-per-kb estimate, floored at MINIMUM_FEE; None on failure."""
        try:
            # get transaction data from last 10 blocks
            feePerKb = float(self.conn.getfeeinfo(10)['feeperkb'])
            return (feePerKb if feePerKb > MINIMUM_FEE else MINIMUM_FEE)
        except Exception as e:
            err_msg = 'error in getFeePerKb'
            if str(e.args[0]) != "Request-sent":
                printException(getCallerName(), getFunctionName(), err_msg, e.args)
            else:
                printException(getCallerName(), getFunctionName(), err_msg, e.args)

    def getMNStatus(self, address):
        """Masternode status dict (with enabled-count added), or None."""
        try:
            mnStatusList = self.conn.listmasternodes(address)
            if not mnStatusList:
                return None
            mnStatus = mnStatusList[0]
            mnStatus['mnCount'] = self.conn.getmasternodecount()['enabled']
            return mnStatus
        except Exception as e:
            err_msg = "error in getMNStatus"
            if str(e.args[0]) != "Request-sent":
                printException(getCallerName(), getFunctionName(), err_msg, e.args)
            else:
                printException(getCallerName(), getFunctionName(), err_msg, e.args)

    def getProtocolVersion(self):
        """Node protocol version; falls back to DEFAULT_PROTOCOL_VERSION."""
        try:
            prot_version = self.conn.getinfo().get('protocolversion')
            return int(prot_version)
        except Exception as e:
            err_msg = 'error in getProtocolVersion'
            printException(getCallerName(), getFunctionName(), err_msg, e.args)
            return DEFAULT_PROTOCOL_VERSION

    def getRawTransaction(self, txid):
        """Raw transaction hex for txid, or None (e.g. while syncing)."""
        try:
            return self.conn.getrawtransaction(txid)
        except Exception as e:
            err_msg = "is Blockchain synced?"
            if str(e.args[0]) != "Request-sent":
                printException(getCallerName(), getFunctionName(), err_msg, e.args)
            return None

    def getStatus(self):
        """Return (connected, block_count); block_count 1 while indexing."""
        status = False
        n = 0
        try:
            n = self.conn.getblockcount()
            if n > 0:
                status = True
        except Exception as e:
            # If loading block index set lastBlock=1
            if str(e.args[0]) == "Loading block index..." or str(e.args[0]) == "Verifying blocks...":
                printDbg(str(e.args[0]))
                n = 1
            #else:
                #err_msg = "Error while contacting RPC server"
                #printException(getCallerName(), getFunctionName(), err_msg, e.args)
        return status, n

    def getStatusMess(self, status=None):
        """Human-readable connection status message."""
        if status == None:
            status = self.getStatus()
        if status:
            return "RPC status: CONNECTED!!!"
        else:
            return "RPC status: NOT CONNECTED. remote or local PIVX-cli running?"

    def isBlockchainSynced(self):
        """True when the masternode sync layer reports the chain as synced."""
        try:
            return self.conn.mnsync('status').get("IsBlockchainSynced")
        except Exception as e:
            err_msg = "error in isBlockchainSynced"
            printException(getCallerName(), getFunctionName(), err_msg, e.args)
            return False

    def decodemasternodebroadcast(self, work):
        """Decode a masternode broadcast message; "" on failure."""
        try:
            return self.conn.decodemasternodebroadcast(work.strip())
        except Exception as e:
            err_msg = "error in decodemasternodebroadcast"
            printException(getCallerName(), getFunctionName(), err_msg, e.args)
            return ""

    def relaymasternodebroadcast(self, work):
        """Relay a masternode broadcast message; "" on failure."""
        try:
            return self.conn.relaymasternodebroadcast(work.strip())
        except Exception as e:
            err_msg = "error in relaymasternodebroadcast"
            printException(getCallerName(), getFunctionName(), err_msg, e.args)
            return ""

    def sendRawTransaction(self, tx_hex):
        """Broadcast a raw transaction; returns its txid, or None on failure."""
        try:
            tx_id = self.conn.sendrawtransaction(tx_hex)
            return tx_id
        except Exception as e:
            err_msg = 'error in rpcClient.sendRawTransaction'
            if str(e.args[0]) != "Request-sent":
                printException(getCallerName(), getFunctionName(), err_msg, e.args)
            else:
                printException(getCallerName(), getFunctionName(), err_msg, e.args)

    def verifyMessage(self, pivxaddress, signature, message):
        """Verify a signed message against a PIVX address; None on failure."""
        try:
            return self.conn.verifymessage(pivxaddress, signature, message)
        except Exception as e:
            err_msg = "error in verifyMessage"
            printException(getCallerName(), getFunctionName(), err_msg, e.args)
class RPCConnection: """Creates a continuous connection to the bitcoind RPC interface. RPC connections are configured by the configuration file referred to by `CONFIG_FILENAME`. Attributes: conn (`AuthServiceProxy`): A connection to the RPC interface. """ def __init__(self): config_parser = ConfigParser.ConfigParser() config_parser.read(CONFIG_FILENAME) username = config_parser.get(section='RPC', option='rpc_username') password = config_parser.get(section='RPC', option='rpc_password') host = config_parser.get(section='RPC', option='rpc_host') port = config_parser.get(section='RPC', option='rpc_port') self.conn = AuthServiceProxy("http://%s:%s@%s:%s" % (username, password, host, port)) def get_block_hash_at_height(self, block_height): """Get the hash of the block at the specified block height.""" return self.conn.getblockhash(block_height) def get_json_for_block_hash(self, block_hash): """Get a JSON represntation of the specified block.""" return self.conn.getblock(block_hash) def get_tx_ids_at_height(self, block_height): """Get a list of transaction IDs contained in the specified block.""" block_hash = self.get_block_hash_at_height(block_height) tx_json = self.get_json_for_block_hash(block_hash) tx_ids = [] for tx_id in tx_json['tx']: tx_ids.append(tx_id) return tx_ids def get_raw_tx(self, tx_id): """Return transaction in raw format. If the requested transaction is the sole transaction of the genesis block, bitcoind's RPC interface will throw an error 'No information available about transaction (code -5)' so we preempt this by raising a custom error that callers should handle; iterating callers should just move onto the next tx. 
""" if tx_id == ('4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7af' 'deda33b'): raise IndexError else: return self.conn.getrawtransaction(tx_id) def get_decoded_tx(self, tx_id): """Gets the transaction in JSON format from the RPC interface.""" try: return self.conn.decoderawtransaction(self.get_raw_tx(tx_id)) except IndexError: #bitcoind won't generate this, but here's what it would look like genesis_json = { 'txid': ('4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2' '127b7afdeda33b'), 'version': 1, 'locktime': 0, 'vin': [{ "sequence":4294967295, 'coinbase': ('04ffff001d0104455468652054696d65732030332f4a6' '16e2f32303039204368616e63656c6c6f72206f6e2062' '72696e6b206f66207365636f6e64206261696c6f75742' '0666f722062616e6b73') }], 'vout': [ { 'value': 50.00000000, 'n': 0, 'scriptPubKey': { 'asm': ('04678afdb0fe5548271967f1a67130b7105cd6a828' 'e03909a67962e0ea1f61deb649f6bc3f4cef38c4f3' '5504e51ec112de5c384df7ba0b8d578a4c702b6bf1' '1d5f OP_CHECKSIG'), 'hex': ('4104678afdb0fe5548271967f1a67130b7105cd6a8' '28e03909a67962e0ea1f61deb649f6bc3f4cef38c4' 'f35504e51ec112de5c384df7ba0b8d578a4c702b6b' 'f11d5fac'), 'reqSigs': 1, 'type': 'pubkey', 'addresses': ['1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa'] } } ] } return genesis_json
# Emit the tail of the node status page: inline CSS, node summary, and a
# donation footer. `ff` (open file), `node_ip`, `info` (getinfo result),
# `access` (RPC handle), `node_location`, `node_name` and
# `donation_xpy_addr` are defined earlier in the script.
# BUGFIX: the original wrote "<br \>" (a literal backslash) in several
# places, inconsistent with the well-formed "<br />" used further down;
# all break tags are now "<br />".
ff.write("h1{")
ff.write("color: #08c;")
ff.write("font-size: 42px;")
ff.write("font-weight: 200;")
ff.write("letter-spacing: -0.01em;")
ff.write("line-height: 1.14286;")
ff.write("margin-bottom: 18px;")
ff.write("}")
ff.write("</style>")
ff.write("<div id='wrap'>")
ff.write("<h1>Paycoin Node: " + node_ip + ":8998<br /></h1>")
ff.write("<h3>")
# NOTE: %l and %-d are glibc strftime extensions — not portable to Windows.
ff.write("Last Updated: " + time.strftime("%l:%M:%S %p (%z %Z) on %-d %b %Y") + "<br />\n")
ff.write("Node Version: " + str(info['version']) + "<br />\n")
ff.write("Protocol Version: " + str(info['protocolversion']) + "<br />\n")
ff.write("Connections: " + str(info['connections']) + "<br />\n")
ff.write("Blocks: <a href='https://ledger.paycoin.com/block/" +
         str(access.getblockhash(info['blocks'])) + "'>" + str(info['blocks']) + "</a><br />\n")
ff.write("Location: " + node_location + "<br />")
ff.write("Node created by " + node_name + "<br />")
ff.write("Donate: <a href='https://ledger.paycoin.com/address/" + donation_xpy_addr + "'>")
ff.write(donation_xpy_addr + "</a>")
ff.write("</h3>")
ff.write("<img src='http://qrfree.kaywa.com/?l=1&s=4&d=" + donation_xpy_addr + "' alt='QRCode'>")
ff.write("</div>")
ff.write("</body></html>")
ff.close()
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
import config
import logging

# Wire-level debug logging for every RPC round trip.
logging.basicConfig()
logging.getLogger("BitcoinRPC").setLevel(logging.DEBUG)

# rpc_user and rpc_password are set in the bitcoin.conf file
rpc_connection = AuthServiceProxy(
    "http://%s:%[email protected]:8332" % (config.rpcuser, config.rpcpassword))

block_hash = rpc_connection.getblockhash(1)
print(block_hash)
block = rpc_connection.getblock(block_hash)
print(block)

# TODO: get the first block, fetch its raw data, convert the hex to ascii,
# print the output, and check whether it contains any english words.
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException rpc_connection = AuthServiceProxy("http://%s:%[email protected]:8332" % ('username', 'password'), timeout=500) for i in range(0, 567032): bbh = rpc_connection.getblockhash(i) bh = rpc_connection.getblockheader(bbh) print bh["height"], bh["time"], bh["nTx"], bh["difficulty"], bh["nonce"]
def getBlock(i):
    """Fetch the full block JSON for height `i` over a fresh RPC connection."""
    proxy = AuthServiceProxy(
        "http://%s:%[email protected]:8332" % getCredentials())
    return proxy.getblock(proxy.getblockhash(i))
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException

CSVFiles = "D:\BTC CSV"  # NOTE(review): unused in this chunk — presumably used later

# Walk a fixed 100-block window and print each transaction's
# txid / block time / weight / output values.
rpc_connection = AuthServiceProxy(
    "http://%s:%[email protected]:8332" % ("username", "password"))
blockCount = rpc_connection.getblockcount()

for i in range(100000, 100100):  # blockCount):
    currentHash = rpc_connection.getblockhash(i)
    currentBlock = rpc_connection.getblock(currentHash)
    currentTime = currentBlock['time']
    for transaction in currentBlock['tx']:
        txInfo = rpc_connection.getrawtransaction(transaction)
        txDecoded = rpc_connection.decoderawtransaction(txInfo)
        processed = [txDecoded['txid'], currentTime, txDecoded['weight']]
        processed.extend(str(output['value']) for output in txDecoded['vout'])
        print("Processed: " + str(processed))
    print("Done with block " + str(i))
class Bitcoind_Real:
    """
    Connection to a Bitcoin daemon process.
    """

    def __init__(self, settings):
        """
        Arguments:
        settings: a settings object; must contain the attribute bitcoinRPCURL.

        Connects to a Bitcoin daemon process, indicated by
        settings.bitcoinRPCURL. If settings.bitcoinRPCURL is empty, this
        object will not be connected.
        """
        if settings.bitcoinRPCURL != "":
            log.log("Making connection to Bitcoin daemon...")
            self.access = AuthServiceProxy(settings.bitcoinRPCURL)
            log.log("...done")
        else:
            log.log("Bitcoin-RPC URL is not set: not connecting")
            self.access = None

    def isConnected(self):
        """
        Return value: bool
        Returns whether this object is connected.
        """
        return self.access != None

    def getBalance(self):
        """
        Return value: int, in Satoshi
        Returns the balance.
        """
        return self.DecimaltoAmount(self.access.getbalance())

    def getBlockCount(self):
        """
        Return value: int
        Returns the block count.
        """
        return self.access.getblockcount()

    def getNewAddress(self):
        """
        Return value: str, Base58Check-encoded address
        Generates and returns a new address.
        """
        return self.access.getnewaddress()

    def getPrivateKey(self, address):
        """
        Arguments:
        address: str, Base58Check-encoded address
        Return value: str, Base58Check-encoded private key
        Returns the private key corresponding to the given address.
        """
        return self.access.dumpprivkey(address)

    def getBlockInfoByBlockHeight(self, height):
        """
        Arguments:
        height: int
        Return value: dict with keys 'hash', 'merkleroot' (both hexadecimal
        str, Bitcoin hash byte order for the Merkle root) and 'time'
        (int, UNIX time).
        Returns information about the block (in the main chain) at the
        given height.
        """
        block_hash = self.access.getblockhash(height)
        block_info = self.access.getblock(block_hash)
        return {
            "hash": block_info["hash"],
            "merkleroot": block_info["merkleroot"],
            "time": block_info["time"],
        }

    def getTransactionHashesByBlockHeight(self, height):
        """
        Arguments:
        height: int
        Return value: list of str, hexadecimal, Bitcoin hash byte order
        Returns the transaction hashes in the block (in the main chain) at
        the given height.
        """
        block_hash = self.access.getblockhash(height)
        return self.access.getblock(block_hash)["tx"]

    def getTransaction(self, thash):
        """
        Arguments:
        thash: str, hexadecimal, Bitcoin hash byte order
        Return value: dict with keys 'vin' (list of dict: 'coinbase' for
        coinbase inputs, else 'txid'), 'hex' (serialized transaction) and
        'confirmations' (int).
        Returns information about the transaction indicated by the given hash.
        """
        # verbose=1 makes bitcoind return decoded JSON instead of raw hex
        return self.access.getrawtransaction(thash, 1)

    def importprivkey(self, privateKey, description, rescan):
        return self.access.importprivkey(privateKey, description, rescan)

    def listUnspent(self):
        """
        Return value: list of dict, each element containing:
        address (Base58Check str), amount (int, Satoshi),
        scriptPubKey (binary str), txid (binary str, OpenSSL byte order),
        vout (int).
        Returns information about the available unspent transaction outputs.
        """
        entries = self.access.listunspent()
        for entry in entries:
            # reversed; TODO: is this the right place?
            entry["txid"] = binascii.unhexlify(entry["txid"])[::-1]
            entry["scriptPubKey"] = binascii.unhexlify(entry["scriptPubKey"])
            entry["amount"] = self.DecimaltoAmount(entry["amount"])
        return entries

    def sendRawTransaction(self, txData):
        """
        Arguments:
        txData: str, binary
        Send the given serialized transaction over the Bitcoin network.
        """
        try:
            self.access.sendrawtransaction(txData.encode("hex"))
        except JSONRPCException as e:
            if e.error["code"] == RPC_TRANSACTION_ALREADY_IN_CHAIN:
                # It's perfectly fine (but very unlikely) that the transaction
                # is already in the block chain.
                # After all, we WANT it to end up in the block chain!
                pass
            else:
                raise

    def DecimaltoAmount(self, value):
        # BTC (Decimal/float) -> Satoshi (int)
        return int(value * 100000000)

    def handleMessage(self, msg):
        return [
            messages.BitcoinReturnValue(value=msg.function(self),
                                        ID=msg.returnID,
                                        channelIndex=msg.returnChannelIndex)
        ]
class DashdInterface(WndUtils):
    # Front end for a dashd node reached over RPC, optionally through an SSH
    # tunnel. Keeps an ordered list of connection configs and fails over to
    # the next one when the current node misbehaves.

    def __init__(self, config, window, connection=None,
                 on_connection_begin_callback=None,
                 on_connection_try_fail_callback=None,
                 on_connection_finished_callback=None):
        WndUtils.__init__(self, app_path=config.app_path)
        assert isinstance(config, AppConfig)
        self.config = config
        # conn configurations are used from the first item in the list; if one
        # fails, then next is taken
        if connection:
            # this parameter is used for testing specific connection
            self.connections = [connection]
        else:
            # get connection list orderd by priority of use
            self.connections = self.config.get_ordered_conn_list()
        self.cur_conn_index = 0
        if self.connections:
            self.cur_conn_def = self.connections[self.cur_conn_index]
        else:
            self.cur_conn_def = None
        # below is the connection with which particular RPC call has started;
        # if connection is switched because of problems with some nodes,
        # switching stops if we close round and return to the starting
        # connection
        self.starting_conn = None
        self.ssh = None
        self.window = window
        self.active = False
        self.rpc_url = None
        self.proxy = None
        # HTTPConnection object passed to the AuthServiceProxy (for convinient
        # connection reset)
        self.http_conn = None
        self.on_connection_begin_callback = on_connection_begin_callback
        self.on_connection_try_fail_callback = on_connection_try_fail_callback
        self.on_connection_finished_callback = on_connection_finished_callback
        self.last_error_message = None

    def apply_new_cfg(self):
        """ Called after any of connection config changed. """
        # get connection list orderd by priority of use
        self.disconnect()
        self.connections = self.config.get_ordered_conn_list()
        self.cur_conn_index = 0
        if not len(self.connections):
            raise Exception(
                'There is no connections to Dash network enabled in the configuration.'
            )
        self.cur_conn_def = self.connections[self.cur_conn_index]

    def disconnect(self):
        # Tear down the SSH tunnel (if any) and mark the interface inactive.
        if self.active:
            if self.ssh:
                self.ssh.disconnect()
                del self.ssh
                self.ssh = None
            self.active = False

    def mark_call_begin(self):
        # Remember which config the current RPC round started on, so failover
        # stops once it cycles back to this one.
        self.starting_conn = self.cur_conn_def

    def switch_to_next_config(self):
        """
        If there is another dashd config not used recently, switch to it.
        Called only when there was a problem with current connection config.
        :return: True if successfully switched ot False if there was no
        another config
        """
        if self.cur_conn_def:
            self.config.conn_cfg_failure(
                self.cur_conn_def)  # mark connection as defective
        if self.cur_conn_index < len(self.connections) - 1:
            idx = self.cur_conn_index + 1
        else:
            idx = 0  # wrap around to the first config
        conn = self.connections[idx]
        # stop when we've cycled back to the config the call started with
        if conn != self.starting_conn:
            self.disconnect()
            self.cur_conn_index = idx
            self.cur_conn_def = conn
            if not self.open():
                # recursively keep trying the remaining configs
                return self.switch_to_next_config()
            else:
                return True
        else:
            return False

    def mark_cur_conn_cfg_is_ok(self):
        if self.cur_conn_def:
            self.config.conn_cfg_success(self.cur_conn_def)

    def open(self):
        """
        Opens connection to dash RPC. If it fails, then the next enabled conn
        config will be used, if any exists.
        :return: True if successfully connected, False if user cancelled the
            operation. If all of the attempts fail, then appropriate exception
            will be raised.
        """
        try:
            if not self.cur_conn_def:
                raise Exception(
                    'There is no connections to Dash network enabled in the configuration.'
                )
            while True:
                try:
                    if self.open_internal():
                        break
                    else:
                        if not self.switch_to_next_config():
                            return False
                except UserCancelledConnection:
                    return False
                except (socket.gaierror, ConnectionRefusedError, TimeoutError,
                        socket.timeout) as e:
                    # exceptions raised by not likely functioning dashd node;
                    # try to switch to another node if there is any in the
                    # config
                    if not self.switch_to_next_config():
                        raise e  # couldn't use another conn config, raise exception
                    else:
                        break
        except Exception as e:
            self.last_error_message = str(e)
            raise
        return True

    def open_internal(self):
        """
        Try to establish connection to dash RPC daemon for current connection
        config.
        :return: True, if connection successfully establishes, False if user
            Cancels the operation (not always cancelling will be possible -
            only when user is prompted for a password).
        """
        if not self.active:
            if self.cur_conn_def.use_ssh_tunnel:
                # RPC over SSH
                while True:
                    self.ssh = DashdSSH(self.cur_conn_def.ssh_conn_cfg.host,
                                        self.cur_conn_def.ssh_conn_cfg.port,
                                        self.cur_conn_def.ssh_conn_cfg.username)
                    try:
                        logging.info('starting ssh.connect')
                        self.ssh.connect()
                        logging.info('finished ssh.connect')
                        break
                    except Exception as e:
                        logging.error('error in ssh.connect')
                        raise
                # configure SSH tunnel
                # get random local unprivileged port number to establish SSH
                # tunnel
                success = False
                local_port = None
                for try_nr in range(1, 10):
                    try:
                        logging.info('beginning ssh.open_tunnel')
                        local_port = randint(2000, 50000)
                        self.ssh.open_tunnel(local_port,
                                             self.cur_conn_def.host,
                                             int(self.cur_conn_def.port))
                        success = True
                        break
                    except Exception as e:
                        # port may be taken — retry with another random port
                        logging.error('error in ssh.open_tunnel loop')
                        pass
                logging.info('finished ssh.open_tunnel loop')
                if not success:
                    logging.error('finished ssh.open_tunnel loop with error')
                    return False
                else:
                    rpc_user = self.cur_conn_def.username
                    rpc_password = self.cur_conn_def.password
                    rpc_host = '127.0.0.1'  # SSH tunnel on loopback
                    rpc_port = local_port
            else:
                # direct RPC
                rpc_host = self.cur_conn_def.host
                rpc_port = self.cur_conn_def.port
                rpc_user = self.cur_conn_def.username
                rpc_password = self.cur_conn_def.password
            if self.cur_conn_def.use_ssl:
                self.rpc_url = 'https://'
                # NOTE(review): certificate verification is disabled here
                # (_create_unverified_context) — confirm this is intentional.
                self.http_conn = httplib.HTTPSConnection(
                    rpc_host, rpc_port, timeout=5,
                    context=ssl._create_unverified_context())
            else:
                self.rpc_url = 'http://'
                self.http_conn = httplib.HTTPConnection(rpc_host, rpc_port,
                                                        timeout=5)
            logging.info('AuthServiceProxy begin')
            self.rpc_url += rpc_user + ':' + rpc_password + '@' + rpc_host + ':' + str(
                rpc_port)
            self.proxy = AuthServiceProxy(self.rpc_url, timeout=1000,
                                          connection=self.http_conn)
            logging.info('AuthServiceProxy end')
            try:
                if self.on_connection_begin_callback:
                    try:
                        # make the owner know, we are connecting
                        logging.info('on_connection_begin_callback begin')
                        self.on_connection_begin_callback()
                        logging.info('on_connection_begin_callback end')
                    except:
                        pass
                # check the connection
                logging.info('starting http_conn.connect()')
                self.http_conn.connect()
                logging.info('finished http_conn.connect()')
                if self.on_connection_finished_callback:
                    try:
                        # make the owner know, we successfully finished
                        # connection
                        self.on_connection_finished_callback()
                    except:
                        pass
            except:
                if self.on_connection_try_fail_callback:
                    try:
                        # make the owner know, connection attempt failed
                        self.on_connection_try_fail_callback()
                    except:
                        pass
                raise
            finally:
                logging.info('http_conn.close()')
                self.http_conn.close()
                # timeout hase been initially set to 5 seconds to perform
                # 'quick' connection test
                self.http_conn.timeout = 20
            self.active = True
        return self.active

    def get_active_conn_description(self):
        # Short human-readable label of the current connection config.
        if self.cur_conn_def:
            return self.cur_conn_def.get_description()
        else:
            return '???'
@control_rpc_call def getblockcount(self): if self.open(): return self.proxy.getblockcount() else: raise Exception('Not connected') @control_rpc_call def getblockhash(self, block): if self.open(): return self.proxy.getblockhash(block) else: raise Exception('Not connected') @control_rpc_call def getinfo(self): if self.open(): return self.proxy.getinfo() else: raise Exception('Not connected') @control_rpc_call def issynchronized(self): if self.open(): # if connecting to HTTP(S) proxy do not check if dash daemon is synchronized if self.cur_conn_def.is_http_proxy(): return True else: syn = self.proxy.mnsync('status') return syn.get('IsSynced') else: raise Exception('Not connected') @control_rpc_call def mnsync(self): if self.open(): # if connecting to HTTP(S) proxy do not call this function - it will not be exposed if self.cur_conn_def.is_http_proxy(): return {} else: return self.proxy.mnsync('status') else: raise Exception('Not connected') @control_rpc_call def masternodebroadcast(self, what, hexto): if self.open(): return self.proxy.masternodebroadcast(what, hexto) else: raise Exception('Not connected') @control_rpc_call def get_masternodelist(self, *args): if self.open(): return self.proxy.masternodelist(*args) else: raise Exception('Not connected') @control_rpc_call def get_masternodeaddr(self): if self.open(): return self.proxy.masternodelist('addr') else: raise Exception('Not connected') @control_rpc_call def getaddressbalance(self, address): if self.open(): return self.proxy.getaddressbalance({ 'addresses': [address] }).get('balance') else: raise Exception('Not connected') @control_rpc_call def getaddressutxos(self, addresses): if self.open(): return self.proxy.getaddressutxos({'addresses': addresses}) else: raise Exception('Not connected') @control_rpc_call def getrawtransaction(self, txid, verbose): if self.open(): return self.proxy.getrawtransaction(txid, verbose) else: raise Exception('Not connected') @control_rpc_call def getblockhash(self, blockid): if 
self.open(): return self.proxy.getblockhash(blockid) else: raise Exception('Not connected') @control_rpc_call def getblockheader(self, blockhash): if self.open(): return self.proxy.getblockheader(blockhash) else: raise Exception('Not connected') @control_rpc_call def validateaddress(self, address): if self.open(): return self.proxy.validateaddress(address) else: raise Exception('Not connected') @control_rpc_call def decoderawtransaction(self, tx): if self.open(): return self.proxy.decoderawtransaction(tx) else: raise Exception('Not connected') @control_rpc_call def sendrawtransaction(self, tx): if self.open(): return self.proxy.sendrawtransaction(tx) else: raise Exception('Not connected')
# Scrape all registered usernames from a Twister node into a small dbm cache.
# Python 2 script (`has_key`, `anydbm`); `sys`, `os`, `anydbm` and
# `main_config` are expected to be imported/defined earlier in the file.
dbfilename = 'data/usernames.db'
cacheTimeout = 24*3600  # seconds (24h)

try:
    from bitcoinrpc.authproxy import AuthServiceProxy
except ImportError as exc:
    sys.stderr.write("Error: install python-bitcoinrpc (https://github.com/jgarzik/python-bitcoinrpc)\n")
    exit(-1)

twister = AuthServiceProxy(main_config['rpc_url'])

try:
    db = anydbm.open(os.path.expanduser(dbfilename), 'c')
    if not 'lastblockhash' in db.keys():
        db['lastblockhash'] = twister.getblockhash(0)
    nextHash = db['lastblockhash']
except ImportError as exc:
    # NOTE(review): anydbm.open does not raise ImportError, so this handler is
    # unreachable for real open failures — confirm the intended exception type.
    sys.stderr.write("Did not manage to open databases\n")
    exit(-1)

while True:
    block = twister.getblock(nextHash)
    db['lastblockhash'] = block["hash"]
    #print str(block["height"]) + "\r",
    usernames = block["usernames"]
    for u in usernames:
        if not str(u) in db.keys():
            # BUGFIX(reconstruction): the published source was credential-
            # scrubbed into "db['user:'******'taken'"; the statement below is
            # the presumed original (marking the username as taken).
            db['user:' + str(u)] = 'taken'
    if block.has_key("nextblockhash"):
        nextHash = block["nextblockhash"]
    # NOTE(review): when "nextblockhash" is absent (chain tip), this loop
    # re-fetches the same block forever — consider breaking or sleeping.
class QtumBlockchain(BlockchainHandler):
    """Qtum blockchain handler backed by a JSON-RPC proxy."""

    def __init__(self, qtum_rpc):
        self.qtum_rpc = qtum_rpc
        # cached hex codecs used by get_block_id
        self.decode_hex = codecs.getdecoder("hex_codec")
        self.encode_hex = codecs.getencoder("hex_codec")

    @classmethod
    def from_http_provider(cls, http_provider):
        """Alternate constructor: build the proxy from an endpoint URL."""
        return cls(AuthServiceProxy(http_provider))

    def reload_http_provider(self, http_provider):
        """Replace the underlying RPC proxy with a fresh one."""
        self.qtum_rpc = AuthServiceProxy(http_provider)

    def get_block_count(self):
        return self.qtum_rpc.getblockcount()

    def get_balance(self):
        return self.qtum_rpc.getbalance()

    def get_last_block_hash(self):
        return self.qtum_rpc.getbestblockhash()

    def get_second_last_block_hash(self):
        return self.get_block_hash(self.get_block_count() - 1)

    def get_block_hash(self, height):
        return self.qtum_rpc.getblockhash(height)

    def get_block_id(self, height):
        """Compact id: first 10 hex chars of sha256(raw block hash) followed
        by the height, zero-padded to 10 characters."""
        raw_hash = self.decode_hex(self.get_block_hash(height))[0]
        digest = sha256(raw_hash).hexdigest()
        return digest[0:10] + hex(height)[2:].rjust(10, '0')

    def get_last_block_id(self):
        return self.get_block_id(self.get_block_count())

    def get_second_last_block_id(self):
        return self.get_block_id(self.get_block_count() - 1)

    def get_accounts(self):
        """Addresses that currently own at least one UTXO."""
        return [tx['address'] for tx in self.qtum_rpc.listunspent()]

    def get_unspent(self):
        """Mapping address -> amount over the current UTXO set."""
        return {tx['address']: tx['amount'] for tx in self.qtum_rpc.listunspent()}

    def from_hex_address(self, address):
        return self.qtum_rpc.fromhexaddress(address)
class TwisterScraper:
    # Crawls a Twister node over JSON-RPC, caching user profiles in a pickled
    # TwisterDb on disk.

    CACHE_MAX_DURATION = datetime.timedelta(7)  # ([days [, seconds [,microseconds]]])

    def __init__(self, dbPath, server='localhost', port=28332,
                 user='******', password='******', protocol='http'):
        # (the user/password defaults look credential-scrubbed in the
        # published source)
        self.serverUrl = '{protocol}://{user}:{passwd}@{server}:{port}'.format(
            protocol=protocol, server=server, port=port, user=user,
            passwd=password)
        self.twister = AuthServiceProxy(self.serverUrl)
        self.dbFile = dbPath
        self.locService = GeoLocationService()
        try:
            with open(self.dbFile, 'rb') as dbFile:
                self.db = pickle.load(dbFile)
        except FileNotFoundError:
            # first run: start a fresh database on disk
            self.db = TwisterDb()
            self.saveDb()

    def get_user(self, username):
        # Cache-only lookup; None when the user was never scraped.
        if username in self.db.users:
            return self.db.users[username]
        else:
            return None

    def scrape_users(self):
        # Walk the entire block chain collecting usernames, then fetch
        # profile details for new or stale users.
        nextHash = 0  # NOTE(review): dead assignment, overwritten next line
        nextHash = self.twister.getblockhash(0)
        usernames = set()
        index = 0
        while True:
            block = self.twister.getblock(nextHash)
            self.db.lastBlockHash = block['hash']
            usernames = usernames.union(set(block['usernames']))
            if len(usernames) > index:
                index = len(usernames)
                print('Found {0} usernames'.format(index))
            if "nextblockhash" in block:
                nextHash = block["nextblockhash"]
            else:
                break
        if len(self.db.users) == 0:
            # first run
            # NOTE(review): blankUser is constructed but never inserted into
            # self.db.users, so only an empty db is saved here — confirm.
            for u in usernames:
                blankUser = User()
                blankUser.username = u
                blankUser.updateTime = datetime.datetime.now() - self.CACHE_MAX_DURATION
            self.saveDb()
        now = datetime.datetime.now()
        old_users = self.db.users.keys()
        need_refresh = [u for u in old_users
                        if (self.db.users[u].updateTime + self.CACHE_MAX_DURATION) < now]
        new_users = usernames.difference(set(old_users))
        to_fetch = new_users.union(set(need_refresh))
        total_to_fetch = len(to_fetch)
        for n, u in enumerate(to_fetch):
            try:
                user = self._fetch_user_details(u)
                # NOTE(review): `u` is the username *string*, so this hasattr
                # is always False and u.locate() never runs; `user` was
                # probably intended — confirm.
                if hasattr(u, 'location'):
                    try:
                        u.locate()
                    except MaxGeoRequestsException:
                        # (message placeholder looks scrubbed in the source)
                        print("Could not locate '' because of max request limit reached")
                self.db.users[user.username] = user
                if n % 5 == 0:
                    # periodic checkpoint so progress survives a crash
                    self.saveDb()
                print("({line} of {total}) Fetched {user} ...".format(
                    user=u, line=n, total=total_to_fetch))
            except HTTPException as e:
                print("Connection error retrieving user {0}: {1}".format(u, str(e)))

    def saveDb(self):
        # Pickle the whole db to disk; on interruption, retry once after
        # closing the (possibly hung) descriptor, then re-raise.
        print("Saving db")
        try:
            with open(self.dbFile, 'wb') as dbFile:
                pickle.dump(self.db, dbFile)
        except (KeyboardInterrupt, Exception):
            print("Closing db before quitting...")
            if dbFile:
                # close the hung descriptor and re-try the dumping
                try:
                    dbFile.close()
                except Exception:
                    pass
                with open(self.dbFile, 'wb') as dbFile:
                    pickle.dump(self.db, dbFile)
            # once clean, re-raise
            raise

    def get_posts_since(self, username, dateObj, maxNum=1000):
        # Return the user's posts newer than `dateObj`.
        # NOTE(review): `maxNum` is unused (the literal 1000 is passed).
        since_epoch = time.mktime(dateObj.timetuple())
        all_posts = self.twister.getposts(1000, [{'username': username}])
        all_posts = sorted(all_posts, key=lambda x: x['userpost']['time'])
        index = int(len(all_posts) / 2)

        def _post_time(i):
            return all_posts[i]['userpost']['time']

        # NOTE(review): `0 > index > len(all_posts)` can never be true, so
        # this bisection loop never executes and the unadjusted midpoint is
        # returned — presumably `0 < index < len(all_posts)` was intended.
        while 0 > index > len(all_posts):
            if _post_time(index - 1) < since_epoch < _post_time(index + 1):
                if _post_time(index) < since_epoch:
                    index += 1
                break
            elif _post_time(index) > since_epoch:
                index = int(index / 2)
            elif _post_time(index) < since_epoch:
                index = int(index + index / 2)
        return all_posts[index:]

    def _fetch_user_details(self, username):
        # Pull avatar, profile fields and following list from the DHT.
        user = User()
        user.username = username
        avatarData = self.twister.dhtget(username, "avatar", "s")
        if len(avatarData) == 1:
            if 'p' in avatarData[0]:
                if 'v' in avatarData[0]['p']:
                    user.avatar = avatarData[0]['p']['v']
        profileData = self.twister.dhtget(username, 'profile', 's')
        if len(profileData) == 1:
            if 'p' in profileData[0]:
                if 'v' in profileData[0]['p']:
                    profile = profileData[0]['p']['v']
                    for key in ['location', 'url', 'bio', 'fullname']:
                        if key in profile:
                            setattr(user, key, profile[key])
        user.following = self.twister.getfollowing(username)
        user.updateTime = datetime.datetime.now()
        return user
class Parsing_block():
    # Walks Qtum blocks and maintains per-address balances in a wallet table.

    def __init__(self, from_block=0, to_block=-1, db_name="pars",
                 collection="wallet7"):
        self.from_block = from_block
        self.to_block = to_block
        # NOTE(review): RPC credentials are hard-coded here — move to config.
        self.qtum = AuthServiceProxy("http://%s:%[email protected]:8333" %
                                     ("qtumuser", "qtum2018"))
        self.db_wallet = Table_new(db_name, collection)

    def block_hash_num(self, block=None):
        # Hash of `block` (defaults to self.from_block).
        # NOTE(review): the bare except makes every failure return None.
        try:
            if not block:
                block = self.from_block
            block_hash = self.qtum.getblockhash(block)
            return block_hash
        except:
            pass

    def get_transaction_in_block(self, block_hash=None):
        # List of txids in the given block (defaults to self.from_block's).
        try:
            if not block_hash:
                block_hash = self.block_hash_num()
            block = self.qtum.getblock(block_hash)
            list_tx = block["tx"]
            return list_tx
        except:
            pass

    def get_raw_transaction(self, transaction_blocks=None):
        # Raw hex for each txid in `transaction_blocks`.
        try:
            if not transaction_blocks:
                transaction_blocks = self.get_transaction_in_block()
            transaction_list = []
            for transaction_block in transaction_blocks:
                try:
                    transaction_data = self.qtum.getrawtransaction(
                        transaction_block)
                except JSONRPCException:
                    # NOTE(review): passing a *txid* to sendrawtransaction
                    # cannot succeed; this fallback looks like a mistake —
                    # confirm the original intent.
                    try:
                        send_data = self.qtum.sendrawtransaction(
                            transaction_block)
                        pprint(send_data)
                    except JSONRPCException:
                        pass
                else:
                    transaction_list += [transaction_data]
            return transaction_list
        except:
            pass

    '''
    def insert_db(self, vout):
        for vout_i in vout:
            try:
                n_dict = {}
                script_pub_key = vout_i["scriptPubKey"]
                addresses = script_pub_key["addresses"]
                value = vout_i["value"]
                n = vout_i["n"]
                n_str = str(n)
                list_adr = []
                for iter_adr in addresses:
                    list_adr += [{iter_adr: value}]
                n_dict[n_str] = list_adr
                print(n_dict)
            except KeyError:
                pass
    '''

    def transaction_in(self, vin):
        # For each input, look up the spent output and decrement the balance
        # of the addresses that owned it (value converted to satoshi).
        try:
            for vin_i in vin:
                try:
                    txid = vin_i["txid"]
                    vout_num = vin_i["vout"]
                    encoded_datas = self.get_raw_transaction([txid])
                    for i in encoded_datas:
                        transaction_data = self.qtum.decoderawtransaction(i)
                        vout_prev = transaction_data["vout"]
                        vout_prev_data = vout_prev[vout_num]
                        value_dec = vout_prev_data["value"]
                        script_pub_key = vout_prev_data["scriptPubKey"]
                        addresses = script_pub_key["addresses"]
                        value_int = int(value_dec * (10**8))
                        for address in addresses:
                            news = self.db_wallet.update_inc(
                                address, "value", -value_int)
                except KeyError:
                    # coinbase inputs have no "txid"/"vout" keys
                    pass
        except:
            pass

    def transaction_out(self, vout):
        # For each output, credit the owning addresses (insert a zero row
        # first if the address is unknown).
        try:
            for vout_i in vout:
                try:
                    script_pub_key = vout_i["scriptPubKey"]
                    addresses = script_pub_key["addresses"]
                    value = vout_i["value"]
                    value_int = int(value * (10**8))
                    for adr in addresses:
                        if not self.db_wallet.find({'id': adr}):
                            data = self.db_wallet.insert(adr, **{"value": 0})
                        news = self.db_wallet.update_inc(
                            adr, "value", value_int)
                        #self.db_wallet.delete(adr)
                except KeyError:
                    # non-standard outputs may lack "addresses"
                    pass
        except:
            pass

    def decode_raw_transaction(self, encoded_datas=None):
        # Decode each raw transaction and apply outputs then inputs to the
        # balance table.
        try:
            if not encoded_datas:
                encoded_datas = self.get_raw_transaction()
            for encoded_data in encoded_datas:
                transaction_data = self.qtum.decoderawtransaction(encoded_data)
                vin = transaction_data["vin"]
                vout = transaction_data["vout"]
                self.transaction_out(vout)
                self.transaction_in(vin)
        except:
            pass

    def show_db(self):
        return self.db_wallet.show_db()
# Fragment of a larger stats-collection script: `supply_data` and the RPC
# handle `conn` are created/loaded before this point, and the trailing
# `while` loop body continues past the end of this chunk.

# (Re)initialise the supply time series with a single genesis data point.
supply_data["blocks_axis"] = [0]
supply_data["time_axis"] = [0]
supply_data["shield_supply"] = [0]

# Per-block network statistics series, all aligned on blocks_axis.
network_data = {}
network_data["blocks_axis"] = [0]
network_data["time_axis"] = [0]
network_data["difficulty"] = [0]
network_data["blocktime"] = [0]
network_data["blocksize"] = [0]
network_data["txs"] = [0]
network_data["fees_ttl"] = [0]
network_data["fees_perKb"] = [0]

# Check if a reorg occurred
# NOTE(review): given the unconditional resets just above, blocks_axis has
# length 1 here, so this branch can never fire in this chunk — presumably
# the resets are conditional (fresh-start only) in the full script, and
# supply_data["lastBlockHash"] is loaded from saved state. Confirm.
last_block_hash = conn.getblockhash(supply_data["blocks_axis"][-1])
if (len(supply_data["blocks_axis"]) > 6
        and last_block_hash != supply_data["lastBlockHash"]):
    # remove 3 datapoints to be extra safe
    for data_key in ["blocks_axis", "time_axis", "shield_supply"]:
        supply_data[data_key] = supply_data[data_key][:-3]
    for data_key in network_data:
        network_data[data_key] = network_data[data_key][:-3]

# Add new data points
blockCount = conn.getblockcount()
# Sample every 100 blocks up to the current tip.
while supply_data["blocks_axis"][-1] + 100 <= blockCount:
    # fetch block N+100
    new_block_num = supply_data["blocks_axis"][-1] + 100
    # (loop body continues beyond this chunk)
from bitcoinrpc.authproxy import AuthServiceProxy
from datetime import datetime

# rpc_user and rpc_password are set in the bitcoin.conf file
rpc_connection = AuthServiceProxy("http://*****:*****@47.105.119.12:18332")

# A handful of one-off RPC probes against a testnet node.
best_block_hash = rpc_connection.getblockhash(202)
print(rpc_connection.getblock(best_block_hash))
print(rpc_connection.getblockcount())
print(rpc_connection.listreceivedbyaddress())
print(rpc_connection.listaddressgroupings())
print(rpc_connection.listaccounts())
print(rpc_connection.getnewaddress())
print('=' * 20)

# batch support : print timestamps of blocks 0 to 99 in 2 RPC round-trips:
commands = [["getblockhash", height] for height in range(100)]
block_hashes = rpc_connection.batch_(commands)
blocks = rpc_connection.batch_([["getblock", h] for h in block_hashes])
block_times = [
    datetime.utcfromtimestamp(b["time"]).strftime("%Y-%m-%d %H:%M:%S")
    for b in blocks
]
print(block_times)
print('=' * 20)

#rpc_connection.sendrawtransaction('test')
print('=' * 20)

import hashlib
import bitcoin
import bitcoin.rpc
def do(self):
    """Poll bitcoind for newly received transactions and mirror them into
    the Transaction table, then record the processed block height.

    Reprocesses a few trailing blocks on every run so that reorg-affected
    transactions are refreshed rather than duplicated.
    """
    rpc = AuthServiceProxy('http://' + settings.BITCOIN_RPC_USERNAME + ':' +
                           settings.BITCOIN_RPC_PASSWORD + '@' +
                           settings.BITCOIN_RPC_IP + ':' +
                           str(settings.BITCOIN_RPC_PORT))

    # Total number of blocks
    blocks = rpc.getblockcount()
    blocks_processed_queryset = CurrentBlockHeight.objects.order_by('-block_height')
    blocks_processed = blocks_processed_queryset[0].block_height if blocks_processed_queryset.count() else 0

    # Now incoming transactions will be processed and added to database.
    # Transactions from new blocks are selected, but also transactions from
    # several older blocks. These extra transactions are updated in case
    # something (for example fork?) is able to modify transactions in old
    # blocks.
    EXTRA_BLOCKS_TO_PROCESS = 6
    process_since = max(0, blocks_processed - EXTRA_BLOCKS_TO_PROCESS)
    process_since_hash = rpc.getblockhash(process_since)

    # Get all old transactions, that require updating
    old_txs = Transaction.objects.filter(incoming_txid__isnull=False,
                                         block_height__gt=process_since)
    old_txs = [old_tx for old_tx in old_txs]  # materialise the queryset

    txs = rpc.listsinceblock(process_since_hash)['transactions']
    for tx in txs:
        # Skip other than receiving transactions
        if tx['category'] != 'receive':
            continue
        # Skip unconfirmed transactions for now
        # TODO: Show these too!
        if 'blockhash' not in tx:
            continue
        # Get required info
        txid = tx['txid']
        address = tx['address']
        amount = tx['amount']
        block_height = rpc.getblock(tx['blockhash'])['height']
        created_at = datetime.datetime.utcfromtimestamp(tx['timereceived']).replace(tzinfo=pytz.utc)
        # Skip transaction if it doesn't belong to any Wallet
        try:
            address = Address.objects.get(address=address)
        except Address.DoesNotExist:
            continue
        # Check if transaction already exists
        already_found = False
        for old_tx in old_txs:
            if old_tx.incoming_txid == txid:
                # Transaction already exists, so do not care about it any more
                old_txs.remove(old_tx)
                already_found = True
                break
        # If transaction is new one
        if not already_found:
            new_tx = Transaction.objects.create(
                wallet=address.wallet,
                amount=amount,
                description='Received',
                incoming_txid=txid,
                block_height=block_height,
                receiving_address=address,
            )
            # override auto-set creation time with the node's receive time
            new_tx.created_at = created_at
            new_tx.save(update_fields=['created_at'])

    # Clean remaining old transactions
    # (anything left in old_txs no longer exists on-chain in this window)
    for old_tx in old_txs:
        old_tx.delete()

    # Mark down what the last processed block was
    blocks = rpc.getblockcount()
    if blocks_processed_queryset.count() > 0:
        blocks_processed_queryset.update(block_height=blocks)
    else:
        CurrentBlockHeight.objects.create(block_height=blocks)
# Fragment of a Python 2 audit script: the `if` guarding this with/else pair
# (likely an os.path.exists('progress.dat') check) lies before this chunk;
# `rpcuser`, `rpcpass`, `rpchost`, `txnconf` and `find_between` are defined
# elsewhere.
    with open('progress.dat', 'r') as progress:
        curblock = int(progress.read())
        # NOTE(review): no-op attribute access — `with` already closed the
        # file; probably meant as a (redundant) close call.
        progress.closed
else:
    curblock = 0

rpcpipe = AuthServiceProxy('http://' + rpcuser + ':' + rpcpass + '@' + rpchost + ':44663')

while (1 != 2):  # loop forever; exits via exit() below
    curblock = curblock + 1
    totalblk = rpcpipe.getblockcount()
    # Stop once within `txnconf` confirmations of the tip, persisting progress.
    if (curblock > totalblk - txnconf):
        with open('/root/moonaudit/progress.dat', 'w') as progress:
            progress.write(str(curblock - 1))
            progress.closed  # NOTE(review): no-op, as above
        exit()
    rawblockhash = rpcpipe.getblockhash(curblock)
    rawblockdata = rpcpipe.getblock(rawblockhash)
    print 'checking block %08d' % (curblock)
    # NOTE(review): scraping fields out of str(dict) with find_between is
    # fragile — the dict's repr ordering/format is not guaranteed.
    timestamp = find_between(str(rawblockdata), 'time\': ', ', u\'bits')
    sendnum = 0
    for txhash in rawblockdata['tx']:
        sendnum = sendnum + 1
        txraw = rpcpipe.getrawtransaction(txhash)
        txdata = rpcpipe.decoderawtransaction(txraw)
        curvout = -1
        for outputs in txdata['vout']:
            curvout = curvout + 1
            address = ''
            value = 0
            address = find_between(str(outputs), '[u\'', '\']')
            value = find_between(str(outputs), 'Decimal(\'', '\')')
            # (chunk truncated here; the loop body continues beyond this view)
class ParsingBlock():
    """Parse every transaction in a range of Qtum blocks and credit balances.

    Walks blocks via the Qtum JSON-RPC daemon (``AuthServiceProxy``),
    decodes each transaction, credits matching addresses through the
    balance service, and logs processed transactions to the storage
    service.

    NOTE(review): almost every method wraps its body in a bare
    ``try/except: pass`` and returns ``None`` on any failure, so callers
    must tolerate ``None`` results; errors are silently swallowed.
    ``coin_id``, ``qtum_server``, ``settings``, ``TablePars``,
    ``ClientBalance``, ``ClientStorge`` etc. are module-level names
    defined elsewhere in the project — their semantics are assumed here.
    """

    def __init__(self, from_block=0, to_block=-1, db_host=None, db_name=None):
        # from_block/to_block: block-height range to process (to_block=-1
        # presumably means "until the tip" — confirm against callers).
        self.from_block = from_block
        self.to_block = to_block
        self.coinid = coin_id
        # RPC proxy to the Qtum daemon.
        self.qtum = AuthServiceProxy(qtum_server)
        # External balance-service client.
        self.client = ClientBalance(settings.balanceurl)
        # Project database accessor (address lookups).
        self.db = TablePars(db_host, db_name)
        # Storage-service client used to log processed transactions.
        # NOTE(review): attribute name "storge" is a typo kept for
        # compatibility with existing callers.
        self.storge = ClientStorge(settings.storageurl)

    def block_hash_num(self, block=None):
        # get block hash for the given height (defaults to self.from_block);
        # returns None on any RPC error.
        try:
            if not block:
                block = self.from_block
            block_hash = self.qtum.getblockhash(block)
            return block_hash
        except:
            pass

    def get_transaction_in_block(self, block_hash=None):
        # get list of transaction ids in a block (defaults to the hash of
        # self.from_block); returns None on any RPC error.
        try:
            if not block_hash:
                block_hash = self.block_hash_num()
            block = self.qtum.getblock(block_hash)
            list_tx = block["tx"]
            return list_tx
        except:
            pass

    def get_raw_transaction(self, transaction_blocks=None):
        # get raw (hex) transactions for a list of txids; falls back to
        # gettransaction when getrawtransaction fails (e.g. wallet txs),
        # and silently skips txids that fail both calls.
        try:
            if not transaction_blocks:
                transaction_blocks = self.get_transaction_in_block()
            transaction_list = []
            for transaction_block in transaction_blocks:
                try:
                    transaction_data = self.qtum.getrawtransaction(
                        transaction_block)
                    transaction_list += [transaction_data]
                except JSONRPCException:
                    try:
                        transaction_data = self.qtum.gettransaction(
                            transaction_block)
                        transaction_list += [transaction_data]
                    except JSONRPCException:
                        pass
            return transaction_list
        except:
            pass

    def transaction_in(self, vin):
        # parsing input side: for each vin, decode the previous transaction
        # and collect {address: value_in_satoshi} for the spent output.
        # Coinbase inputs (no "txid" key) are skipped by the inner except.
        try:
            list_address = []
            for vin_i in vin:
                try:
                    txid = vin_i["txid"]
                    vout_num = vin_i["vout"]
                    encoded_datas = self.get_raw_transaction([txid])
                    for i in encoded_datas:
                        try:
                            transaction_data = self.qtum.decoderawtransaction(
                                i)
                            vout_prev = transaction_data["vout"]
                            vout_prev_data = vout_prev[vout_num]
                            value_dec = vout_prev_data["value"]
                            script_pub_key = vout_prev_data["scriptPubKey"]
                            addresses = script_pub_key["addresses"]
                            # convert decimal coin amount to integer satoshis
                            value_int = int(value_dec * (10**8))
                            for adr in addresses:
                                list_address += [{adr: value_int}]
                        except:
                            pass
                except:
                    pass
            return list_address
        except:
            pass

    def transaction_out(self, vout, txid):
        # parsing output side. Returns a list whose element 0 is a bool
        # ("at least one output credited a tracked address") followed by
        # {address: value_in_satoshi} dicts for credited outputs.
        try:
            list_address = [False]
            for vout_i in vout:
                try:
                    script_pub_key = vout_i["scriptPubKey"]
                    types = script_pub_key["type"]
                    # Special case: smart-contract "call" outputs — decode a
                    # token transfer() from the contract call data.
                    if types == "call" and self.coinid == coin_id:
                        asm = script_pub_key["asm"]
                        asm_split = asm.split()
                        # asm layout assumed: [version, gasLimit, gasPrice,
                        # calldata, contract_address, ...] — TODO confirm.
                        gasLimit = asm_split[1]
                        gasPrice = asm_split[2]
                        asm_data = asm_split[3]
                        # first 4 bytes (8 hex chars) = ABI method selector
                        hex_address = asm_data[:8]
                        smart_contr_address = asm_split[4]
                        if smart_contr_address in address_smart_contract and hex_address == sign_transfer:
                            data = asm_data[8:]
                            # transfer(address,uint) argument types
                            signatures_list_type = ['address', 'uint']
                            try:
                                decode = self.abi_to_params(
                                    data, signatures_list_type)
                                new_decode = self.change_decode(
                                    signatures_list_type, decode)
                                address_token = new_decode[0]
                                value_int = new_decode[1]
                                address_token = Qtum.hex_to_qtum_address(
                                    address_token, mainnet=mainnet_status)
                                result = self.db.check_address(
                                    address=address_token, coinid=coin_id_put)
                                result_keys = result.keys()
                                # only credit addresses known to our DB
                                if "address" in result_keys:
                                    update_data_1 = self.client.inc_balance(
                                        address_token, value_int, coin_id_put)
                                    self.storge.log_transaction(
                                        **{
                                            "coinid": coin_id_put,
                                            "blocknumber": self.from_block,
                                            "blockhash": self.block_hash_num(),
                                            "vin": [],
                                            "vout": [{
                                                address_token: value_int
                                            }],
                                            "txid": txid,
                                            "gasLimit": gasLimit,
                                            "gasPrice": gasPrice
                                        })
                            except Exception as e:
                                # print(e)
                                pass
                    # Regular value outputs: credit every known address.
                    addresses = script_pub_key["addresses"]
                    value = vout_i["value"]
                    value_int = int(value * (10**8))
                    for adr in addresses:
                        data = self.db.check_address(adr, self.coinid)
                        result_keys = data.keys()
                        if "address" in result_keys:
                            update_data_1 = self.client.inc_balance(
                                adr, value_int, coin_id)
                            # mark that this tx touched a tracked address
                            list_address[0] = True
                            list_address += [{adr: value_int}]
                except:
                    pass
            return list_address
        except:
            pass

    def decode_raw_transaction(self, encoded_datas=None):
        # decode raw transactions and, for any tx that credited a tracked
        # address, log it (with its resolved inputs) to the storage service.
        try:
            if not encoded_datas:
                encoded_datas = self.get_raw_transaction()
            for encoded_data in encoded_datas:
                try:
                    transaction_data = self.qtum.decoderawtransaction(
                        encoded_data)
                    # vin = transaction_data["vin"]
                    vout = transaction_data["vout"]
                    txid = transaction_data["txid"]
                    # self.transaction_in(vin)
                    vout_res = self.transaction_out(vout, txid)
                    # vout_res[0] is the "tracked address touched" flag;
                    # inputs are only resolved for such transactions.
                    if vout_res[0]:
                        vin_res = self.transaction_in(transaction_data["vin"])
                        self.storge.log_transaction(
                            **{
                                "coinid": self.coinid,
                                "blocknumber": self.from_block,
                                "blockhash": self.block_hash_num(),
                                "vin": vin_res,
                                "vout": vout_res[1:],
                                "txid": txid
                            })
                except:
                    pass
        except:
            pass

    def change_decode(self, signatures_list_type, decode):
        # Post-process ABI-decoded values: strip the "0x" prefix from the
        # address entry and decode bytes->str for string entries.
        decode = list(decode)
        if "address" in signatures_list_type:
            index_adr = signatures_list_type.index("address")
            decode_index_adr = decode[index_adr]
            new_adr = decode_index_adr[2:]
            decode[index_adr] = new_adr
        if "string" in signatures_list_type:
            index_str = signatures_list_type.index("string")
            decode_index_str = decode[index_str]
            new_str = decode_index_str.decode()
            decode[index_str] = new_str
        return decode

    def abi_to_params(self, abi, output_types):
        # Decode hex-encoded ABI call data into Python values per
        # output_types (delegates to the project's decode_abi).
        decode_hex = codecs.getdecoder("hex_codec")
        encode_hex = codecs.getencoder("hex_codec")  # NOTE(review): unused
        data = decode_hex(abi)[0]
        return decode_abi(output_types, data)

    def get_block_count(self):
        # Get current block count from the daemon (despite the original
        # comment, this queries RPC, not the database).
        return self.qtum.getblockcount()
#-------------------------------------------------- # Initialize RPC connection httpConnection = httplib.HTTPConnection(rpc_host, rpc_port, timeout=20) conn = AuthServiceProxy(rpc_url, timeout=1000, connection=httpConnection) # Read data from file try: with open("zdogecsupplydata.json", 'r') as f: data = json.load(f) except FileNotFoundError: # first run - fill initial empty supply data = {} nEmptyElems = int(nFirstZPivBlock / 100) data["lastBlockNum"] = nFirstZPivBlock data["lastBlockHash"] = conn.getblockhash(nFirstZPivBlock) data["denom_1"] = [0] * nEmptyElems data["denom_5"] = [0] * nEmptyElems data["denom_10"] = [0] * nEmptyElems data["denom_50"] = [0] * nEmptyElems data["denom_100"] = [0] * nEmptyElems data["denom_500"] = [0] * nEmptyElems data["denom_1000"] = [0] * nEmptyElems data["denom_5000"] = [0] * nEmptyElems data["total"] = [0] * nEmptyElems data["blocks_axis"] = [i * 100 for i in range(nEmptyElems)] # Check if a reorg occurred if conn.getblockhash(data["lastBlockNum"]) != data["lastBlockHash"] and len( data["blocks_axis"]) > 3: # remove 3 datapoints to be extra safe
class Bitcoind:
	"""
	Connection to a Bitcoin daemon process.
	"""

	def __init__(self, settings):
		"""
		Arguments:
		settings: a settings object; must contain the attribute bitcoinRPCURL.

		Connects to a Bitcoin daemon process, indicated by
		settings.bitcoinRPCURL. If settings.bitcoinRPCURL is empty, this object
		will not be connected.
		"""
		if settings.bitcoinRPCURL != "":
			log.log("Making connection to Bitcoin daemon...")
			self.access = AuthServiceProxy(settings.bitcoinRPCURL)
			log.log("...done")
		else:
			log.log("Bitcoin-RPC URL is not set: not connecting")
			self.access = None

	def isConnected(self):
		"""
		Return value: bool

		Returns whether this object is connected.
		"""
		# Fixed: identity comparison with None ("is not"), not "!=".
		return self.access is not None

	def getBalance(self):
		"""
		Return value: int, in Satoshi

		Returns the balance.
		"""
		return self.DecimaltoAmount(self.access.getbalance())

	def getBlockCount(self):
		"""
		Return value: int

		Returns the block count.
		"""
		return self.access.getblockcount()

	def getPrivateKey(self, address):
		"""
		Arguments:
		address: str, Base58Check-encoded address

		Return value: str, Base58Check-encoded private key

		Returns the private key corresponding to the given address.
		"""
		return self.access.dumpprivkey(address)

	def getTransactionHashesByBlockHeight(self, height):
		"""
		Arguments:
		height: int

		Return value: list of str, hexadecimal, Bitcoin hash byte order

		Returns the transaction hashes in the block (in the main chain) at the
		given height.
		"""
		bhash = self.access.getblockhash(height)
		block = self.access.getblock(bhash)
		return block["tx"]

	def getTransaction(self, thash):
		"""
		Arguments:
		thash: str, hexadecimal, Bitcoin hash byte order

		Return value:
		dict, containing:
			vin: list of dict, each element containing:
				coinbase [only for coinbase transactions]
				txid [only for non-coinbase transactions]:
					str, hexadecimal, Bitcoin hash byte order
					hash of input transaction

		Returns information about the transaction indicated by the given hash.
		"""
		return self.access.getrawtransaction(thash, 1)

	def listUnspent(self):
		"""
		Return value: list of dict, each element containing:
			address: str, Base58Check-encoded address
			amount: int, in Satoshi
			scriptPubKey: str, binary
			txid: str, binary, OpenSSL byte order
			vout: int

		Returns information about the available unspent transaction outputs.
		"""
		ret = self.access.listunspent()
		for vout in ret:
			vout["txid"] = binascii.unhexlify(vout["txid"])[::-1] #reversed; TODO: is this the right place?
			vout["scriptPubKey"] = binascii.unhexlify(vout["scriptPubKey"])
			vout["amount"] = self.DecimaltoAmount(vout["amount"])
		return ret

	def sendRawTransaction(self, txData):
		"""
		Arguments:
		txData: str, binary

		Send the given serialized transaction over the Bitcoin network.
		"""
		# Fixed: str.encode("hex") only exists on Python 2; use binascii
		# (already used by listUnspent) so the hex conversion works on both
		# Python 2 and Python 3. decode("ascii") yields a text hex string as
		# required by the sendrawtransaction RPC call.
		self.access.sendrawtransaction(
			binascii.hexlify(txData).decode("ascii"))

	def DecimaltoAmount(self, value):
		# Convert a coin amount (Decimal/float BTC) to integer Satoshi.
		return int(value*100000000)
class LocalBlockchainRPCReader(BlockExplorerReader):
    """Reads blockchain data from a local bitcoind via its RPC interface and
    converts it into a Blockchain.info-like ("BCI-like") tuple format so the
    rest of the project can stay agnostic about the data source."""

    rpc_connection = None
    #transaction_output_cache = None ''' deprecated '''

    def __init__(self, database_connector = None):
        BlockExplorerReader.__init__(self, database_connector) #super
        # RPC credentials/host come from the project config object set up by
        # the base class.
        self.rpc_connection = AuthServiceProxy(
            "http://%s:%s@%s:%s" % (self.config.RPC_USERNAME,
                                    self.config.RPC_PASSWORD,
                                    self.config.RPC_HOST,
                                    self.config.RPC_PORT))

    def get_current_blockchain_block_height(self):
        # Current best-chain height as reported by bitcoind.
        return self.rpc_connection.getblockcount() #TODO: Error checking? - This should already be an integer.

    #Retreives a list of transactions at specified block height. Each tx
    #   will be formatted as a BCI-like tuple per
    #   get_bci_like_tuple_for_tx_id().
    #param0: block_height: Height at which to get a list of txs for.
    #param1: use_tx_out_addr_cache_only (Optional): When looking up addresses
    #   for previous transactions, ONLY refer to cache in SQLite database,
    #   rather than slower option of using RPC interface. If set to True,
    #   process will sleep until the data is available in the cache. Default:
    #   False.
    def get_tx_list(self, block_height, use_tx_out_addr_cache_only = False):
        ids = self.get_tx_ids_at_height(block_height)
        txs = []
        for tx_id in ids:
            bci_like_tuple = self.get_bci_like_tuple_for_tx_id(
                tx_id, use_tx_out_addr_cache_only)
            txs.append(bci_like_tuple)
        return txs

    #Checks if the specified transaction is the first time the specified address
    #   has received funds. If it is, it will cache this for the specified
    #   block height in the database so subsequent lookups will answer
    #   correctly. IMPORTANT: This function assumes that that blocks are being
    #   processed in a complete, monotonically-increasing fashion from the
    #   genesis block. Otherwise, correct results not guaranteed! It is the
    #   caller's responsibility to ensure that enough blocks have been
    #   processed.
    def is_first_transaction_for_address(self, addr, tx_id, block_height,
                                         benchmarker = None):
        # NOTE(review): tx_id and benchmarker are accepted but unused here;
        # the answer is derived purely from the address-seen cache.
        if self.database_connector.has_address_been_seen_cache_if_not(addr, block_height):
            dprint("Address %s at block height %d was already seen." %
                   (addr, block_height))
            return False
        else:
            dprint("Address %s at block height %d has no prior tx history." %
                   (addr, block_height))
            return True

    def get_block_hash_at_height(self, block_height):
        # Block hash (hex str) for the main-chain block at this height.
        return self.rpc_connection.getblockhash(block_height)

    def get_tx_json_for_block_hash(self, block_hash):
        # Verbose block JSON; its 'tx' field lists txids.
        return self.rpc_connection.getblock(block_hash)

    def get_tx_ids_at_height(self, block_height):
        # List of txids (hex strings) contained in the block at this height.
        block_hash = self.get_block_hash_at_height(block_height)
        tx_json = self.get_tx_json_for_block_hash(block_hash)
        tx_ids = []
        for tx_id in tx_json['tx']:
            tx_ids.append(tx_id)
        return tx_ids

    #Returns the transaction in raw format. If the requested transaction is
    #   the sole transaction of the genesis block, bitcoind's RPC interface
    #   will throw an error 'No information available about transaction
    #   (code -5)' so we preempt this by raising a custom error that callers
    #   should handle; iterating callers should just move onto the next tx.
    #throws: NoDataAvailableForGenesisBlockError
    def get_raw_tx(self, tx_id):
        if tx_id == ('4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7af'
                     'deda33b'):
            raise custom_errors.NoDataAvailableForGenesisBlockError()
        else:
            return self.rpc_connection.getrawtransaction(tx_id)

    #Gets a human-readable string of the transaction in JSON format.
    def get_decoded_tx(self, tx_id):
        try:
            return self.rpc_connection.decoderawtransaction(
                self.get_raw_tx(tx_id))
        except custom_errors.NoDataAvailableForGenesisBlockError:
            #bitcoind won't generate this, but here's what it would look like
            genesis_json = {
                'txid': ('4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2'
                         '127b7afdeda33b'),
                'version': 1,
                'locktime': 0,
                'vin': [{
                    "sequence":4294967295,
                    'coinbase': ('04ffff001d0104455468652054696d65732030332f4a6'
                                 '16e2f32303039204368616e63656c6c6f72206f6e2062'
                                 '72696e6b206f66207365636f6e64206261696c6f75742'
                                 '0666f722062616e6b73')
                }],
                'vout': [
                    {
                        'value': 50.00000000,
                        'n': 0,
                        'scriptPubKey': {
                            'asm': ('04678afdb0fe5548271967f1a67130b7105cd6a828'
                                    'e03909a67962e0ea1f61deb649f6bc3f4cef38c4f3'
                                    '5504e51ec112de5c384df7ba0b8d578a4c702b6bf1'
                                    '1d5f OP_CHECKSIG'),
                            'hex': ('4104678afdb0fe5548271967f1a67130b7105cd6a8'
                                    '28e03909a67962e0ea1f61deb649f6bc3f4cef38c4'
                                    'f35504e51ec112de5c384df7ba0b8d578a4c702b6b'
                                    'f11d5fac'),
                            'reqSigs': 1,
                            'type': 'pubkey',
                            'addresses': ['1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa']
                        }
                    }
                ]
            }
            return genesis_json

    #Converts required infromation from local bitcoind RPC into a format similar
    #   to that returned by Blockchain.info's API. This helps to make the code
    #   more agnostic as to the source of blockchain data.
    #Note: When an output address cannot be decoded, BCI excludes the "addr"
    #   field from the JSON returned. Therefore, this function will do the same.
    #   See:
    #   https://blockchain.info/tx/cee16a9b222f636cd27d734da0a131cee5dd7a1d09cb5f14f4d1330b22aaa38e
    #Note: When a previous output address for an input cannot be decoded, BCI
    #   excludes the "addr" field from the JSON returned. Therefore, this
    #   function will do the same. See:
    #   https://blockchain.info/tx/8ebe1df6ebf008f7ec42ccd022478c9afaec3ca0444322243b745aa2e317c272
    #param0: tx_id: Specified transaction hash
    #param1: use_tx_out_addr_cache_only (Optional): When looking up addresses
    #   for previous transactions, ONLY refer to cache in SQLite database,
    #   rather than slower option of using RPC interface. If set to True,
    #   process will sleep until the data is available in the cache. Default:
    #   False.
    def get_bci_like_tuple_for_tx_id(self, tx_id,
                                     use_tx_out_addr_cache_only = False):
        json_tuple = {}
        json_tuple['hash'] = tx_id
        json_tuple['inputs'] = []
        json_tuple['out'] = []

        subscription = None
        if use_tx_out_addr_cache_only:
            subscription = data_subscription.TxOutputAddressCacheSubscriber(
                database = self.database_connector)

        tx_json = self.get_decoded_tx(tx_id)
        #populate input addresses
        for vin in tx_json['vin']:
            #look up address based on its previous transaction
            prev_txid = None
            if 'txid' in vin:
                prev_txid = vin['txid']
            prev_vout = None
            if 'vout' in vin:
                prev_vout_num = vin['vout'] #yes, this RPC field is poorly named
                prev_out = {'n': prev_vout_num}
                try:
                    if use_tx_out_addr_cache_only:
                        #flag specifies that we will wait for cache to catch up
                        #   before continuing this operation. Process/thread
                        #   will sleep until then.
                        subscription.next_tx_id_needed = prev_txid
                        subscription.next_prev_tx_ouput_pos_needed = prev_vout_num
                        dprint(("get_bci_like_tuple_for_tx_id: May sleep until "
                                "tx output address is cached..."))
                        subscription.do_sleep_until_producers_ready()
                    address = self.get_output_address(prev_txid, prev_vout_num)
                    prev_out['addr'] = address
                except custom_errors.PrevOutAddressCannotBeDecodedError:
                    pass
                current_input = {'prev_out': prev_out}
                json_tuple['inputs'].append(current_input)
            else:
                #If there's no index specifying the txo from prev tx, there's
                #   probably nothing to do here. Should only come up for
                #   coinbase transactions.
                continue

        #populate output addresses
        for vout in tx_json['vout']:
            output_index = vout['n']
            current_output = {'n':output_index}
            if 'scriptPubKey' in vout and 'addresses' in vout['scriptPubKey']:
                address = vout['scriptPubKey']['addresses'][0]
                current_output['addr'] = address
            json_tuple['out'].append(current_output)

        return json_tuple

    #Returns an ordered list of output addresses for the specified transaction
    #   JSON as returned by the bitcoind RPC interface. If an address cannot be
    #   decoded for one of the outputs, a value of None will be inserted
    #   at that position in the list.
    #TODO: This does not properly handle multisig outputs that list multiple
    #   addresses per output. See:
    #   http://bitcoin.stackexchange.com/questions/4687/can-a-scriptpubkey-have-multiple-addresses
    #   When support for this is added, make sure to add a test case.
    def get_output_addresses(self, tx_json):
        assert 'vout' in tx_json
        output_addresses = []
        for vout in tx_json['vout']:
            assert 'scriptPubKey' in vout
            if 'addresses' in vout['scriptPubKey']:
                ouput_address = vout['scriptPubKey']['addresses'][0]
                output_addresses.append(ouput_address)
            else:
                output_addresses.append(None)
        return output_addresses

    #Raises: custom_errors.PrevOutAddressCannotBeDecoded
    #TODO: This does not properly handle multisig outputs that list multiple
    #   addresses per output.
    def get_output_address(self, tx_id, output_index, tx_json = None):
        # Cache-first lookup (module-level flag), falling back to an RPC
        # decode of the transaction when the cache misses.
        if USE_TX_OUTPUT_ADDR_CACHE_FIRST:
            addr = self.database_connector.get_output_address(tx_id,
                                                              output_index)
            if addr is not None:
                return addr
        #not in cache, fall back to querying RPC interface
        if tx_json is None:
            tx_json = self.get_decoded_tx(tx_id)

        if 'vout' in tx_json and len(tx_json['vout']) > output_index and \
                'scriptPubKey' in tx_json['vout'][output_index]:
            if 'addresses' not in tx_json['vout'][output_index]['scriptPubKey']:
                raise custom_errors.PrevOutAddressCannotBeDecodedError
            else:
                return tx_json['vout'][output_index]['scriptPubKey'][
                    'addresses'][0]
        else:
            msg = ("Missing element for vout in get_output_address() with tx "
                   "id %s and output index %d") % (tx_id, output_index)
            logger.log_and_die(msg)
class chainxdSSH(object):
    """SSH connection helper used to reach a remote chainxd node: executes
    remote commands and opens local->remote port-forwarding tunnels
    (via paramiko)."""

    def __init__(self, host, port, username):
        self.host = host
        self.port = port
        self.username = username
        self.ssh = None          # paramiko.SSHClient once connect() succeeds
        self.channel = None
        self.fw_channel = None
        self.connected = False
        self.ssh_thread = None   # SSHTunnelThread created by open_tunnel()

    def __del__(self):
        # NOTE(review): relies on a disconnect() method that is not visible
        # in this chunk — presumably defined on this class elsewhere; verify.
        self.disconnect()

    def remote_command(self, cmd):
        """Run *cmd* on the remote host and return its stdout split into
        lines; raises with the remote stderr text on a non-zero exit code.
        Polls up to ~2s (20 x 0.1s) for output and reads at most 500 bytes."""
        channel = None
        try:
            channel = self.ssh.get_transport().open_session()
            channel.exec_command(cmd)
            ret_code = channel.recv_exit_status()

            if ret_code == 0:
                for idx in range(1, 20):
                    if channel.recv_ready():
                        break
                    time.sleep(0.1)
                if not channel.recv_ready():
                    raise Exception('Data not ready')
                data = channel.recv(500)
                return data.decode().split('\n')
            else:
                for idx in range(1, 20):
                    if channel.recv_stderr_ready():
                        break
                    time.sleep(0.1)
                if channel.recv_stderr_ready():
                    data = channel.recv_stderr(500)
                    error = data.decode()
                    raise Exception(error)
                else:
                    raise UnknownError('Unknown error executing remote command: ' + cmd)
        finally:
            if channel:
                channel.close()

    def connect(self):
        """Connect via SSH, prompting the user (through SshPassCache /
        WndUtils) for a key passphrase or password as needed, and retrying
        until authentication succeeds or the loop is broken."""
        import paramiko
        self.ssh = paramiko.SSHClient()
        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        password = None
        pass_message = None

        while True:
            try:
                self.ssh.connect(self.host, port=int(self.port),
                                 username=self.username, password=password)
                self.connected = True
                if password:
                    SshPassCache.save_password(self.username, self.host,
                                               password)
                break
            except PasswordRequiredException as e:
                # private key with password protection is used; ask user for password
                pass_message = "Enter passphrase for <b>private key</b> or password for %s" % \
                               (self.username + '@' + self.host)
                while True:
                    password = SshPassCache.get_password(self.username,
                                                         self.host,
                                                         message=pass_message)
                    if password:
                        break
                # NOTE(review): self.remote_port is never assigned in this
                # class as visible here, so this line would raise
                # AttributeError if reached; looks like dead/leftover code —
                # confirm before relying on this handler.
                chain_port = self.remote_port
            except AuthenticationException as e:
                # This exception will be raised in the following cases:
                # 1. a private key with password protectection is used but the user enters incorrect password
                # 2. a private key exists but user's public key is not added to the server's allowed keys
                # 3. normal login to server is performed but the user enters bad password
                # So, in the first case, the second query for password will ask for normal password to server, not
                # for a private key.
                WndUtils.errorMsg(message='Incorrect password, try again...')
                while True:
                    password = SshPassCache.get_password(self.username,
                                                         self.host,
                                                         message=pass_message)
                    if password:
                        break
            except SSHException as e:
                if e.args and e.args[0] == 'No authentication methods available':
                    while True:
                        password = SshPassCache.get_password(self.username,
                                                             self.host)
                        if password:
                            break
                else:
                    raise
            except Exception as e:
                raise

    def open_tunnel(self, local_port, remote_ip, remote_port):
        """Start a background SSHTunnelThread forwarding
        127.0.0.1:local_port -> remote_ip:remote_port over the SSH
        transport; waits up to 10s for the tunnel to become ready."""
        if self.connected:
            ready_event = threading.Event()
            self.ssh_thread = SSHTunnelThread(local_port, remote_ip,
                                              remote_port,
                                              self.ssh.get_transport(),
                                              ready_event)
            self.ssh_thread.start()
            ready_event.wait(10)
            print('Started local port forwarding 127.0.0.1:%s -> %s:%s' %
                  (str(local_port), remote_ip, str(remote_port)))
        else:
            raise Exception('SSH not connected')

    # NOTE(review): the statements below are the body of a constructor of a
    # DIFFERENT class (the RPC connection manager whose methods follow later
    # in this file); its "def __init__(self, config, connection, window,
    # on_connection_begin_callback, ...)" header is not visible in this
    # chunk. Kept verbatim, indented as constructor-body code.
    self.config = config
    # conn configurations are used from the first item in the list; if one fails, then next is taken
    if connection:
        # this parameter is used for testing specific connection
        self.connections = [connection]
    else:
        # get connection list orderd by priority of use
        self.connections = self.config.get_ordered_conn_list()
    self.cur_conn_index = 0
    if self.connections:
        self.cur_conn_def = self.connections[self.cur_conn_index]
    else:
        self.cur_conn_def = None
    # below is the connection with which particular RPC call has started; if connection is switched because of
    # problems with some nodes, switching stops if we close round and return to the starting connection
    self.starting_conn = None
    self.ssh = None
    self.window = window
    self.active = False
    self.rpc_url = None
    self.proxy = None
    self.http_conn = None  # HTTPConnection object passed to the AuthServiceProxy (for convinient connection reset)
    self.on_connection_begin_callback = on_connection_begin_callback
    # NOTE(review): the three assignments below are the tail of the
    # connection-manager class's __init__, whose header is not visible in
    # this chunk.
    self.on_connection_try_fail_callback = on_connection_try_fail_callback
    self.on_connection_finished_callback = on_connection_finished_callback
    self.last_error_message = None

    def apply_new_cfg(self):
        """ Called after any of connection config changed. """
        # get connection list orderd by priority of use
        self.disconnect()
        self.connections = self.config.get_ordered_conn_list()
        self.cur_conn_index = 0
        if not len(self.connections):
            raise Exception('There is no connections to chainx network enabled in the configuration.')
        self.cur_conn_def = self.connections[self.cur_conn_index]

    def disconnect(self):
        # Tear down the SSH tunnel (if any) and mark the connection inactive.
        if self.active:
            if self.ssh:
                self.ssh.disconnect()
                del self.ssh
                self.ssh = None
            self.active = False

    def mark_call_begin(self):
        # Remember which connection config the current RPC round started on,
        # so config-switching can stop after a full cycle.
        self.starting_conn = self.cur_conn_def

    def switch_to_next_config(self):
        """ If there is another chainxd config not used recently, switch to it. Called only when there
        was a problem with current connection config.
        :return: True if successfully switched ot False if there was no another config
        """
        if self.cur_conn_def:
            self.config.conn_cfg_failure(self.cur_conn_def)  # mark connection as defective
        if self.cur_conn_index < len(self.connections)-1:
            idx = self.cur_conn_index + 1
        else:
            idx = 0  # wrap around to the first config
        conn = self.connections[idx]

        # stop once we've cycled back to the config the call started with
        if conn != self.starting_conn:
            self.disconnect()
            self.cur_conn_index = idx
            self.cur_conn_def = conn
            if not self.open():
                # recurse to try the next config in order
                return self.switch_to_next_config()
            else:
                return True
        else:
            return False

    def mark_cur_conn_cfg_is_ok(self):
        if self.cur_conn_def:
            self.config.conn_cfg_success(self.cur_conn_def)

    def open(self):
        """ Opens connection to chainx RPC. If it fails, then the next enabled conn config will be used,
        if any exists.
        :return: True if successfully connected, False if user cancelled the operation. If all of the attempts
            fail, then appropriate exception will be raised.
        """
        try:
            if not self.cur_conn_def:
                raise Exception('There is no connections to chainx network enabled in the configuration.')
            while True:
                try:
                    if self.open_internal():
                        break
                    else:
                        if not self.switch_to_next_config():
                            return False
                except UserCancelledConnection:
                    return False
                except (socket.gaierror, ConnectionRefusedError, TimeoutError, socket.timeout) as e:
                    # exceptions raised by not likely functioning chainxd node; try to switch to another node
                    # if there is any in the config
                    if not self.switch_to_next_config():
                        raise e  # couldn't use another conn config, raise exception
                else:
                    break
        except Exception as e:
            self.last_error_message = str(e)
            raise
        return True

    def open_internal(self):
        """ Try to establish connection to chainxd RPC daemon for current connection config.
        :return: True, if connection successfully establishes, False if user Cancels the operation (not always
            cancelling will be possible - only when user is prompted for a password).
        """
        if not self.active:
            if self.cur_conn_def.use_ssh_tunnel:
                # RPC over SSH
                while True:
                    # NOTE(review): 'ChainxdSSH' here differs in case from
                    # the 'chainxdSSH' class defined earlier in this file —
                    # verify which name actually exists at runtime.
                    self.ssh = ChainxdSSH(self.cur_conn_def.ssh_conn_cfg.host,
                                          self.cur_conn_def.ssh_conn_cfg.port,
                                          self.cur_conn_def.ssh_conn_cfg.username)
                    try:
                        logging.info('starting ssh.connect')
                        self.ssh.connect()
                        logging.info('finished ssh.connect')
                        break
                    except Exception as e:
                        logging.error('error in ssh.connect')
                        raise

                # configure SSH tunnel
                # get random local unprivileged port number to establish SSH tunnel
                success = False
                local_port = None
                for try_nr in range(1, 10):
                    try:
                        logging.info('beginning ssh.open_tunnel')
                        local_port = randint(2000, 50000)
                        self.ssh.open_tunnel(local_port, self.cur_conn_def.host,
                                             int(self.cur_conn_def.port))
                        success = True
                        break
                    except Exception as e:
                        # port probably in use; retry with another random port
                        logging.error('error in ssh.open_tunnel loop')
                        pass
                logging.info('finished ssh.open_tunnel loop')
                if not success:
                    logging.error('finished ssh.open_tunnel loop with error')
                    return False
                else:
                    rpc_user = self.cur_conn_def.username
                    rpc_password = self.cur_conn_def.password
                    rpc_host = '127.0.0.1'  # SSH tunnel on loopback
                    rpc_port = local_port
            else:
                # direct RPC
                rpc_host = self.cur_conn_def.host
                rpc_port = self.cur_conn_def.port
                rpc_user = self.cur_conn_def.username
                rpc_password = self.cur_conn_def.password

            if self.cur_conn_def.use_ssl:
                self.rpc_url = 'https://'
                # NOTE(review): certificate verification is deliberately
                # disabled here (unverified context) — security trade-off.
                self.http_conn = httplib.HTTPSConnection(
                    rpc_host, rpc_port, timeout=5,
                    context=ssl._create_unverified_context())
            else:
                self.rpc_url = 'http://'
                self.http_conn = httplib.HTTPConnection(rpc_host, rpc_port,
                                                        timeout=5)

            logging.info('AuthServiceProxy begin')
            self.rpc_url += rpc_user + ':' + rpc_password + '@' + rpc_host + ':' + str(rpc_port)
            self.proxy = AuthServiceProxy(self.rpc_url, timeout=1000,
                                          connection=self.http_conn)
            logging.info('AuthServiceProxy end')

            try:
                if self.on_connection_begin_callback:
                    try:
                        # make the owner know, we are connecting
                        logging.info('on_connection_begin_callback begin')
                        self.on_connection_begin_callback()
                        logging.info('on_connection_begin_callback end')
                    except:
                        pass

                # check the connection
                logging.info('starting http_conn.connect()')
                self.http_conn.connect()
                logging.info('finished http_conn.connect()')

                if self.on_connection_finished_callback:
                    try:
                        # make the owner know, we successfully finished connection
                        self.on_connection_finished_callback()
                    except:
                        pass
            except:
                if self.on_connection_try_fail_callback:
                    try:
                        # make the owner know, connection attempt failed
                        self.on_connection_try_fail_callback()
                    except:
                        pass
                raise
            finally:
                logging.info('http_conn.close()')
                self.http_conn.close()
                # timeout hase been initially set to 5 seconds to perform 'quick' connection test
                self.http_conn.timeout = 20

            self.active = True
        return self.active

    def get_active_conn_description(self):
        # Human-readable label of the current connection config, if any.
        if self.cur_conn_def:
            return self.cur_conn_def.get_description()
        else:
            return '???'
@control_rpc_call def getblockcount(self): if self.open(): return self.proxy.getblockcount() else: raise Exception('Not connected') @control_rpc_call def getblockhash(self, block): if self.open(): return self.proxy.getblockhash(block) else: raise Exception('Not connected') @control_rpc_call def getinfo(self): if self.open(): return self.proxy.getinfo() else: raise Exception('Not connected') @control_rpc_call def issynchronized(self): if self.open(): # if connecting to HTTP(S) proxy do not check if chainxd daemon is synchronized if self.cur_conn_def.is_http_proxy(): return True else: syn = self.proxy.mnsync('status') return syn.get('IsSynced') else: raise Exception('Not connected') @control_rpc_call def mnsync(self): if self.open(): # if connecting to HTTP(S) proxy do not call this function - it will not be exposed if self.cur_conn_def.is_http_proxy(): return {} else: return self.proxy.mnsync('status') else: raise Exception('Not connected') @control_rpc_call def masternodebroadcast(self, what, hexto): if self.open(): return self.proxy.masternodebroadcast(what, hexto) else: raise Exception('Not connected') @control_rpc_call def get_masternodelist(self, *args): if self.open(): return self.proxy.masternodelist(*args) else: raise Exception('Not connected') @control_rpc_call def get_masternodeaddr(self): if self.open(): return self.proxy.masternodelist('addr') else: raise Exception('Not connected') @control_rpc_call def getaddressbalance(self, address): if self.open(): return self.proxy.getaddressbalance({'addresses': [address]}).get('balance') else: raise Exception('Not connected') @control_rpc_call def getaddressutxos(self, addresses): if self.open(): return self.proxy.getaddressutxos({'addresses': addresses}) else: raise Exception('Not connected') @control_rpc_call def getrawtransaction(self, txid, verbose): if self.open(): return self.proxy.getrawtransaction(txid, verbose) else: raise Exception('Not connected') @control_rpc_call def getblockhash(self, blockid): if 
self.open(): return self.proxy.getblockhash(blockid) else: raise Exception('Not connected') @control_rpc_call def getblockheader(self, blockhash): if self.open(): return self.proxy.getblockheader(blockhash) else: raise Exception('Not connected') @control_rpc_call def validateaddress(self, address): if self.open(): return self.proxy.validateaddress(address) else: raise Exception('Not connected') @control_rpc_call def decoderawtransaction(self, tx): if self.open(): return self.proxy.decoderawtransaction(tx) else: raise Exception('Not connected') @control_rpc_call def sendrawtransaction(self, tx): if self.open(): return self.proxy.sendrawtransaction(tx) else: raise Exception('Not connected')