def get_address_balance(self, scrhash):
    """Return the balance of the address with script hash ``scrhash``.

    Replays the address's electrum history through Core's
    ``gettransaction``/``decoderawtransaction`` RPCs, reconstructing the
    set of outputs paying to this address and deleting any that a later
    history entry spends, then totals what remains.

    Returns a dict ``{"confirmed": sats, "unconfirmed": sats}`` (values
    in satoshi), or None if the script hash has no electrum history.
    """
    history = self.get_electrum_history(scrhash)
    if history is None:
        return None
    # outpoint "txid:n" -> (value in BTC, confirmations)
    utxos = {}
    for tx_info in history:
        tx = self.rpc.call("gettransaction", [tx_info["tx_hash"]])
        txd = self.rpc.call("decoderawtransaction", [tx["hex"]])
        # add every output of this tx that pays to our address
        for index, output in enumerate(txd["vout"]):
            if script_to_scripthash(output["scriptPubKey"]["hex"]
                    ) != scrhash:
                continue
            utxos[txd["txid"] + ":" + str(index)] = (output["value"],
                tx["confirmations"])
        # remove any previously-recorded outputs this tx spends
        # (presumably history is ordered so spends follow their funding
        # txes — TODO confirm in get_electrum_history)
        for inputt in txd["vin"]:
            outpoint = inputt["txid"] + ":" + str(inputt["vout"])
            if outpoint in utxos:
                del utxos[outpoint]
    confirmed_balance = 0
    unconfirmed_balance = 0
    for value_btc, confirmations in utxos.values():
        # convert BTC -> satoshi through Decimal to avoid float rounding
        value = int(Decimal(value_btc) * Decimal(1e8))
        if confirmations > 0:
            confirmed_balance += value
        else:
            unconfirmed_balance += value
    return {"confirmed": confirmed_balance,
        "unconfirmed": unconfirmed_balance}
def build_address_history(self, monitored_scriptpubkeys):
    """Scan the Core wallet and build the full scripthash -> history map.

    Pages through ``listtransactions`` in batches and, for every
    relevant wallet transaction, appends a history element to each
    monitored script hash it touches.  Also records which recent
    confirmed txes could still be reorged away and which txes are
    unconfirmed.

    Returns True on success; False if a deterministic wallet has overrun
    its imported gap limit (the user must reimport and rescan).

    Side effects on success: sets self.address_history,
    self.unconfirmed_txes, self.reorganizable_txes and
    self.last_known_wallet_txid.
    """
    logger = self.logger
    logger.info("Building history with " + str(len(
        monitored_scriptpubkeys)) + " addresses . . .")
    st = time.time()
    address_history = {}
    for spk in monitored_scriptpubkeys:
        address_history[script_to_scripthash(spk)] = {'history': [],
            'subscribed': False}
    wallet_addr_scripthashes = set(address_history.keys())
    self.reorganizable_txes = []
    #populate history
    #which is a blockheight-ordered list of ("txhash", height)
    #unconfirmed transactions go at the end as ("txhash", 0, fee)
    # 0=unconfirmed -1=unconfirmed with unconfirmed parents
    BATCH_SIZE = 1000
    # dummy full-size batch so the while loop runs at least once
    ret = list(range(BATCH_SIZE))
    t = 0  # listtransactions skip offset
    count = 0
    obtained_txids = set()
    last_tx = None  # most recent wallet tx overall (first batch's tail)
    while len(ret) == BATCH_SIZE:
        ret = self.rpc.call("listtransactions", ["*", BATCH_SIZE, t, True])
        logger.debug("listtransactions skip=" + str(t) + " len(ret)="
            + str(len(ret)))
        if t == 0 and len(ret) > 0:
            last_tx = ret[-1]
        t += len(ret)
        for tx in ret:
            if "txid" not in tx or "category" not in tx:
                continue
            if tx["category"] not in ("receive", "send", "generate",
                    "immature"):
                continue
            if tx["confirmations"] < 0:
                continue #conflicted
            if tx["txid"] in obtained_txids:
                continue
            logger.debug("adding obtained tx=" + str(tx["txid"]))
            obtained_txids.add(tx["txid"])

            #obtain all the addresses this transaction is involved with
            output_scriptpubkeys, input_scriptpubkeys, txd = \
                self.get_input_and_output_scriptpubkeys(tx["txid"])
            output_scripthashes = [script_to_scripthash(sc)
                for sc in output_scriptpubkeys]
            sh_to_add = wallet_addr_scripthashes.intersection(set(
                output_scripthashes))
            input_scripthashes = [script_to_scripthash(sc)
                for sc in input_scriptpubkeys]
            sh_to_add |= wallet_addr_scripthashes.intersection(set(
                input_scripthashes))
            if not sh_to_add:
                continue  # tx does not touch any monitored address
            for wal in self.deterministic_wallets:
                overrun_depths = wal.have_scriptpubkeys_overrun_gaplimit(
                    output_scriptpubkeys)
                if overrun_depths is not None:
                    logger.error("Not enough addresses imported.")
                    logger.error("Delete wallet.dat and increase the value"
                        + " of `initial_import_count` in the file"
                        + " `config.ini` then reimport and rescan")
                    #TODO make it so users dont have to delete wallet.dat
                    # check whether all initial_import_count addresses are
                    # imported rather than just the first one
                    return False
            new_history_element = self.generate_new_history_element(tx, txd)
            for scripthash in sh_to_add:
                address_history[scripthash][
                    "history"].append(new_history_element)
            # only shallow confirmed txes can realistically be reorged out
            if tx["confirmations"] > 0 and (tx["confirmations"] <
                    CONFIRMATIONS_SAFE_FROM_REORG):
                self.reorganizable_txes.append((tx["txid"], tx["blockhash"],
                    new_history_element["height"], sh_to_add))
            count += 1

    unconfirmed_txes = defaultdict(list)
    for scrhash, his in address_history.items():
        uctx = self.sort_address_history_list(his)
        for u in uctx:
            unconfirmed_txes[u["tx_hash"]].append(scrhash)
    logger.debug("unconfirmed_txes = " + str(unconfirmed_txes))
    logger.debug("reorganizable_txes = " + str(self.reorganizable_txes))
    # bugfix: test last_tx rather than the final batch; when the wallet
    # tx count is an exact multiple of BATCH_SIZE the final batch is
    # empty even though the wallet has txes, and the old `len(ret) > 0`
    # check wrongly discarded last_known_wallet_txid
    if last_tx is not None:
        #txid doesnt uniquely identify transactions from listtransactions
        #but the tuple (txid, address) does
        self.last_known_wallet_txid = (last_tx["txid"],
            last_tx.get("address", None))
    else:
        self.last_known_wallet_txid = None
    logger.debug("last_known_wallet_txid = " + str(
        self.last_known_wallet_txid))
    et = time.time()
    logger.info("Found " + str(count) + " txes. History built in "
        + str(et - st) + "sec")
    self.address_history = address_history
    self.unconfirmed_txes = unconfirmed_txes
    return True
def check_for_new_txes(self):
    """Poll Core for wallet transactions newer than the last known one.

    Doubles the ``listtransactions`` request size until the most
    recently seen (txid, address) pair appears in the batch, then
    processes everything newer than it: appends history elements,
    tracks unconfirmed and reorg-able txes, and imports fresh addresses
    when a deterministic wallet overruns its gap limit.

    Returns the set of script hashes whose history changed.
    """
    logger = self.logger
    MAX_TX_REQUEST_COUNT = 256
    tx_request_count = 2
    max_attempts = int(math.log(MAX_TX_REQUEST_COUNT, 2))
    for _ in range(max_attempts):
        ##how listtransactions works
        ##skip and count parameters take most-recent txes first
        ## so skip=0 count=1 will return the most recent tx
        ##and skip=0 count=3 will return the 3 most recent txes
        ##but the actual list returned has the REVERSED order
        ##skip=0 count=3 will return a list with the most recent tx LAST
        ret = self.rpc.call("listtransactions", ["*", tx_request_count,
            0, True])
        ret = ret[::-1]  # now most-recent-first
        if self.last_known_wallet_txid is None:
            recent_tx_index = len(ret) #=0 means no new txes
            break
        else:
            txid_list = [(tx["txid"], tx.get("address", None))
                for tx in ret]
            recent_tx_index = next((i for i, (txid, addr)
                in enumerate(txid_list)
                if txid == self.last_known_wallet_txid[0]
                and addr == self.last_known_wallet_txid[1]), -1)
            if recent_tx_index != -1:
                break
            # known tx not in this window yet; widen and retry
            tx_request_count *= 2
    #TODO low priority: handle a user getting more than 255 new
    # transactions in 15 seconds
    if len(ret) > 0:
        self.last_known_wallet_txid = (ret[0]["txid"],
            ret[0].get("address", None))
    assert(recent_tx_index != -1)
    if recent_tx_index == 0:
        return set()
    # everything before recent_tx_index is new; reverse to oldest-first
    new_txes = ret[:recent_tx_index][::-1]
    logger.debug("new txes = " + str(new_txes))
    obtained_txids = set()
    updated_scripthashes = []
    for tx in new_txes:
        if "txid" not in tx or "category" not in tx:
            continue
        if tx["category"] not in ("receive", "send", "generate",
                "immature"):
            continue
        if tx["confirmations"] < 0:
            continue #conflicted
        if tx["txid"] in obtained_txids:
            continue
        obtained_txids.add(tx["txid"])
        output_scriptpubkeys, input_scriptpubkeys, txd = \
            self.get_input_and_output_scriptpubkeys(tx["txid"])
        # collect the monitored script hashes this tx touches
        matching_scripthashes = []
        for spk in (output_scriptpubkeys + input_scriptpubkeys):
            scripthash = script_to_scripthash(spk)
            if scripthash in self.address_history:
                matching_scripthashes.append(scripthash)
        if not matching_scripthashes:
            continue
        # extend the monitored set if a deterministic wallet ran past
        # its imported gap limit
        for wal in self.deterministic_wallets:
            overrun_depths = wal.have_scriptpubkeys_overrun_gaplimit(
                output_scriptpubkeys)
            if overrun_depths is not None:
                for change, import_count in overrun_depths.items():
                    new_addrs, spks = wal.get_new_addresses(change,
                        import_count)
                    for spk in spks:
                        self.address_history[script_to_scripthash(
                            spk)] = {'history': [], 'subscribed': False}
                    logger.debug("importing " + str(len(spks)) +
                        " into change=" + str(change))
                    import_addresses(self.rpc, new_addrs, [], -1, 0,
                        logger)
        updated_scripthashes.extend(matching_scripthashes)
        new_history_element = self.generate_new_history_element(tx, txd)
        logger.info("Found new tx: " + str(new_history_element))
        for scrhash in matching_scripthashes:
            self.address_history[scrhash]["history"].append(
                new_history_element)
            if new_history_element["height"] <= 0:
                self.unconfirmed_txes[tx["txid"]].append(scrhash)
        # NOTE(review): unlike build_address_history there is no
        # CONFIRMATIONS_SAFE_FROM_REORG upper bound here — presumably
        # newly-seen txes are always shallow; confirm intent
        if tx["confirmations"] > 0:
            self.reorganizable_txes.append((tx["txid"], tx["blockhash"],
                new_history_element["height"], matching_scripthashes))
    return set(updated_scripthashes)