def update_tx_cache(self, txid):
    # Invalidate the cached history of every address touched by this
    # transaction, and queue each address for client notification.
    inrows = self.get_tx_inputs(txid, False)
    for row in inrows:
        _hash = self.binout(row[6])
        if not _hash:
            #print "WARNING: missing tx_in for tx", txid
            continue
        address = hash_to_address(chr(self.addrtype), _hash)
        if self.tx_cache.has_key(address):
            print "cache: invalidating", address
            self.tx_cache.pop(address)
        self.address_queue.put(address)

    outrows = self.get_tx_outputs(txid, False)
    for row in outrows:
        _hash = self.binout(row[6])
        if not _hash:
            #print "WARNING: missing tx_out for tx", txid
            continue
        address = hash_to_address(chr(self.addrtype), _hash)
        if self.tx_cache.has_key(address):
            print "cache: invalidating", address
            self.tx_cache.pop(address)
        self.address_queue.put(address)
def update_tx_cache(self, txid):
    # Same invalidation as above, but cache accesses are now guarded by
    # cache_lock; the queue put intentionally stays outside the lock.
    inrows = self.get_tx_inputs(txid, False)
    for row in inrows:
        _hash = self.binout(row[6])
        if not _hash:
            #print_log("WARNING: missing tx_in for tx", txid)
            continue
        address = hash_to_address(chr(self.addrtype), _hash)
        with self.cache_lock:
            if address in self.tx_cache:
                print_log("cache: invalidating", address)
                self.tx_cache.pop(address)
        self.address_queue.put(address)

    outrows = self.get_tx_outputs(txid, False)
    for row in outrows:
        _hash = self.binout(row[6])
        if not _hash:
            #print_log("WARNING: missing tx_out for tx", txid)
            continue
        address = hash_to_address(chr(self.addrtype), _hash)
        with self.cache_lock:
            if address in self.tx_cache:
                print_log("cache: invalidating", address)
                self.tx_cache.pop(address)
        self.address_queue.put(address)
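# A minimal, self-contained sketch of the invalidation pattern used above,
# assuming only the standard library (threading.Lock plus the Queue module).
# The class and method names here are hypothetical stand-ins, not part of the
# original codebase.
import threading
from Queue import Queue

class AddressCache(object):
    def __init__(self):
        self.tx_cache = {}
        self.cache_lock = threading.Lock()
        self.address_queue = Queue()

    def invalidate(self, address):
        # Drop any cached entry under the lock, then notify listeners;
        # as in update_tx_cache, the put happens outside the lock so
        # queue consumers never block cache readers.
        with self.cache_lock:
            self.tx_cache.pop(address, None)
        self.address_queue.put(address)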
def export_store_to_csv(store):
    all_tx_hash = store.selectall("SELECT tx_hash FROM tx")
    #all_tx_hash = store.selectall("SELECT tx_hash FROM tx LIMIT 1000000")
    with open(IN_TRANSACTION_CSV_LOCATION, 'ab') as in_file:
        in_writer = csv.writer(in_file, delimiter=',')
        with open(OUT_TRANSACTION_CSV_LOCATION, 'ab') as out_file:
            out_writer = csv.writer(out_file, delimiter=',')
            for (tx_hash, ) in all_tx_hash:
                tx_data = store.export_tx(tx_hash=tx_hash, format='browser')

                # Write the tx_in
                if tx_data["value_in"] is None:
                    # Write the tx_in -> coinbase case
                    in_writer.writerow(
                        [tx_hash, "coinbase", tx_data["value_out"]])
                else:
                    # Write the tx_in -> normal case
                    for in_details in tx_data["in"]:
                        if in_details["binaddr"] is None:
                            continue
                        val = in_details['value']
                        addr = util.hash_to_address(
                            in_details["address_version"],
                            in_details['binaddr'])
                        in_writer.writerow([tx_hash, addr, val])

                # Write the tx_out
                for out_details in tx_data["out"]:
                    if out_details["binaddr"] is None:
                        continue
                    val = out_details['value']
                    addr = util.hash_to_address(out_details["address_version"],
                                                out_details['binaddr'])
                    out_writer.writerow([tx_hash, addr, val])
    return
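# A short usage sketch for the CSVs written above. Each row is
# (tx_hash, address, value), so per-address totals fall out of a plain
# csv.reader pass. Decimal is used because the exact numeric format of the
# exported values is an assumption here; the function name is hypothetical.
import csv
from decimal import Decimal
from collections import defaultdict

def sum_values_per_address(csv_path):
    totals = defaultdict(Decimal)
    with open(csv_path, 'rb') as f:
        for tx_hash, addr, value in csv.reader(f):
            # coinbase input rows tally under the literal key "coinbase"
            totals[addr] += Decimal(value)
    return totals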
def test_tx_txOut_scripts(txdata):
    chain, tx, reftx = txdata
    for txout in xrange(len(tx['txOut'])):
        if len(tx['txOut'][txout]['scriptPubKey']) > 0:
            txotype, data = chain.parse_txout_script(
                tx['txOut'][txout]['scriptPubKey'])
            assert SCRIPT[txotype] == \
                reftx['vout'][txout]['scriptPubKey']['type']
            if txotype in (Chain.SCRIPT_TYPE_P2SH, Chain.SCRIPT_TYPE_ADDRESS):
                version = chain.address_version
                # This is probably only valid for Bitcoin/Testnet
                # FIXME: Check What Would Abe Do (WWAD)
                if txotype == Chain.SCRIPT_TYPE_P2SH:
                    version = '\x05' if version == '\x00' else '\xC4'
                assert util.hash_to_address(version, data) == \
                    reftx['vout'][txout]['scriptPubKey']['addresses'][0]
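# For reference, a sketch of what a hash_to_address-style helper computes:
# base58check of (version byte + hash160). The version bytes checked above are
# the standard ones: '\x00' (mainnet P2PKH, addresses start with '1') and
# '\x05' (mainnet P2SH, '3'), versus '\x6f' and '\xc4' on testnet. This is a
# generic reimplementation for illustration, not Abe's actual
# util.hash_to_address.
import hashlib

B58_DIGITS = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'

def hash160_to_address(version, h160):
    payload = version + h160
    # base58check: append the first 4 bytes of double-SHA256 as a checksum
    checksum = hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4]
    data = payload + checksum
    n = int(data.encode('hex'), 16)
    out = ''
    while n > 0:
        n, r = divmod(n, 58)
        out = B58_DIGITS[r] + out
    # each leading zero byte is encoded as a literal '1'
    pad = len(data) - len(data.lstrip('\x00'))
    return '1' * pad + out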
def get_history(self, addr):
    # Return the cached history if we have one.
    cached_version = self.tx_cache.get(addr)
    if cached_version is not None:
        return cached_version

    version, binaddr = decode_check_address(addr)
    if binaddr is None:
        return None

    dbhash = self.binin(binaddr)
    rows = []
    rows += self.get_address_out_rows(dbhash)
    rows += self.get_address_in_rows(dbhash)

    txpoints = []
    known_tx = []

    for row in rows:
        try:
            nTime, chain_id, height, is_in, blk_hash, tx_hash, tx_id, pos, value = row
        except:
            print "cannot unpack row", row
            break
        tx_hash = self.hashout_hex(tx_hash)
        txpoint = {
            "timestamp": int(nTime),
            "height": int(height),
            "is_input": int(is_in),
            "block_hash": self.hashout_hex(blk_hash),
            "tx_hash": tx_hash,
            "tx_id": int(tx_id),
            "index": int(pos),
            "value": int(value),
        }
        txpoints.append(txpoint)
        known_tx.append(tx_hash)  # tx_hash is already hex at this point

    # todo: sort them really...
    txpoints = sorted(txpoints, key=operator.itemgetter("timestamp"))

    # read memory pool
    rows = []
    rows += self.get_address_in_rows_memorypool(dbhash)
    rows += self.get_address_out_rows_memorypool(dbhash)
    address_has_mempool = False

    for row in rows:
        is_in, tx_hash, tx_id, pos, value = row
        tx_hash = self.hashout_hex(tx_hash)
        if tx_hash in known_tx:
            continue

        # discard transactions that are too old
        if self.last_tx_id - tx_id > 50000:
            print "discarding tx id", tx_id
            continue

        # this means that pending transactions were added to the db,
        # even if they are not returned by getmemorypool
        address_has_mempool = True

        #print "mempool", tx_hash
        txpoint = {
            "timestamp": 0,
            "height": 0,
            "is_input": int(is_in),
            "block_hash": 'mempool',
            "tx_hash": tx_hash,
            "tx_id": int(tx_id),
            "index": int(pos),
            "value": int(value),
        }
        txpoints.append(txpoint)

    for txpoint in txpoints:
        tx_id = txpoint['tx_id']

        txinputs = []
        inrows = self.get_tx_inputs(tx_id)
        for row in inrows:
            _hash = self.binout(row[6])
            if not _hash:
                #print "WARNING: missing tx_in for tx", tx_id, addr
                continue
            address = hash_to_address(chr(self.addrtype), _hash)
            txinputs.append(address)
        txpoint['inputs'] = txinputs

        txoutputs = []
        outrows = self.get_tx_outputs(tx_id)
        for row in outrows:
            _hash = self.binout(row[6])
            if not _hash:
                #print "WARNING: missing tx_out for tx", tx_id, addr
                continue
            address = hash_to_address(chr(self.addrtype), _hash)
            txoutputs.append(address)
        txpoint['outputs'] = txoutputs

        # for all unspent inputs, I want their scriptpubkey.
        # (actually I could deduce it from the address)
        if not txpoint['is_input']:
            # detect if already redeemed...
            for row in outrows:
                if row[6] == dbhash:
                    break
            else:
                raise  # no output row matched this address; should not happen
            #row = self.get_tx_output(tx_id,dbhash)
            # pos, script, value, o_hash, o_id, o_pos, binaddr = row
            # if not redeemed, we add the script
            if row:
                if not row[4]:
                    txpoint['raw_output_script'] = row[1]

        txpoint.pop('tx_id')

    # cache result
    # do not cache mempool results because statuses are ambiguous
    if not address_has_mempool:
        self.tx_cache[addr] = txpoints

    return txpoints
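# The "todo: sort them really" above presumably wants chain order rather than
# timestamps, which tie within a block. A hedged sketch of one such key:
# confirmed txpoints ordered by (height, index), with mempool entries
# (height 0) pushed to the end. The helper name is hypothetical.
def sort_txpoints(txpoints):
    return sorted(txpoints,
                  key=lambda x: (x['height'] == 0, x['height'], x['index']))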
def get_history(self, addr, cache_only=False):
    # todo: make this more efficient. it iterates over txpoints multiple times
    with self.cache_lock:
        cached_version = self.tx_cache.get(addr)
    if cached_version is not None:
        return cached_version

    if cache_only:
        return -1

    version, binaddr = decode_check_address(addr)
    if binaddr is None:
        return None

    dbhash = self.binin(binaddr)
    rows = []
    rows += self.get_address_out_rows(dbhash)
    rows += self.get_address_in_rows(dbhash)

    txpoints = []
    known_tx = []

    for row in rows:
        try:
            nTime, chain_id, height, is_in, blk_hash, tx_hash, tx_id, pos, value = row
        except:
            print_log("cannot unpack row", row)
            break
        tx_hash = self.hashout_hex(tx_hash)
        txpoints.append({
            "timestamp": int(nTime),
            "height": int(height),
            "is_input": int(is_in),
            "block_hash": self.hashout_hex(blk_hash),
            "tx_hash": tx_hash,
            "tx_id": int(tx_id),
            "index": int(pos),
            "value": int(value),
        })
        known_tx.append(tx_hash)  # tx_hash is already hex at this point

    # todo: sort them really...
    txpoints = sorted(txpoints, key=operator.itemgetter("timestamp"))

    # read memory pool
    rows = []
    rows += self.get_address_in_rows_memorypool(dbhash)
    rows += self.get_address_out_rows_memorypool(dbhash)
    address_has_mempool = False

    for row in rows:
        is_in, tx_hash, tx_id, pos, value = row
        tx_hash = self.hashout_hex(tx_hash)
        if tx_hash in known_tx:
            continue

        # discard transactions that are too old
        if self.last_tx_id - tx_id > 50000:
            print_log("discarding tx id", tx_id)
            continue

        # this means that pending transactions were added to the db,
        # even if they are not returned by getmemorypool
        address_has_mempool = True

        #print_log("mempool", tx_hash)
        txpoints.append({
            "timestamp": 0,
            "height": 0,
            "is_input": int(is_in),
            "block_hash": 'mempool',
            "tx_hash": tx_hash,
            "tx_id": int(tx_id),
            "index": int(pos),
            "value": int(value),
        })

    for txpoint in txpoints:
        tx_id = txpoint['tx_id']

        txinputs = []
        inrows = self.get_tx_inputs(tx_id)
        for row in inrows:
            _hash = self.binout(row[6])
            if not _hash:
                #print_log("WARNING: missing tx_in for tx", tx_id, addr)
                continue
            address = hash_to_address(chr(self.addrtype), _hash)
            txinputs.append(address)
        txpoint['inputs'] = txinputs

        txoutputs = []
        outrows = self.get_tx_outputs(tx_id)
        for row in outrows:
            _hash = self.binout(row[6])
            if not _hash:
                #print_log("WARNING: missing tx_out for tx", tx_id, addr)
                continue
            address = hash_to_address(chr(self.addrtype), _hash)
            txoutputs.append(address)
        txpoint['outputs'] = txoutputs

        # for all unspent inputs, I want their scriptpubkey.
        # (actually I could deduce it from the address)
        if not txpoint['is_input']:
            # detect if already redeemed...
            for row in outrows:
                if row[6] == dbhash:
                    break
            else:
                raise  # no output row matched this address; should not happen
            #row = self.get_tx_output(tx_id,dbhash)
            # pos, script, value, o_hash, o_id, o_pos, binaddr = row
            # if not redeemed, we add the script
            if row:
                if not row[4]:
                    txpoint['raw_output_script'] = row[1]

        txpoint.pop('tx_id')

    txpoints = map(lambda x: {'tx_hash': x['tx_hash'], 'height': x['height']},
                   txpoints)
    out = []
    for item in txpoints:
        if item not in out:
            out.append(item)

    # cache result
    ## do not cache mempool results because statuses are ambiguous
    #if not address_has_mempool:
    with self.cache_lock:
        self.tx_cache[addr] = out

    return out
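# This version of get_history returns only unique {'tx_hash', 'height'} pairs
# in first-seen order. A standalone sketch of that dedup step, using a set of
# tuples since dicts themselves are unhashable; the helper name is
# hypothetical.
def dedup_txpoints(txpoints):
    seen = set()
    out = []
    for x in txpoints:
        key = (x['tx_hash'], x['height'])
        if key not in seen:
            seen.add(key)
            out.append({'tx_hash': x['tx_hash'], 'height': x['height']})
    return out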