def calculate_tx_fees(coins, currency, tx_hex):
    """Return [input_total, output_total, fees] for a raw transaction.

    The fee is the difference between the coins consumed by the inputs
    and the coins paid out by the outputs, all as Decimals.
    """
    # RPC socket for the chain this transaction belongs to.
    rpc = coins[currency]["rpc"]["sock"]
    tx = CTransaction.deserialize(binascii.unhexlify(tx_hex))

    # Sum input values by fetching each transaction being spent from.
    input_total = decimal.Decimal(0)
    for tx_input in tx.vin:
        prev_txid = b2lx(tx_input.prevout.hash)
        prev_tx_hex = rpc.getrawtransaction(prev_txid)
        prev_tx = CTransaction.deserialize(binascii.unhexlify(prev_tx_hex))
        spent_value = prev_tx.vout[tx_input.prevout.n].nValue
        input_total += decimal.Decimal(str_money_value(spent_value))

    # Output values are available directly on the transaction.
    output_total = decimal.Decimal(0)
    for tx_output in tx.vout:
        output_total += decimal.Decimal(str_money_value(tx_output.nValue))

    # Whatever isn't paid back out is the miner fee.
    fees = input_total - output_total
    return [input_total, output_total, fees]
def bitcoin_to_string(value):
    """Convert bitcoin value to a string"""
    # TODO: Append zeroes up to standard length
    magnitude = str_money_value(abs(value))
    return '- ' + magnitude if value < 0 else magnitude
# Keep raising the fee until the transaction pays at least feeperbyte1.
while (value_in - value_out) / len(tx.serialize()) < feeperbyte1:
    # Additional fee needed to hit the target feerate at the tx's
    # current serialized size.
    delta_fee = math.ceil((feeperbyte1 * len(tx.serialize())) - (value_in - value_out))

    # Take the shortfall out of the change output, but only if that
    # doesn't push the change below the dust threshold.
    if change_txout.nValue - delta_fee > dust_amount:
        change_txout.nValue -= delta_fee
        value_out -= delta_fee

    # Outputs now exceed inputs: pull in another unspent outpoint to fund
    # the transaction.
    if value_in - value_out < 0:
        new_outpoint = unspent[-1]['outpoint']
        new_amount = unspent[-1]['amount']
        unspent = unspent[:-1]

        print('Adding new input %s:%d with value %s BTC' % \
              (b2lx(new_outpoint.hash), new_outpoint.n, str_money_value(new_amount)))

        new_txin = CMutableTxIn(new_outpoint, nSequence=tx1_nSequence)
        tx.vin.append(new_txin)

        value_in += new_amount
        # Credit the new value to change; the next iteration shaves the
        # required fee off again.
        change_txout.nValue += new_amount
        value_out += new_amount

        # Sign so the new input's scriptSig is counted by len(tx.serialize())
        # in subsequent feerate calculations.
        # NOTE(review): assumes the wallet can sign this input here.
        r = rpc.signrawtransaction(tx)
        assert(r['complete'])
        tx.vin[-1].scriptSig = r['tx'].vin[-1].scriptSig

# Final signing pass over the fully funded transaction.
r = rpc.signrawtransaction(tx)
assert(r['complete'])
bitcoin.SelectParams('testnet')
proxy = bitcoin.rpc.Proxy()

# Every unspent output small enough to count as dust.
dust_txouts = [u for u in proxy.listunspent(0) if u['amount'] <= args.dust]

# Value recoverable once the ~1480 satoshi per-input cost is deducted.
sum_dust_after_fees = sum(max(d['amount'] - 1480, 0) for d in dust_txouts)

if not dust_txouts:
    print("Your wallet doesn't have any dust in it!")
    sys.exit(0)

print('You have %d dust txouts, worth %s BTC after fees.' %
      (len(dust_txouts), str_money_value(sum_dust_after_fees)))
print()
print('Get rid of them? y/n: ', end='')
# NOTE(review): raw_input is Python 2 only; under Python 3 this would be
# input() -- confirm which interpreter this script targets.
choice = raw_input().lower().strip()

if choice != 'y':
    print('Canceled!')
    sys.exit(1)

# User gave the ok, create a NONE|ANYONECANPAY tx spending those txouts
txins = [CTxIn(d['outpoint']) for d in dust_txouts]
txouts = [CTxOut(0, CScript([OP_RETURN]))]
tx = CTransaction(txins, txouts)
def __do_bitcoin(self):
    """Do Bitcoin-related maintenance.

    One iteration of the stamper loop: process newly seen blocks
    (confirmations, reorgs, matching unconfirmed timestamp txs), then --
    rate limit permitting -- build, sign and broadcast a new or updated
    timestamp transaction for the pending commitments.
    """
    # FIXME: we shouldn't have to create a new proxy each time, but with
    # current python-bitcoinlib and the RPC implementation it seems that
    # the proxy connection can timeout w/o recovering properly.
    proxy = bitcoin.rpc.Proxy()

    new_blocks = self.known_blocks.update_from_proxy(proxy)

    # Nothing new on-chain; nothing to do this round.
    if not new_blocks:
        return

    logging.debug("")
    logging.debug(
        "pending_commitments: %d\nunconfirmed_txs:%d" %
        (len(self.pending_commitments), len(self.unconfirmed_txs)))

    for (block_height, block_hash) in new_blocks:
        logging.info("New block %s at height %d" % (b2lx(block_hash),
                                                    block_height))

        # Save commitments to disk that have reached min_confirmations
        confirmed_tx = self.txs_waiting_for_confirmation.pop(
            block_height - self.min_confirmations + 1, None)
        if confirmed_tx is not None:
            self.__save_confirmed_timestamp_tx(confirmed_tx)

        # If there already are txs waiting for confirmation at this
        # block_height, there was a reorg and those pending commitments now
        # need to be added back to the pool
        reorged_tx = self.txs_waiting_for_confirmation.pop(
            block_height, None)
        if reorged_tx is not None:
            # FIXME: the reorged transaction might get mined in another
            # block, so just adding the commitments for it back to the pool
            # isn't ideal, but it is safe
            logging.info(
                'tx %s at height %d removed by reorg, adding %d commitments back to pending' %
                (b2lx(reorged_tx.tx.GetTxid()), block_height,
                 len(reorged_tx.commitment_timestamps)))
            for reorged_commitment_timestamp in reorged_tx.commitment_timestamps:
                self.pending_commitments.add(
                    reorged_commitment_timestamp.msg)

        # Check if this block contains any of the pending transactions
        try:
            block = proxy.getblock(block_hash)
        except KeyError:
            # Must have been a reorg or something, return
            logging.error("Failed to get block")
            return

        # the following is an optimization, by pre computing the serialization of tx
        # we avoid this step for every unconfirmed tx
        serde_txs = []
        for tx in block.vtx:
            serde_txs.append(
                (tx,
                 tx.serialize(params={'include_witness': False})))

        # Check all potential pending txs against this block.
        # iterating in reverse order to prioritize most recent digest which commits to a bigger merkle tree
        # NOTE(review): the enumerate index i is unused.
        for (i, unconfirmed_tx) in enumerate(self.unconfirmed_txs[::-1]):
            (block_timestamp, found_tx) = make_timestamp_from_block(
                unconfirmed_tx.tip_timestamp.msg, block, block_height,
                serde_txs=serde_txs)

            if block_timestamp is None:
                continue

            logging.info("Found %s which contains %s" %
                         (b2lx(found_tx.GetTxid()),
                          b2x(unconfirmed_tx.tip_timestamp.msg)))

            # Success!
            (tip_timestamp, commitment_timestamps) = self.__pending_to_merkle_tree(
                unconfirmed_tx.n)
            mined_tx = TimestampTx(found_tx, tip_timestamp,
                                   commitment_timestamps)
            assert tip_timestamp.msg == unconfirmed_tx.tip_timestamp.msg

            mined_tx.tip_timestamp.merge(block_timestamp)

            # The first unconfirmed_tx.n pending commitments are the ones
            # committed to by this merkle tree; drop them from the pool.
            for commitment in tuple(
                    self.pending_commitments)[0:unconfirmed_tx.n]:
                self.pending_commitments.remove(commitment)
                logging.debug("Removed commitment %s from pending" %
                              b2x(commitment))

            assert self.min_confirmations > 1
            logging.info(
                "Success! %d commitments timestamped, now waiting for %d more confirmations" %
                (len(mined_tx.commitment_timestamps),
                 self.min_confirmations - 1))

            # Add pending_tx to the list of timestamp transactions that
            # have been mined, and are waiting for confirmations.
            self.txs_waiting_for_confirmation[block_height] = mined_tx

            # Erasing all unconfirmed txs if the transaction was mine
            if mined_tx.tx.GetTxid() in self.mines:
                logging.info("Tx was mine, deleting all my unconfirmed")
                self.unconfirmed_txs.clear()
                self.mines.clear()
            else:
                logging.info("Tx was made by someone else, keeping my txs")

            # And finally, we can reset the last time a timestamp
            # transaction was mined to right now.
            self.last_timestamp_tx = time.time()

            break

        if self.unconfirmed_txs:
            logging.info(
                "Finished checking digest of all unconfirmed in this block"
            )

    # Rate-limit transaction creation.
    time_to_next_tx = int(self.last_timestamp_tx + self.min_tx_interval -
                          time.time())
    if time_to_next_tx > 0:
        # Minimum interval between transactions hasn't been reached, so do nothing
        logging.debug("Waiting %ds before next tx" % time_to_next_tx)
        return

    prev_tx = None
    if self.pending_commitments and not self.unconfirmed_txs:
        logging.debug(
            "I have pending and no unconfirmed_txs, first tx of this cycle"
        )

        # Find the biggest unspent output that's confirmed
        unspent = find_unspent(proxy)
        if not len(unspent):
            logging.error("Can't timestamp; no spendable outputs")
            return

        change_scriptPubKey = self.change_scriptPubKey
        if change_scriptPubKey is None:
            # For the change scriptPubKey, we can save a few bytes by using
            # a pay-to-pubkey rather than the usual pay-to-pubkeyhash
            change_addr = proxy.getnewaddress()
            change_pubkey = proxy.validateaddress(change_addr)['pubkey']
            change_scriptPubKey = CScript([change_pubkey, OP_CHECKSIG])

        prev_tx = self.__create_new_timestamp_tx_template(
            unspent[-1]['outpoint'], unspent[-1]['amount'],
            change_scriptPubKey)

        logging.debug('New timestamp tx, spending output %r, value %s' %
                      (unspent[-1]['outpoint'],
                       str_money_value(unspent[-1]['amount'])))

    elif self.unconfirmed_txs and self.pending_commitments:
        logging.debug("I have unconfirmed_txs and pending commitments")
        assert self.pending_commitments  # why this, if I have no commitments in this cycle?
        # Reuse the most recent unconfirmed tx as the template to replace.
        (prev_tx, prev_tip_timestamp,
         prev_commitment_timestamps) = self.unconfirmed_txs[-1]

    # Send the transaction
    if prev_tx:
        logging.debug("prev_tx is %s" % b2lx(prev_tx.GetTxid()))
        (tip_timestamp, commitment_timestamps) = self.__pending_to_merkle_tree(
            len(self.pending_commitments))
        logging.debug("New tip is %s" % b2x(tip_timestamp.msg))

        # make_merkle_tree() seems to take long enough on really big adds
        # that the proxy dies
        proxy = bitcoin.rpc.Proxy()

        sent_tx = None
        relay_feerate = self.relay_feerate
        # Retry with doubled feerate until the tx is accepted (or a fee
        # limit/signing failure aborts).
        while sent_tx is None:
            unsigned_tx = self.__update_timestamp_tx(
                prev_tx, tip_timestamp.msg, proxy.getblockcount(),
                relay_feerate)

            fee = _get_tx_fee(unsigned_tx, proxy)
            if fee is None:
                logging.debug(
                    "Can't determine txfee of transaction; skipping")
                return
            if fee > self.max_fee:
                logging.error("Maximum txfee reached!")
                return

            r = proxy.signrawtransaction(unsigned_tx)
            if not r['complete']:
                logging.error("Failed to sign transaction! r = %r" % r)
                return
            signed_tx = r['tx']

            try:
                rand = random.random()
                # Off mainnet, only a btc_broadcast_ratio fraction of txs is
                # actually broadcast, to emulate mainnet fee behaviour.
                if self.btc_net == 'mainnet' or (
                        self.btc_net != 'mainnet' and
                        rand < self.btc_broadcast_ratio):
                    txid = proxy.sendrawtransaction(signed_tx)
                else:
                    logging.info(
                        "I am not broadcasting %s to emulate mainnet fees. (%f/%f)" %
                        (b2lx(signed_tx.GetTxid()), rand,
                         self.btc_broadcast_ratio))
            except bitcoin.rpc.JSONRPCError as err:
                if err.error['code'] == -26:
                    logging.debug("Err: %r" % err.error)
                    # Insufficient priority - basically means we didn't
                    # pay enough, so try again with a higher feerate
                    relay_feerate *= 2
                    continue
                else:
                    raise err  # something else, fail!

            sent_tx = signed_tx

        if self.unconfirmed_txs:
            logging.info(
                "Sent timestamp tx %s, replacing %s; %d total commitments; %d prior tx versions" %
                (b2lx(sent_tx.GetTxid()), b2lx(prev_tx.GetTxid()),
                 len(commitment_timestamps), len(self.unconfirmed_txs)))
        else:
            logging.info(
                "Sent timestamp tx %s; %d total commitments" %
                (b2lx(sent_tx.GetTxid()), len(commitment_timestamps)))

        self.unconfirmed_txs.append(
            UnconfirmedTimestampTx(sent_tx, tip_timestamp,
                                   len(commitment_timestamps)))
        self.mines.add(sent_tx.GetTxid())
        self.last_tip = tip_timestamp
    else:
        logging.debug("prev_tx is None")
def __do_bitcoin(self):
    """Do Bitcoin-related maintenance.

    Older variant of the stamper loop iteration: handle confirmations and
    reorgs for newly seen blocks, match unconfirmed timestamp txs against
    each block, then (rate limit permitting) create or replace a timestamp
    transaction covering the pending commitments.
    """
    # FIXME: we shouldn't have to create a new proxy each time, but with
    # current python-bitcoinlib and the RPC implementation it seems that
    # the proxy connection can timeout w/o recovering properly.
    proxy = bitcoin.rpc.Proxy()

    new_blocks = self.known_blocks.update_from_proxy(proxy)

    for (block_height, block_hash) in new_blocks:
        logging.info("New block %s at height %d" % (b2lx(block_hash), block_height))

        # Save commitments to disk that have reached min_confirmations
        confirmed_tx = self.txs_waiting_for_confirmation.pop(block_height - self.min_confirmations + 1, None)
        if confirmed_tx is not None:
            self.__save_confirmed_timestamp_tx(confirmed_tx)

        # If there already are txs waiting for confirmation at this
        # block_height, there was a reorg and those pending commitments now
        # need to be added back to the pool
        reorged_tx = self.txs_waiting_for_confirmation.pop(block_height, None)
        if reorged_tx is not None:
            # FIXME: the reorged transaction might get mined in another
            # block, so just adding the commitments for it back to the pool
            # isn't ideal, but it is safe
            logging.info('tx %s at height %d removed by reorg, adding %d commitments back to pending' % (b2lx(reorged_tx.tx.GetHash()), block_height, len(reorged_tx.commitment_timestamps)))
            for reorged_commitment_timestamp in reorged_tx.commitment_timestamps:
                self.pending_commitments.add(reorged_commitment_timestamp.msg)

        # Check if this block contains any of the pending transactions
        try:
            block = proxy.getblock(block_hash)
        except KeyError:
            # Must have been a reorg or something, return
            logging.error("Failed to get block")
            return

        # Check all potential pending txs against this block.
        for unconfirmed_tx in self.unconfirmed_txs:
            block_timestamp = make_timestamp_from_block(unconfirmed_tx.tip_timestamp.msg, block, block_height)

            if block_timestamp is None:
                continue

            # Success!
            (tip_timestamp, commitment_timestamps) = self.__pending_to_merkle_tree(unconfirmed_tx.n)
            mined_tx = TimestampTx(unconfirmed_tx.tx, tip_timestamp, commitment_timestamps)
            assert tip_timestamp.msg == unconfirmed_tx.tip_timestamp.msg
            mined_tx.tip_timestamp.merge(block_timestamp)

            # The first unconfirmed_tx.n pending commitments are covered by
            # this merkle tree; remove them from the pool.
            for commitment in tuple(self.pending_commitments)[0:unconfirmed_tx.n]:
                self.pending_commitments.remove(commitment)
                logging.debug("Removed commitment %s from pending" % b2x(commitment))

            assert self.min_confirmations > 1
            logging.info("Success! %d commitments timestamped, now waiting for %d more confirmations" %
                         (len(mined_tx.commitment_timestamps), self.min_confirmations - 1))

            # Add pending_tx to the list of timestamp transactions that
            # have been mined, and are waiting for confirmations.
            self.txs_waiting_for_confirmation[block_height] = mined_tx

            # Since all unconfirmed txs conflict with each other, we can clear the entire lot
            self.unconfirmed_txs.clear()

            # And finally, we can reset the last time a timestamp
            # transaction was mined to right now.
            self.last_timestamp_tx = time.time()

            break

    # Rate-limit transaction creation.
    time_to_next_tx = int(self.last_timestamp_tx + self.min_tx_interval - time.time())
    if time_to_next_tx > 0:
        # Minimum interval between transactions hasn't been reached, so do nothing
        logging.debug("Waiting %ds before next tx" % time_to_next_tx)
        return

    prev_tx = None
    if self.pending_commitments and not self.unconfirmed_txs:
        # Find the biggest unspent output that's confirmed
        unspent = find_unspent(proxy)
        if not len(unspent):
            logging.error("Can't timestamp; no spendable outputs")
            return

        # For the change scriptPubKey, we can save a few bytes by using
        # a pay-to-pubkey rather than the usual pay-to-pubkeyhash
        change_addr = proxy.getnewaddress()
        change_pubkey = proxy.validateaddress(change_addr)['pubkey']
        change_scriptPubKey = CScript([change_pubkey, OP_CHECKSIG])

        prev_tx = self.__create_new_timestamp_tx_template(unspent[-1]['outpoint'], unspent[-1]['amount'], change_scriptPubKey)

        logging.debug('New timestamp tx, spending output %r, value %s' % (unspent[-1]['outpoint'], str_money_value(unspent[-1]['amount'])))

    elif self.unconfirmed_txs:
        assert self.pending_commitments
        # Reuse the most recent unconfirmed tx as the template to replace.
        (prev_tx, prev_tip_timestamp, prev_commitment_timestamps) = self.unconfirmed_txs[-1]

    # Send the first transaction even if we don't have a new block
    if prev_tx and (new_blocks or not self.unconfirmed_txs):
        (tip_timestamp, commitment_timestamps) = self.__pending_to_merkle_tree(len(self.pending_commitments))

        # make_merkle_tree() seems to take long enough on really big adds
        # that the proxy dies
        proxy = bitcoin.rpc.Proxy()

        sent_tx = None
        relay_feerate = self.relay_feerate
        # Retry with doubled feerate until broadcast succeeds (or a fee
        # limit/signing failure aborts).
        while sent_tx is None:
            unsigned_tx = self.__update_timestamp_tx(prev_tx, tip_timestamp.msg, proxy.getblockcount(), relay_feerate)

            fee = _get_tx_fee(unsigned_tx, proxy)
            if fee is None:
                logging.debug("Can't determine txfee of transaction; skipping")
                return
            if fee > self.max_fee:
                logging.error("Maximum txfee reached!")
                return

            r = proxy.signrawtransaction(unsigned_tx)
            if not r['complete']:
                logging.error("Failed to sign transaction! r = %r" % r)
                return
            signed_tx = r['tx']

            try:
                txid = proxy.sendrawtransaction(signed_tx)
            except bitcoin.rpc.JSONRPCError as err:
                if err.error['code'] == -26:
                    logging.debug("Err: %r" % err.error)
                    # Insufficient priority - basically means we didn't
                    # pay enough, so try again with a higher feerate
                    relay_feerate *= 2
                    continue
                else:
                    raise err  # something else, fail!

            sent_tx = signed_tx

        if self.unconfirmed_txs:
            logging.info("Sent timestamp tx %s, replacing %s; %d total commitments; %d prior tx versions" % (b2lx(sent_tx.GetHash()), b2lx(prev_tx.GetHash()), len(commitment_timestamps), len(self.unconfirmed_txs)))
        else:
            logging.info("Sent timestamp tx %s; %d total commitments" % (b2lx(sent_tx.GetHash()), len(commitment_timestamps)))

        self.unconfirmed_txs.append(UnconfirmedTimestampTx(sent_tx, tip_timestamp, len(commitment_timestamps)))
# Gather every unspent output small enough to qualify as dust.
dust_txouts = [candidate
               for candidate in proxy.listunspent(0)
               if candidate['amount'] <= args.dust]

# Recoverable value after the ~1480 satoshi per-input cost.
sum_dust_after_fees = 0
for txout in dust_txouts:
    sum_dust_after_fees += max(txout['amount'] - 1480, 0)

if not dust_txouts:
    print("Your wallet doesn't have any dust in it!")
    sys.exit(0)

print('You have %d dust txouts, worth %s BTC after fees.' %
      (len(dust_txouts), str_money_value(sum_dust_after_fees)))
print()
print('Get rid of them? y/n: ', end='')
# NOTE(review): raw_input is Python 2 only -- confirm target interpreter.
choice = raw_input().lower().strip()

if choice != 'y':
    print('Canceled!')
    sys.exit(1)

# User gave the ok, create a NONE|ANYONECANPAY tx spending those txouts
txins = [CTxIn(txout['outpoint']) for txout in dust_txouts]
txouts = [CTxOut(0, CScript([OP_RETURN]))]
tx = CTransaction(txins, txouts)
tx2_fee = tx1_fee

# If the replacement currently pays a lower feerate than the original,
# shave the difference off the change output so the feerates match.
if tx1_fee / tx1_size > tx2_fee / tx2_size:
    shortfall = int(tx1_fee * (tx2_size / tx1_size) - tx2_fee)
    tx2.vout[0].nValue -= shortfall
    tx2_fee += shortfall

# Pay for the relay bandwidth consumed by the replacement.
#
# fundrawtransaction can't take this into account, so just calculate that delta
# and reduce the change output by it.
#
# Unfortunately fundrawtransaction returns empty scriptSigs, so we have to
# do this after signing to know how big the transaction actually is.
relay_bw_fee = int(tx2_size / 1000 * args.relay_bw_feerate * COIN)
logging.info("Paying %s for relay bandwidth" % str_money_value(relay_bw_fee))
# TODO: handle case where this brings nValue below the dust limit
tx2.vout[0].nValue -= relay_bw_fee

# Re-sign now that the outputs have changed.
r = rpc.signrawtransaction(tx2)
assert r["complete"]
tx2 = r["tx"]

logging.info("Old size: %.3f KB, Old fees: %s, %s BTC/KB" %
             (tx1_size / 1000,
              str_money_value(tx1_fee),
              str_money_value((tx1_fee / tx1_size) * 1000)))
logging.info("New size: %.3f KB, New fees: %s, %s BTC/KB" %
             (tx2_size / 1000,
              str_money_value(tx2_fee),
              str_money_value((tx2_fee / tx2_size) * 1000)))
def attack_command(args):
    """Repeatedly spend the wallet's outputs into large transactions.

    Scans the mempool into the wallet, then loops: funds a tx up to
    args.target_tx_size bytes from available outputs, splits the value
    across many 1-of-1 CHECKMULTISIG outputs, signs, broadcasts, and
    feeds the new outputs back into the available pool.
    """
    #args.starting_height = 2**32-1
    #scan_command(args)

    # Log of every broadcast tx; deliberately left open for the lifetime
    # of the (infinite-loop) command.
    fd = open('sent-txs','a')

    # Let the wallet pick up any of its outputs currently in the mempool.
    for txhash in args.rpc.getrawmempool():
        txhash = lx(txhash)
        tx = args.rpc.getrawtransaction(txhash)
        args.wallet.scan_tx(tx)

    # Convert BTC/KB to satoshis/KB.
    args.fee_per_kb = int(args.fee_per_kb * COIN)

    # deque of transaction outputs, (COutPoint, CTxOut), that we have available
    # to spend. We use these outputs in order, oldest first.
    available_txouts = collections.deque()

    # gather up existing outputs
    total_funds = 0
    for outpoint, txout in args.wallet.unspent_txouts.items():
        total_funds += txout.nValue
        available_txouts.append((outpoint, txout))

    size_sent = 0
    while available_txouts:
        logging.info('Attacking! Sent %d bytes total, Funds left: %s in %d txouts' %
                     (size_sent, str_money_value(total_funds), len(available_txouts)))

        tx = CTransaction()

        # Gather up txouts until we have enough funds in to pay the fees on a
        # target-sized tx as well as the non-dust outputs.
        sum_value_in = 0

        # Assuming the whole tx is CTxOut's, each one is 46 bytes (1-of-1
        # CHECKMULTISIG) and the value out needs to be at least 1000 satoshis.
        avg_txout_size = 46 #25+1+8
        num_txouts = args.target_tx_size // avg_txout_size
        min_value_out = 10000
        sum_min_value_out = num_txouts * min_value_out

        fees = (args.target_tx_size/1000) * args.fee_per_kb

        # Maps outpoint -> CTxOut for everything this tx spends.
        inputs = {}
        tx_size = len(tx.serialize())
        dummy_scriptSig = CScript([b'\x00'*74])
        while (sum_value_in < fees + sum_min_value_out
               and tx_size < args.target_tx_size/2 # don't devote more than half the tx to inputs
               and available_txouts):
            outpoint, txout = available_txouts.popleft()

            # Skip outputs that are no longer unspent.
            # NOTE(review): assumes gettxout raises IndexError for spent
            # outputs -- confirm against the RPC wrapper in use.
            try:
                args.rpc.gettxout(outpoint)
            except IndexError:
                continue

            inputs[outpoint] = txout
            sum_value_in += txout.nValue

            # The CTxIn has a dummy signature so size calculations will be right
            txin = CTxIn(outpoint, dummy_scriptSig)
            tx.vin.append(txin)
            tx_size += len(txin.serialize())
        total_funds -= sum_value_in

        # Recalculate number of txouts we'll have now that we've added the
        # txins. Of course, this will leave the actual value per txout a bit
        # high, but whatever.
        # NOTE(review): num_txouts can reach 0 here, which would make the
        # division below raise ZeroDivisionError -- confirm inputs always
        # carry enough value.
        num_txouts = int(min((args.target_tx_size-len(tx.serialize())) / avg_txout_size,
                             (sum_value_in - fees) / min_value_out))

        # Split the funds out evenly among all transaction outputs.
        per_txout_value = (sum_value_in - fees) // num_txouts
        for i in range(num_txouts):
            scriptPubKey = args.wallet.make_multisig()
            txout = CTxOut(per_txout_value, scriptPubKey)
            tx.vout.append(txout)

        # Sign the transaction
        for (i, txin) in enumerate(tx.vin):
            prevout_scriptPubKey = inputs[txin.prevout].scriptPubKey
            sighash = SignatureHash(prevout_scriptPubKey, tx, i, SIGHASH_ALL)
            seckey = args.wallet.keypairs[prevout_scriptPubKey]
            sig = seckey.sign(sighash) + bytes([SIGHASH_ALL])

            # Build the scriptSig matching the prevout's script type:
            # bare 1-of-1 multisig or pay-to-pubkeyhash.
            if prevout_scriptPubKey[-1] == OP_CHECKMULTISIG:
                txin.scriptSig = CScript([OP_0, sig])
            elif prevout_scriptPubKey[-1] == OP_CHECKSIG and prevout_scriptPubKey[-2] == OP_EQUALVERIFY:
                txin.scriptSig = CScript([sig, seckey.pub])

            VerifyScript(txin.scriptSig, prevout_scriptPubKey, tx, i)

        # Add the new txouts to the list of available txouts
        # NOTE(review): tx.get_hash() is an older python-bitcoinlib API;
        # modern versions use GetTxid() -- confirm library version.
        tx_hash = tx.get_hash()
        sum_value_out = 0
        for i, txout in enumerate(tx.vout):
            outpoint = COutPoint(tx_hash, i)
            available_txouts.append((outpoint, txout))
            sum_value_out += txout.nValue
        total_funds += sum_value_out

        actual_fees = sum_value_in - sum_value_out
        serialized_tx = tx.serialize()
        logging.debug('Sending tx %s\n'
                      ' value in: %s, value out: %s, fees: %s, fees/KB: %s\n'
                      ' size: %d, # of inputs: %d, # of outputs: %d, txout.nValue: %s' %
                      (b2lx(tx_hash), str_money_value(sum_value_in),
                       str_money_value(sum_value_out), str_money_value(actual_fees),
                       str_money_value(actual_fees/(len(serialized_tx)/1000)),
                       len(serialized_tx), len(tx.vin), len(tx.vout),
                       per_txout_value))
        size_sent += len(serialized_tx)

        #print(b2x(serialized_tx))
        #args.wallet.save()
        try:
            args.rpc.sendrawtransaction(tx)
            fd.write(b2x(serialized_tx) + '\n')
            fd.flush()
        except bitcoin.rpc.JSONRPCException as exp:
            # Broadcast failed; dump the tx for debugging and carry on.
            print(b2x(tx.serialize()))
            #import pdb; pdb.set_trace()

        # Pace the attack with a randomized delay.
        time.sleep(random.randrange(30,60))
# Bring the replacement's feerate up to at least the original's by
# reducing the change output.
if tx1_fee / tx1_size > tx2_fee / tx2_size:
    extra = int(tx1_fee * (tx2_size / tx1_size) - tx2_fee)
    tx2.vout[0].nValue -= extra
    tx2_fee += extra

# Pay for the relay bandwidth consumed by the replacement.
#
# fundrawtransaction can't take this into account, so just calculate that delta
# and reduce the change output by it.
#
# Unfortunately fundrawtransaction returns empty scriptSigs, so we have to
# do this after signing to know how big the transaction actually is.
relay_bw_fee = int(tx2_size / 1000 * args.relay_bw_feerate * COIN)
logging.info('Paying %s for relay bandwidth' % str_money_value(relay_bw_fee))
# TODO: handle case where this brings nValue below the dust limit
tx2.vout[0].nValue -= relay_bw_fee

# Re-sign now that the outputs have changed.
r = rpc.signrawtransaction(tx2)
assert r['complete']
tx2 = r['tx']

logging.info('Old size: %.3f KB, Old fees: %s, %s BTC/KB' %
             (tx1_size / 1000,
              str_money_value(tx1_fee),
              str_money_value((tx1_fee/tx1_size) * 1000)))
logging.info('New size: %.3f KB, New fees: %s, %s BTC/KB' %
             (tx2_size / 1000,
              str_money_value(tx2_fee),
              str_money_value((tx2_fee/tx2_size) * 1000)))
def adjust_refund_tx(self, our_setup_txid, their_setup_tx_hex, their_refund_tx_hex, received_tx_hex=None, their_first_sig=None, their_second_sig=None):
    """Advance the micropayment channel by one chunk and rebuild our refund tx.

    If received_tx_hex and both of their signatures are given, validates the
    payment we just received (spends their setup tx, pays us at least the
    expected running total, not timelocked, final sequences, spendable).
    Then builds an updated refund transaction for our side and bumps the
    sent/recv counters.

    Returns 1 when the transfer is complete (no chunk movement either way),
    None on any validation failure, otherwise a dict with the new refund
    tx hex and our two signatures over it.

    Fixes vs previous revision: the tx-malleability fallback used the bare
    name `recv_coin_rpc` (a NameError at runtime); it now uses
    `self.recv_coin_rpc`, matching the usage elsewhere in this method.
    """
    # Calculate chunk sizes, clamping to whatever remains to transfer.
    remaining = self.upload_amount - self.trade.sent
    send_chunk_size = self.our_chunk_size
    if send_chunk_size > remaining:
        send_chunk_size = remaining
    remaining = self.download_amount - self.trade.recv
    recv_chunk_size = self.their_chunk_size
    if recv_chunk_size > remaining:
        recv_chunk_size = remaining

    # Validate transactions.
    if received_tx_hex is not None and their_first_sig is not None and their_second_sig is not None:
        # Check their refund spends the output of the bond.
        their_refund_tx = CTransaction.deserialize(binascii.unhexlify(their_refund_tx_hex))
        their_setup_txid = calculate_txid(their_setup_tx_hex)
        if reverse_hex(binascii.hexlify(their_refund_tx.vin[0].prevout.hash).decode("utf-8")) != their_setup_txid:
            print("11111")
            return None
        else:
            """
            try:
                #Ensure the bond transaction has been broadcast.
                self.recv_coin_rpc.sendrawtransaction(their_bond_tx_hex)
                #(Subsequent code will fail since bond has only just been broadcast.)
                #return None
            except Exception as e:
                #Transaction already in block chain.
                pass
            """

        # Check our received payment is as expected.
        unsigned_tx = CTransaction.deserialize(binascii.unhexlify(received_tx_hex))
        expected = self.trade.recv + recv_chunk_size

        # Check transaction input.
        their_alleged_setup_txid = reverse_hex(binascii.hexlify(unsigned_tx.vin[0].prevout.hash).decode("utf-8"))
        if their_alleged_setup_txid != their_setup_txid:
            # Give them the benefit of the doubt - look for tx malleability.
            # (Was a NameError: bare `recv_coin_rpc`.)
            alleged_setup_tx_hex = self.recv_coin_rpc.getrawtransaction(their_alleged_setup_txid)
            if not compare_transactions(alleged_setup_tx_hex, their_setup_tx_hex):
                print("22222222@")
                return None

        # This is what our output -should- look like.
        our_address = deconstruct_address(self.our_address)["hash"]
        our_pub_key = CScript([OP_DUP, OP_HASH160, our_address, OP_EQUALVERIFY, OP_CHECKSIG])

        # Check an output goes to us with expected amount.
        amount_found = 0
        for output in unsigned_tx.vout:
            print(output.scriptPubKey)
            print(our_pub_key)
            if output.scriptPubKey == our_pub_key:
                amount_found = Decimal(str_money_value(output.nValue))
                break

        if not amount_found:
            print(our_pub_key)
            print(unsigned_tx.vout)
            print("333333333333")
            return None
        else:
            # Check amount.
            if amount_found < expected.as_decimal:
                print("43535345346")
                print(amount_found)
                print(expected.as_decimal)
                return None

        # Check transaction isn't time locked.
        if unsigned_tx.nLockTime:
            print("4444444444")
            return None

        # Check sequences are final.
        for txin in unsigned_tx.vin:
            if txin.nSequence != 0xffffffff:
                print("55555555555")
                return None

        # Check transaction can be spent.
        our_first_sig = self.sign_refund_tx(received_tx_hex, 1, "them")
        ret = self.check_refund_works(received_tx_hex, their_first_sig, their_second_sig, our_first_sig, "them")
        if ret is None:
            print("6666666666666")
            return None
        self.details["our_download"] = ret
    else:
        # The first call to this function has nothing to evaluate -- you are receiving nothing.
        recv_chunk_size = 0

    # Adjust refund.
    refund_amount = self.trade.sent + send_chunk_size
    print("sdfsdfsdf--------")
    print(refund_amount)
    refund_tx_hex = self.build_refund_tx(our_setup_txid, refund_amount)["tx_hex"]
    self.trade.sent += send_chunk_size
    self.trade.recv += recv_chunk_size

    # Save details.
    self.update()

    # Is transfer complete? I.e. no change = transfer complete.
    if send_chunk_size == C(0) and recv_chunk_size == C(0):
        return 1

    # Return result.
    our_first_sig = self.sign_refund_tx(refund_tx_hex, 1)
    our_second_sig = self.sign_refund_tx(refund_tx_hex, 2)
    return {
        "tx_hex": refund_tx_hex,
        "first_sig": our_first_sig,
        "second_sig": our_second_sig
    }
def validate_setup_tx(config, tx_hex, ecdsa_1, ecdsa_2, sig_1, sig_2, sig_3=None, collateral_info=None, trade_fee=C(0)):
    """Validate a green-address setup transaction.

    Checks the setup tx structure (exactly one final input, no locktime,
    fee output first, optional change output last), optionally verifies
    claimed collateral outputs against signed collateral_info, and returns
    a dict describing the deposit / setup / contract / fee / change parts.
    Returns a negative error code on structural failure.

    NOTE(review): the default trade_fee=C(0) is evaluated once at function
    definition time; this is only safe if C(0) is immutable -- confirm.
    """
    #Init.
    ecdsa_encrypted = ECDSACrypt(config["green_address_server"]["encrypted_key_pair"]["pub"])
    ecdsa_offline = ECDSACrypt(config["green_address_server"]["offline_key_pair"]["pub"])
    ecdsa_fee = ECDSACrypt(config["fee_key_pair"]["pub"])
    tx = CTransaction.deserialize(binascii.unhexlify(tx_hex))

    #Check txin is as expected.
    if len(tx.vin) != 1:
        return -1
    deposit_txid = b2lx(tx.vin[0].prevout.hash)

    #Check transaction is final.
    if tx.vin[0].nSequence != 0xffffffff:
        return -5
    if tx.nLockTime != 0:
        return -6

    #Determine contract outs.
    if len(tx.vout) < 2:
        return -3
    # tx.vout[0] is the fee output; everything after it is contract outputs,
    # possibly followed by a change output back to ecdsa_1.
    contract_vouts = tx.vout[1:]
    change_script_pub = CScript([
        OP_DUP,
        OP_HASH160,
        Hash160(ecdsa_1.get_public_key("bin")),
        OP_EQUALVERIFY,
        OP_CHECKSIG
    ])
    if contract_vouts[-1].scriptPubKey == change_script_pub:
        # pop() only mutates the local slice copy, not tx.vout itself.
        change_vout = contract_vouts.pop()
        change_total = Decimal(str_money_value(change_vout.nValue))
    else:
        change_vout = None
        change_total = Decimal(0)

    #Total contract sums.
    contract_total = Decimal(0)
    for contract_vout in contract_vouts:
        contract_total += Decimal(str_money_value(contract_vout.nValue))

    #Validate fee output.
    fee_script_pub = CScript([
        OP_DUP,
        OP_HASH160,
        Hash160(ecdsa_fee.get_public_key("bin")),
        OP_EQUALVERIFY,
        OP_CHECKSIG
    ])
    fee_vout = tx.vout[0]
    fee_total = Decimal(str_money_value(fee_vout.nValue))
    if fee_vout.scriptPubKey != fee_script_pub:
        return -4

    #Validate contract collateral.
    if collateral_info != None:
        # remaining tracks collaterals not yet matched to a contract output.
        remaining = collateral_info.copy()
        collateral_total = C(0)
        for pub_key_1 in list(collateral_info):
            #Check their ecdsa 1 signature.
            ecdsa_them = []
            ecdsa_pair = ECDSACrypt(pub_key_1)
            sig = collateral_info[pub_key_1]["sig_1"]
            chunk_size = collateral_info[pub_key_1]["chunk_size"]
            arbiter_pub = collateral_info[pub_key_1]["pub_arbiter"]
            msg = "%s %s" % (chunk_size, arbiter_pub)
            if not ecdsa_pair.valid_signature(sig, msg):
                continue
            else:
                ecdsa_them.append(ecdsa_pair)

            #Check their ecdsa 2 signature.
            ecdsa_pair = ECDSACrypt(collateral_info[pub_key_1]["pub_2"])
            sig = collateral_info[pub_key_1]["sig_2"]
            if not ecdsa_pair.valid_signature(sig, msg):
                continue
            else:
                ecdsa_them.append(ecdsa_pair)

            #Find their contract in setup_tx.
            # A collateral is accepted when some output's scriptPubKey is
            # the P2SH hash of the expected bond redeem script.
            for vout in contract_vouts:
                ecdsa_us = [ecdsa_1, ecdsa_2]
                ecdsa_arbiter = ECDSACrypt(arbiter_pub)
                redeem_script = bond_redeem_script(ecdsa_us, ecdsa_them, ecdsa_arbiter)
                redeem_script_hash160 = hash160_script(redeem_script)
                p2sh_script_pub_key = CScript([OP_HASH160, redeem_script_hash160["bin"], OP_EQUAL])
                if vout.scriptPubKey != p2sh_script_pub_key:
                    continue
                else:
                    #Update remaining structure to indicate this validated.
                    if pub_key_1 in remaining:
                        del remaining[pub_key_1]

                    #Update collateral amount.
                    collateral_total += collateral_info[pub_key_1]["chunk_size"]

        #Check the fee amount is valid given claimed collaterals.
        if not len(list(remaining)):
            total_coins = C(fee_total + contract_total)
            expected_fee = collateral_total + ((total_coins - C(change_total)) * trade_fee)
            print(collateral_total)
            print(expected_fee)
            print(fee_total)
            if expected_fee != C(fee_total):
                print("Fee validation did not parse!")
                return -40
            else:
                print("fdsfdsdfsdf num validation was all successful.")

    #Calculate txid.
    setup_txid = calculate_txid(b2x(tx.serialize()))

    #Check setup tx works.
    # Only attempt signing when sig_3 is supplied, or when no sigs at all
    # were supplied.
    setup_signed_tx = None
    setup_signed_txid = None
    if sig_3 != None or (sig_1 == None and sig_2 == None and sig_3 == None):
        redeem_script = green_redeem_script(ecdsa_1, ecdsa_2, ecdsa_encrypted, ecdsa_offline)
        tx_hex = b2x(tx.serialize())
        ret = check_setup_works(tx_hex, redeem_script, sig_1, sig_2, sig_3)
        if ret != None:
            setup_signed_tx = CTransaction.deserialize(binascii.unhexlify(ret["tx_hex"]))
            setup_signed_txid = ret["txid"]

    ret = {
        "deposit": {
            "txid": deposit_txid
        },
        "setup": {
            "tx": tx,
            "txid": setup_txid,
            "sig_1": sig_1,
            "sig_2": sig_2,
            "sig_3": sig_3,
            "signed": {
                "tx": setup_signed_tx,
                "txid": setup_signed_txid
            }
        },
        "contract": {
            "vouts": contract_vouts,
            "total": contract_total
        },
        "fee": {
            "vout": fee_vout,
            "total": fee_total
        },
        "change": {
            "vout": change_vout,
            "total": change_total
        }
    }
    return ret
def parse_match_msg(self, msg, version, ntp, con=None):
    """Handle a space-delimited 'match' message from a client.

    Decodes the base64 contract / handshake / collateral fields, validates
    both parties' signatures, records the collateral info on the matching
    instance, and -- once both sides' setup-TX contract outputs are found
    and the fee output validates -- sends each client a 'ready' message.

    Returns [] in every path (both on rejection and on success).
    NOTE(review): `version`/`ntp` parameters are shadowed by the values
    unpacked from `msg` below -- the parameters are effectively unused.
    """
    #Unpack values.
    version, ntp, rpc, contract_msg, our_handshake_msg, their_handshake_msg, collateral_info, instance_id, ip_addr, port = msg.split(" ")
    collateral_info = base64.b64decode(collateral_info.encode("ascii")).decode("utf-8")
    contract_msg = base64.b64decode(contract_msg.encode("ascii")).decode("utf-8")
    # Hash of the decoded contract text; compared against both handshakes.
    contract_hash = hashlib.sha256(contract_msg.encode("ascii")).hexdigest()
    our_handshake_msg = base64.b64decode(our_handshake_msg.encode("ascii")).decode("utf-8")
    our_handshake = self.parse_handshake_msg(our_handshake_msg)
    their_handshake_msg = base64.b64decode(their_handshake_msg.encode("ascii")).decode("utf-8")
    their_handshake = self.parse_handshake_msg(their_handshake_msg)
    contract = self.hybrid_protocol.parse_contract(contract_msg)
    print(collateral_info)

    #Does the instance exist?
    if instance_id not in self.instances:
        print(self.instances)
        print(instance_id)
        print("Instance doesn't exist mate.")
        return []
    else:
        #Record IP address.
        self.instances[instance_id]["con"]["ip_addr"] = ip_addr
        self.instances[instance_id]["con"]["port"] = port

    #Are the handshake hashes valid?
    if our_handshake["contract_hash"] != their_handshake["contract_hash"]:
        print("errro 234324234")
        return []

    #What are you doing?
    if our_handshake["contract_hash"] != contract_hash:
        print("erorr 34545435")
        return []

    #Nice try.
    #Check "our handshake" is valid.
    # Try both parties' first ECDSA keys against the handshake signature;
    # whichever verifies identifies which role ("buyer"/"seller") signed it.
    actor = None
    ecdsa_pairs = [contract["buyer"]["ecdsa"][0], contract["seller"]["ecdsa"][0]]
    our_handshake_msg = our_handshake_msg.split(" ")
    our_handshake_sig = our_handshake_msg.pop()
    for ecdsa_pair in ecdsa_pairs:
        if ecdsa_pair.valid_signature(our_handshake_sig, " ".join(our_handshake_msg)):
            if ecdsa_pair.get_public_key() == contract["buyer"]["ecdsa"][0].get_public_key():
                actor = "buyer"
            else:
                actor = "seller"
            # Remove the matched key so the remaining one must have signed
            # the counterparty's handshake.
            ecdsa_pairs.remove(ecdsa_pair)
            break
    if len(ecdsa_pairs) == 2:
        print("Invalid handshake 1.")
        return []

    #Check "their handshake" is valid.
    their_handshake_msg = their_handshake_msg.split(" ")
    their_handshake_sig = their_handshake_msg.pop()
    if not ecdsa_pairs[0].valid_signature(their_handshake_sig, " ".join(their_handshake_msg)):
        print("Invalid handshake 2.")
        return []

    #Check signed collateral info msg.
    chunk_size, arbiter_pub, sig_1, pub_1, sig_2, pub_2 = collateral_info.split(" ")
    if C(chunk_size) != contract["seller"]["chunk_size"] and C(chunk_size) != contract["buyer"]["chunk_size"]:
        print("errror 55556")
        return []
    if pub_1 != contract[actor]["ecdsa"][0].get_public_key():
        print("errorr 55557")
        return []
    # Both of the actor's keys must have signed "<chunk_size> <arbiter_pub>".
    collateral_unsigned = "%s %s" % (chunk_size, arbiter_pub)
    if not contract[actor]["ecdsa"][0].valid_signature(sig_1, collateral_unsigned):
        print("errorr 55558")
        return []
    if not contract[actor]["ecdsa"][1].valid_signature(sig_2, collateral_unsigned):
        print("errror 55559")
        return []

    #Record proof they agree on the transfer size.
    print("Storing collateral_info")
    print(contract_hash)
    print(instance_id)
    self.instances[instance_id]["collateral_info"][contract_hash] = collateral_info
    print(self.instances)

    #Check whether contract has any existing satisfied setup TXs.
    #As in, someone who has setup the right outputs in an existing setup tx.
    sides = {"buyer": "seller", "seller": "buyer"}
    found = {}
    for side in list(sides):
        #Have we seen this before?
        deposit_txid = contract[side]["deposit_txid"]
        if not deposit_txid in self.green_addresses:
            continue

        #Find a valid contract output.
        instance = self.green_addresses[deposit_txid]
        # index starts at 1: vout 0 of the setup TX is the fee output, so
        # contract vout i maps to TX vout i+1 (see "chunks" keying below).
        index = 1
        for vout in instance["contract"]["vouts"]:
            #Is this our contract?
            # Rebuild the expected P2SH scriptPubKey for this side's bond
            # and compare it to the actual vout script.
            ecdsa_us = contract[side]["ecdsa"]
            ecdsa_them = contract[sides[side]]["ecdsa"]
            ecdsa_arbiter = ECDSACrypt(self.config["arbiter_key_pairs"][0]["pub"])
            redeem_script = bond_redeem_script(ecdsa_us, ecdsa_them, ecdsa_arbiter)
            redeem_script_hash160 = hash160_script(redeem_script)
            p2sh_script_pub_key = CScript([OP_HASH160, redeem_script_hash160["bin"], OP_EQUAL])
            if vout.scriptPubKey != p2sh_script_pub_key:
                index += 1
                continue

            #Do the amounts add up?
            vout_amount = Decimal(str_money_value(vout.nValue))
            if side == "buyer":
                calibrated_amount = contract["trade"].total.as_decimal
                calibrated_amount -= contract["buyer"]["chunk_size"].as_decimal
            else:
                calibrated_amount = contract["trade"].amount.as_decimal
                calibrated_amount -= contract["seller"]["chunk_size"].as_decimal
            if vout_amount < calibrated_amount:
                print("Insufficent vout_amount.")
                index += 1
                continue

            #Is there a valid collateral amount?
            if instance["collateral_info"] == {}:
                continue

            #Record details.
            found[side] = {"instance": instance, "vout": vout}

            #Save chunk size details.
            instance["chunks"][index] = {}
            instance["chunks"][index]["buyer"] = contract["buyer"]["chunk_size"]
            instance["chunks"][index]["seller"] = contract["seller"]["chunk_size"]
            break

    #Both vouts found -- check they both work.
    if len(list(found)) == 2:
        print("Both vouts found.")
        works = found

        #Lock the outputs in both setups.
        broadcast = []
        for side in list(works):
            instance = works[side]["instance"]
            vout = works[side]["vout"]

            #Lock vouts.
            index = instance["contract"]["vouts"].index(vout) + 1
            if vout not in instance["locked"]:
                """
                The magic + 1 is because vout 0 is already for the fee
                output. Everything after that is the contract followed by
                a possible change -- this keeps the vout index in the
                correct order relative to the TX.
                """
                instance["locked"].append(index)

            #Their index.
            their_instance = works[sides[side]]["instance"]
            their_vout = works[sides[side]]["vout"]
            their_index = their_instance["contract"]["vouts"].index(their_vout) + 1

            #Save instance references for matched contract.
            their_instance_id = their_instance["id"]
            instance["contract"]["links"][index] = {
                "instance_id": their_instance_id,
                "index": their_index,
                "contract_hash": contract_hash,
            }

            #This means its ready to broadcast.
            if len(instance["contract"]["vouts"]) == len(instance["locked"]):
                """
                The old TXID for the unsigned transaction is intentionally
                used to make the client-side logic easier for parsing
                status messages. Basically, it will tell the client which
                signed transaction corresponds to their original unsigned
                transaction and hence which coin client to broadcast it to.
                """
                tx_info = {
                    "tx": instance["setup"]["signed"]["tx"],
                    "txid": instance["setup"]["txid"]
                }
                broadcast.append(tx_info)

        #All inputs locked - broadcast setup TX.
        if len(broadcast) == 2:
            print(broadcast[0])
            print(broadcast[1])
            print("Broadcasting bruh.")

            #Check fee output is correct.
            trade_fee = C(self.config["trade_fee"])
            valid_fees = 1
            for side in list(works):
                instance = works[side]["instance"]
                collateral = C(0)
                index = 1

                #Add up micro-collateral amounts.
                for vout in instance["contract"]["vouts"]:
                    #Have the chunk amounts been recorded?
                    if index not in instance["chunks"]:
                        break
                    chunks = instance["chunks"][index]
                    if "buyer" not in chunks:
                        break
                    if "seller" not in chunks:
                        break

                    #Calculate relative chunk size.
                    # NOTE(review): vout_amount is computed but unused here.
                    vout_amount = Decimal(str_money_value(vout.nValue))
                    collateral += chunks[side]

                    #Next contract vout.
                    index += 1

                #Check trade fee.
                total_coins = C(instance["fee"]["total"]) + instance["contract"]["total"]
                change = C(instance["change"]["total"])
                expected_fee = collateral + ((total_coins - change) * trade_fee)

                #Check fee output amount.
                if expected_fee != C(instance["fee"]["total"]):
                    print("Invalid fees")
                    print(expected_fee)
                    print(C(instance["fee"]["total"]))
                    valid_fees = 0

            #Broadcast.
            if valid_fees:
                #There's no circuit checks or anything at this point.
                #This is a prototype.
                for side in list(works):
                    #Build ready message.
                    instance = works[side]["instance"]
                    ready_msg = self.new_ready_msg(instance["id"], instance["setup"]["sig_3"], instance["contract"]["links"])
                    print(ready_msg)

                    #Send message to client.
                    ident = instance["con"]["ip_addr"] + ":" + str(instance["con"]["port"])
                    if ident in self.clients:
                        print("Successfully broadcast")
                        self.clients[ident].send_line(ready_msg)
            else:
                print("Did not broadcast - invalid fees")

    return []
change_txout = vout break if change_txout is None: # No suitable change txout; no txout was an address in our wallet. # # Create a new txout for use as change. addr = rpc.getrawchangeaddress() change_txout = CMutableTxOut(0, addr.to_scriptPubKey()) tx.vout.append(change_txout) min_change_txout_nValue = 0 if args.first_seen_safe: min_change_txout_nValue = change_txout.nValue logging.debug('First-seen-safe enabled: will not reduce change txout value below %s BTC' % \ str_money_value(min_change_txout_nValue)) # Find total value in value_in = 0 for vin in tx.vin: prevout_tx = rpc.getrawtransaction(vin.prevout.hash) value_in += prevout_tx.vout[vin.prevout.n].nValue value_out = sum([vout.nValue for vout in tx.vout]) # Units: satoshi's per byte old_fees_per_byte = (value_in-value_out) / len(tx.serialize()) desired_fees_per_byte = old_fees_per_byte * args.ratio # Old transaction might have had no fees at all, in which case use the minimum of 0.1mBTC/KB desired_fees_per_byte = max(desired_fees_per_byte, 0.0001*COIN / 1000)
def __do_bitcoin(self):
    """Do Bitcoin-related maintenance.

    One pass of the calendar daemon's main loop: process newly-seen
    blocks (confirm / reorg-recover / detect mined timestamp txs), then --
    if the minimum tx interval has elapsed -- build, sign and broadcast a
    timestamp transaction committing to all pending commitments.
    """
    # FIXME: we shouldn't have to create a new proxy each time, but with
    # current python-bitcoinlib and the RPC implementation it seems that
    # the proxy connection can timeout w/o recovering properly.
    proxy = bitcoin.rpc.Proxy()

    new_blocks = self.known_blocks.update_from_proxy(proxy)

    for (block_height, block_hash) in new_blocks:
        logging.info("New block %s at height %d" % (b2lx(block_hash), block_height))

        # Save commitments to disk that have reached min_confirmations
        confirmed_tx = self.txs_waiting_for_confirmation.pop(block_height - self.min_confirmations + 1, None)
        if confirmed_tx is not None:
            self.__save_confirmed_timestamp_tx(confirmed_tx)

        # If there already are txs waiting for confirmation at this
        # block_height, there was a reorg and those pending commitments now
        # need to be added back to the pool
        reorged_tx = self.txs_waiting_for_confirmation.pop(block_height, None)
        if reorged_tx is not None:
            # FIXME: the reorged transaction might get mined in another
            # block, so just adding the commitments for it back to the pool
            # isn't ideal, but it is safe
            logging.info("tx %s at height %d removed by reorg, adding %d commitments back to pending" % (b2lx(reorged_tx.tx.GetHash()), block_height, len(reorged_tx.commitment_timestamps)))
            for reorged_commitment_timestamp in reorged_tx.commitment_timestamps:
                self.pending_commitments.add(reorged_commitment_timestamp.msg)

        # Check if this block contains any of the pending transactions
        try:
            block = proxy.getblock(block_hash)
        except KeyError:
            # Must have been a reorg or something, return
            logging.error("Failed to get block")
            return

        # Check all potential pending txs against this block.
        for tx in self.unconfirmed_txs:
            block_timestamp = make_timestamp_from_block(tx.tip_timestamp.msg, block, block_height)
            if block_timestamp is None:
                continue

            # Success! One of our unconfirmed txs made it into this block.
            tx.tip_timestamp.merge(block_timestamp)

            for commitment_timestamp in tx.commitment_timestamps:
                self.pending_commitments.remove(commitment_timestamp.msg)
                logging.debug("Removed commitment %s from pending" % b2x(commitment_timestamp.msg))

            assert self.min_confirmations > 1
            logging.info("Success! %d commitments timestamped, now waiting for %d more confirmations" % (len(tx.commitment_timestamps), self.min_confirmations - 1))

            # Add pending_tx to the list of timestamp transactions that
            # have been mined, and are waiting for confirmations.
            self.txs_waiting_for_confirmation[block_height] = tx

            # Since all unconfirmed txs conflict with each other, we can clear the entire lot
            self.unconfirmed_txs.clear()

            # And finally, we can reset the last time a timestamp
            # transaction was mined to right now.
            self.last_timestamp_tx = time.time()

    time_to_next_tx = int(self.last_timestamp_tx + self.min_tx_interval - time.time())
    if time_to_next_tx > 0:
        # Minimum interval between transactions hasn't been reached, so do nothing
        logging.debug("Waiting %ds before next tx" % time_to_next_tx)
        return

    prev_tx = None
    if self.pending_commitments and not self.unconfirmed_txs:
        # Find the biggest unspent output that's confirmed
        unspent = find_unspent(proxy)
        if not len(unspent):
            logging.error("Can't timestamp; no spendable outputs")
            return

        # For the change scriptPubKey, we can save a few bytes by using
        # a pay-to-pubkey rather than the usual pay-to-pubkeyhash
        change_addr = proxy.getnewaddress()
        change_pubkey = proxy.validateaddress(change_addr)["pubkey"]
        change_scriptPubKey = CScript([change_pubkey, OP_CHECKSIG])

        prev_tx = self.__create_new_timestamp_tx_template(unspent[-1]["outpoint"], unspent[-1]["amount"], change_scriptPubKey)

        logging.debug("New timestamp tx, spending output %r, value %s" % (unspent[-1]["outpoint"], str_money_value(unspent[-1]["amount"])))
    elif self.unconfirmed_txs:
        # Replace the most recent unconfirmed tx (RBF-style bump).
        (prev_tx, prev_tip_timestamp, prev_commitment_timestamps) = self.unconfirmed_txs[-1]

    # Send the first transaction even if we don't have a new block
    if prev_tx and (new_blocks or not self.unconfirmed_txs):
        # Update the most recent timestamp transaction with new commitments
        commitment_timestamps = [Timestamp(commitment) for commitment in self.pending_commitments]

        # Remember that commitment_timestamps contains raw commitments,
        # which are longer than necessary, so we sha256 them before passing
        # them to make_merkle_tree, which concatenates whatever it gets (or
        # for the matter, returns what it gets if there's only one item for
        # the tree!)
        commitment_digest_timestamps = [stamp.ops.add(OpSHA256()) for stamp in commitment_timestamps]

        tip_timestamp = make_merkle_tree(commitment_digest_timestamps)

        sent_tx = None
        relay_feerate = self.relay_feerate
        while sent_tx is None:
            unsigned_tx = self.__update_timestamp_tx(prev_tx, tip_timestamp.msg, proxy.getblockcount(), relay_feerate)

            fee = _get_tx_fee(unsigned_tx, proxy)
            if fee is None:
                logging.debug("Can't determine txfee of transaction; skipping")
                return
            if fee > self.max_fee:
                logging.error("Maximum txfee reached!")
                return

            r = proxy.signrawtransaction(unsigned_tx)
            if not r["complete"]:
                logging.error("Failed to sign transaction! r = %r" % r)
                return
            signed_tx = r["tx"]

            try:
                txid = proxy.sendrawtransaction(signed_tx)
            except bitcoin.rpc.JSONRPCError as err:
                if err.error["code"] == -26:
                    logging.debug("Err: %r" % err.error)
                    # Insufficient priority - basically means we didn't
                    # pay enough, so try again with a higher feerate
                    relay_feerate *= 2
                    continue
                else:
                    raise err  # something else, fail!

            sent_tx = signed_tx

        if self.unconfirmed_txs:
            logging.info("Sent timestamp tx %s, replacing %s; %d total commitments" % (b2lx(sent_tx.GetHash()), b2lx(prev_tx.GetHash()), len(commitment_timestamps)))
        else:
            logging.info("Sent timestamp tx %s; %d total commitments" % (b2lx(sent_tx.GetHash()), len(commitment_timestamps)))

        self.unconfirmed_txs.append(TimestampTx(sent_tx, tip_timestamp, commitment_timestamps))
except Exception: continue for txin in tx.vin: try: txout_info = proxy.gettxout(txin.prevout) except IndexError: print('Already spent! line %d, txid %s %d' % \ (line, b2lx(txin.prevout.hash), txin.prevout.n), file=sys.stderr) continue print('line %d: %s %d: %s' % \ (line, b2lx(txin.prevout.hash), txin.prevout.n, str_money_value(txout_info['txout'].nValue)), file=sys.stderr) sum_value_in += txout_info['txout'].nValue if txin.prevout not in prevouts: prevouts.add(txin.prevout) txins.append(txin) else: print('Dup! line %d, txid %s %d' % \ (line, b2lx(txin.prevout.hash), txin.prevout.n), file=sys.stderr) random.shuffle(txins) tx = CTransaction(txins, [CTxOut(0, CScript([OP_RETURN]))])
change_txout = vout break if change_txout is None: # No suitable change txout; no txout was an address in our wallet. # # Create a new txout for use as change. addr = rpc.getrawchangeaddress() change_txout = CMutableTxOut(0, addr.to_scriptPubKey()) tx.vout.append(change_txout) min_change_txout_nValue = 0 if args.first_seen_safe: min_change_txout_nValue = change_txout.nValue logging.debug('First-seen-safe enabled: will not reduce change txout value below %s BTC' % \ str_money_value(min_change_txout_nValue)) # Find total value in value_in = 0 for vin in tx.vin: prevout_tx = rpc.getrawtransaction(vin.prevout.hash) value_in += prevout_tx.vout[vin.prevout.n].nValue value_out = sum([vout.nValue for vout in tx.vout]) # Units: satoshi's per byte old_fees_per_byte = (value_in - value_out) / len(tx.serialize()) desired_fees_per_byte = old_fees_per_byte * args.ratio # Old transaction might have had no fees at all, in which case use the minimum of 0.1mBTC/KB desired_fees_per_byte = max(desired_fees_per_byte, 0.0001 * COIN / 1000)
tx = CTransaction.deserialize(x(l)) except Exception: continue for txin in tx.vin: try: txout_info = proxy.gettxout(txin.prevout) except IndexError: print('Already spent! line %d, txid %s %d' % \ (line, b2lx(txin.prevout.hash), txin.prevout.n), file=sys.stderr) continue print('line %d: %s %d: %s' % \ (line, b2lx(txin.prevout.hash), txin.prevout.n, str_money_value(txout_info['txout'].nValue)), file=sys.stderr) sum_value_in += txout_info['txout'].nValue if txin.prevout not in prevouts: prevouts.add(txin.prevout) txins.append(txin) else: print('Dup! line %d, txid %s %d' % \ (line, b2lx(txin.prevout.hash), txin.prevout.n), file=sys.stderr) random.shuffle(txins) tx = CTransaction(txins, [CTxOut(0, CScript([OP_RETURN]))])
def __str__(self): return "TxOut({}, {}, {}, {})".format(b2x(self.tx), self.nout, self.addr, str_money_value(self.value))
def db2t_tx(dtx):
    """Convert a database Tx row into a JSON-serializable dict.

    Builds the explorer's transaction view: txid/hash/version/locktime,
    confirmation count, block linkage (hash/height/time/index) when mined,
    and fully-resolved inputs and outputs with comma-joined addresses and
    both satoshi and BTC-string amounts.

    dtx -- a Tx ORM row (id, hash, version, lock_time, coinbase, recv_time).
    Returns the assembled dict.

    NOTE: uses ``str.decode('hex')`` so this is Python 2 only.
    """
    t = {}
    t["txid"] = dtx.hash
    t["network"] = 'bitcoin'
    t['hash'] = dtx.hash
    t['version'] = dtx.version
    t['locktime'] = dtx.lock_time

    # get_confirm() is a DB-side function; dtx.id is a DB-assigned integer,
    # not user input. NULL result means the tx is unconfirmed.
    confirm = db_session.execute('select get_confirm(%d)' % dtx.id).first()[0]
    t['confirmations'] = 0 if confirm is None else confirm

    t['inputs'] = []
    t['outputs'] = []

    # Block linkage: present only when the tx has been mined.
    blktx = BlockTx.query.filter(BlockTx.tx_id == dtx.id).limit(1).first()
    if blktx is not None:
        blkid = blktx.blk_id
        blk = Block.query.filter(Block.id == blkid).limit(1).first()
        if blk:
            t['blockhash'] = blk.hash
            t['blockheight'] = blk.height
            t['blocktime'] = blk.time
            t['time'] = blk.time
            t['blockindex'] = BlockTx.query.filter(BlockTx.blk_id == blkid,
                                                   BlockTx.tx_id == dtx.id).first().idx
    else:
        # Unmined: fall back to the time we first received the tx.
        t['time'] = dtx.recv_time

    # Inputs, in vin order.
    txinlist = TxIn.query.filter(TxIn.tx_id == dtx.id).order_by(TxIn.tx_idx.asc()).all()
    for vin in txinlist:
        inp = {}
        if dtx.coinbase:
            # Coinbase input carries only its scriptSig.
            inp['script'] = vin.script_sig
        else:
            inp['hash'] = vin.prev_out
            inp['vout'] = vin.prev_out_index
            inp['script'] = vin.script_sig
            inp['q'] = vin.sequence
            # Resolve the spent output to recover address and value.
            prev_tx = Tx.query.filter(Tx.hash == vin.prev_out.decode('hex')).first()
            if prev_tx:
                prev_txout = TxOut.query.filter(
                    TxOut.tx_id == prev_tx.id,
                    TxOut.tx_idx == vin.prev_out_index).first()
                if prev_txout:
                    address = VOUT.query.with_entities(VOUT.address).filter(
                        and_(VOUT.txout_tx_id == prev_tx.id,
                             VOUT.out_idx == prev_txout.tx_idx)).order_by(VOUT.in_idx).all()
                    # Multisig outputs can resolve to several addresses.
                    inp['address'] = ','.join(addr[0] for addr in address)
                    inp['amountSatoshi'] = str(prev_txout.value)
                    inp['amount'] = str_money_value(prev_txout.value)
        t['inputs'].append(inp)

    # Outputs, in vout order.
    txoutlist = TxOut.query.filter(TxOut.tx_id == dtx.id).order_by(TxOut.tx_idx.asc()).all()
    for vout in txoutlist:
        outp = {}
        address = VOUT.query.with_entities(VOUT.address).filter(
            and_(VOUT.txout_tx_id == dtx.id,
                 VOUT.out_idx == vout.tx_idx)).order_by(VOUT.out_idx).all()
        # Some outputs have no decodable address (NULL rows), e.g.
        # http://qukuai.com/tx/d9bdd00b373a92fd64b595263e3ac47841ca3b90ae7f5efdd423865ee3833eda
        outp['address'] = ','.join(addr[0] for addr in address if addr[0] is not None)
        outp['amountSatoshi'] = str(vout.value)
        outp['amount'] = str_money_value(vout.value)
        outp['script'] = vout.pk_script
        t['outputs'].append(outp)

    return t
for bad_addr in args.bad_addr: bad_addr = CBitcoinAddress(bad_addr) txout = CTxOut(args.dust, bad_addr.to_scriptPubKey()) tx.vout.append(txout) # Add inputs until we meet the fee1 threshold unspent = sorted(rpc.listunspent(1), key=lambda x: x['amount']) value_in = 0 value_out = sum([vout.nValue for vout in tx.vout]) while (value_in - value_out) / len(tx.serialize()) < feeperbyte1: # What's the delta fee that we need to get to our desired fees per byte at # the current tx size? delta_fee = math.ceil((feeperbyte1 * len(tx.serialize())) - (value_in - value_out)) logging.debug('Delta fee: %s' % str_money_value(delta_fee)) # If we simply subtract that from the change outpoint are we still above # the dust threshold? if change_txout.nValue - delta_fee > args.dust: change_txout.nValue -= delta_fee value_out -= delta_fee # Do we need to add another input? if value_in - value_out < 0: new_outpoint = unspent[-1]['outpoint'] new_amount = unspent[-1]['amount'] unspent = unspent[:-1] logging.debug('Adding new input %s:%d with value %s BTC' % \ (b2lx(new_outpoint.hash), new_outpoint.n,
tx2_fee = tx1_fee if tx1_fee / tx1_size > tx2_fee / tx2_size: d = int(tx1_fee * (tx2_size / tx1_size) - tx2_fee) tx2.vout[0].nValue -= d tx2_fee += d # Pay for the relay bandwidth consumed by the replacement. # # fundrawtransaction can't take this into account, so just calculate that delta # and reduce the change output by it. # # Unfortunately fundrawtransaction returns empty scriptSigs, so we have to # do this after signing to know how big the transaction actually is. relay_bw_fee = int(tx2_size/1000 * args.relay_bw_feerate*COIN) logging.info('Paying %s for relay bandwidth' % str_money_value(relay_bw_fee)) # TODO: handle case where this brings nValue below the dust limit tx2.vout[0].nValue -= relay_bw_fee r = rpc.signrawtransaction(tx2) assert(r['complete']) tx2 = r['tx'] logging.info('Old size: %.3f KB, Old fees: %s, %s BTC/KB' % \ (tx1_size / 1000, str_money_value(tx1_fee), str_money_value((tx1_fee/tx1_size) * 1000))), logging.info('New size: %.3f KB, New fees: %s, %s BTC/KB' % \ (tx2_size / 1000, str_money_value(tx2_fee),
def __do_bitcoin(self):
    """Do Bitcoin-related maintenance.

    One pass of the calendar daemon's main loop: on each new block,
    confirm / reorg-recover pending timestamp txs and detect ours being
    mined (by txid lookup); then, if the randomized minimum interval has
    elapsed and commitments are pending, build, sign and broadcast a
    timestamp transaction, doubling the feerate on rejection.
    """
    # FIXME: we shouldn't have to create a new proxy each time, but with
    # current python-bitcoinlib and the RPC implementation it seems that
    # the proxy connection can timeout w/o recovering properly.
    proxy = bitcoin.rpc.Proxy()

    new_blocks = self.known_blocks.update_from_proxy(proxy)

    # code after this if it's executed only when we have new blocks, it simplify reasoning at the cost of not
    # having a broadcasted tx immediately after we have a new cycle (the calendar wait the next block)
    if not new_blocks:
        return

    for (block_height, block_hash) in new_blocks:
        logging.info("New block %s at height %d" % (b2lx(block_hash), block_height))

        # Save commitments to disk that have reached min_confirmations
        confirmed_tx = self.txs_waiting_for_confirmation.pop(
            block_height - self.min_confirmations + 1, None)
        if confirmed_tx is not None:
            self.__save_confirmed_timestamp_tx(confirmed_tx)

        # If there already are txs waiting for confirmation at this
        # block_height, there was a reorg and those pending commitments now
        # need to be added back to the pool
        reorged_tx = self.txs_waiting_for_confirmation.pop(
            block_height, None)
        if reorged_tx is not None:
            # FIXME: the reorged transaction might get mined in another
            # block, so just adding the commitments for it back to the pool
            # isn't ideal, but it is safe
            logging.info(
                'tx %s at height %d removed by reorg, adding %d commitments back to pending' %
                (b2lx(reorged_tx.tx.GetTxid()), block_height,
                 len(reorged_tx.commitment_timestamps)))
            for reorged_commitment_timestamp in reorged_tx.commitment_timestamps:
                self.pending_commitments.add(
                    reorged_commitment_timestamp.msg)

        # Check if this block contains any of the pending transactions
        block = None
        while block is None:
            try:
                block = proxy.getblock(block_hash)
            except KeyError:
                # Must have been a reorg or something, return
                logging.error("Failed to get block")
                return
            except BrokenPipeError:
                # RPC connection died; back off and retry with a fresh proxy.
                logging.error("BrokenPipeError to get block")
                time.sleep(5)
                proxy = bitcoin.rpc.Proxy()

        # the following is an optimization, by pre computing the tx_id we rapidly check if our unconfirmed tx
        # is in the block
        block_txids = set(tx.GetTxid() for tx in block.vtx)

        # Check all potential pending txs against this block.
        # iterating in reverse order to prioritize most recent digest which commits to a bigger merkle tree
        for unconfirmed_tx in self.unconfirmed_txs[::-1]:
            if unconfirmed_tx.tx.GetTxid() not in block_txids:
                continue
            confirmed_tx = unconfirmed_tx  # Success! Found tx

            block_timestamp = make_timestamp_from_block_tx(
                confirmed_tx, block, block_height)

            logging.info("Found commitment %s in tx %s" %
                         (b2x(confirmed_tx.tip_timestamp.msg),
                          b2lx(confirmed_tx.tx.GetTxid())))

            # Success! Rebuild the merkle tree over the first n pending
            # commitments (the ones this tx committed to).
            (tip_timestamp, commitment_timestamps) = self.__pending_to_merkle_tree(
                confirmed_tx.n)
            mined_tx = TimestampTx(confirmed_tx.tx, tip_timestamp,
                                   commitment_timestamps)
            assert tip_timestamp.msg == unconfirmed_tx.tip_timestamp.msg

            mined_tx.tip_timestamp.merge(block_timestamp)

            logging.debug("Removing %d commitments from pending" %
                          (unconfirmed_tx.n))
            for commitment in tuple(
                    self.pending_commitments)[0:unconfirmed_tx.n]:
                self.pending_commitments.remove(commitment)

            assert self.min_confirmations > 1
            logging.info(
                "Success! %d commitments timestamped, now waiting for %d more confirmations" %
                (len(mined_tx.commitment_timestamps),
                 self.min_confirmations - 1))

            # Add pending_tx to the list of timestamp transactions that
            # have been mined, and are waiting for confirmations.
            self.txs_waiting_for_confirmation[block_height] = mined_tx

            # Erase all unconfirmed txs, as they all conflict with each other
            self.unconfirmed_txs.clear()

            # And finally, we can reset the last time a timestamp
            # transaction was mined to right now.
            self.last_timestamp_tx = time.time()

            break

    # Randomized interval (1x-2x min_tx_interval) between timestamp txs.
    time_to_next_tx = int(self.last_timestamp_tx + self.min_tx_interval *
                          random.uniform(1, 2) - time.time())
    if time_to_next_tx > 0:
        # Minimum interval between transactions hasn't been reached, so do nothing
        logging.debug("Waiting %ds before next tx" % time_to_next_tx)
        return

    if not self.pending_commitments:
        logging.debug("No pending commitments, no tx needed")
        return

    if self.unconfirmed_txs:
        # Bump the most recent unconfirmed tx rather than starting fresh.
        (prev_tx, prev_tip_timestamp,
         prev_commitment_timestamps) = self.unconfirmed_txs[-1]
    else:  # first tx of a new cycle
        # Find the biggest unspent output that's confirmed
        unspent = find_unspent(proxy)
        if not len(unspent):
            logging.error("Can't timestamp; no spendable outputs")
            return

        # Bech32 change address fetched via raw RPC calls
        # (proxy helpers don't expose the address-type argument).
        change_addr = proxy._call("getnewaddress", "", "bech32")
        change_addr_info = proxy._call("getaddressinfo", change_addr)
        change_addr_script = x(change_addr_info['scriptPubKey'])

        prev_tx = self.__create_new_timestamp_tx_template(
            unspent[-1]['outpoint'], unspent[-1]['amount'],
            change_addr_script)

        logging.debug('New timestamp tx, spending output %r, value %s' %
                      (unspent[-1]['outpoint'],
                       str_money_value(unspent[-1]['amount'])))

    (tip_timestamp, commitment_timestamps) = self.__pending_to_merkle_tree(
        len(self.pending_commitments))
    logging.debug("New tip is %s" % b2x(tip_timestamp.msg))

    # make_merkle_tree() seems to take long enough on really big adds
    # that the proxy dies
    proxy = bitcoin.rpc.Proxy()

    sent_tx = None
    relay_feerate = self.relay_feerate
    while sent_tx is None:
        unsigned_tx = self.__update_timestamp_tx(prev_tx,
                                                 tip_timestamp.msg,
                                                 proxy.getblockcount(),
                                                 relay_feerate)

        fee = _get_tx_fee(unsigned_tx, proxy)
        if fee is None:
            logging.debug("Can't determine txfee of transaction; skipping")
            return
        if fee > self.max_fee:
            logging.error("Maximum txfee reached!")
            return

        r = proxy.signrawtransaction(unsigned_tx)
        if not r['complete']:
            logging.error("Failed to sign transaction! r = %r" % r)
            return
        signed_tx = r['tx']

        try:
            proxy.sendrawtransaction(signed_tx)
        except bitcoin.rpc.JSONRPCError as err:
            if err.error['code'] == -26:
                logging.debug("Err: %r" % err.error)
                # Insufficient priority - basically means we didn't
                # pay enough, so try again with a higher feerate
                relay_feerate *= 2
                continue
            else:
                raise err  # something else, fail!

        sent_tx = signed_tx

    if self.unconfirmed_txs:
        logging.info(
            "Sent timestamp tx %s, replacing %s; %d total commitments; %d prior tx versions" %
            (b2lx(sent_tx.GetTxid()), b2lx(prev_tx.GetTxid()),
             len(commitment_timestamps), len(self.unconfirmed_txs)))
    else:
        logging.info("Sent timestamp tx %s; %d total commitments" %
                     (b2lx(sent_tx.GetTxid()), len(commitment_timestamps)))
    self.unconfirmed_txs.append(
        UnconfirmedTimestampTx(sent_tx, tip_timestamp,
                               len(commitment_timestamps)))
def parse_match_msg(self, msg, version, ntp, con=None):
    """Process a space-separated "match" message from a client.

    Decodes the base64 contract, both handshake messages, and the signed
    collateral info from *msg*, validates the handshake/collateral
    signatures against the contract's buyer/seller ECDSA keys, records the
    collateral info for the instance, and then checks whether both sides
    of the contract already have satisfied setup TXs. If both matching
    vouts are found and the fee outputs check out, a "ready" message is
    sent to each connected client.

    Args:
        msg: Space-separated message string; unpacked into version, ntp,
            rpc, contract_msg, our_handshake_msg, their_handshake_msg,
            collateral_info, instance_id, ip_addr, port.
        version: NOTE(review): immediately shadowed by the value unpacked
            from *msg* — the argument is effectively unused.
        ntp: NOTE(review): likewise shadowed by the unpacked value.
        con: Unused in this body.

    Returns:
        Always an empty list, both on success and on every validation
        failure (errors are only print()ed).
    """
    #Unpack values.
    # NOTE(review): `rpc` and (below) `pub_2` are unpacked but never used here.
    version, ntp, rpc, contract_msg, our_handshake_msg, their_handshake_msg, collateral_info, instance_id, ip_addr, port = msg.split(" ")
    collateral_info = base64.b64decode(collateral_info.encode("ascii")).decode("utf-8")
    contract_msg = base64.b64decode(contract_msg.encode("ascii")).decode("utf-8")
    # Hash of the decoded contract text; used to match handshakes and to key
    # the stored collateral info.
    contract_hash = hashlib.sha256(contract_msg.encode("ascii")).hexdigest()
    our_handshake_msg = base64.b64decode(our_handshake_msg.encode("ascii")).decode("utf-8")
    our_handshake = self.parse_handshake_msg(our_handshake_msg)
    their_handshake_msg = base64.b64decode(their_handshake_msg.encode("ascii")).decode("utf-8")
    their_handshake = self.parse_handshake_msg(their_handshake_msg)
    contract = self.hybrid_protocol.parse_contract(contract_msg)
    print(collateral_info)

    #Does the instance exist?
    if instance_id not in self.instances:
        print(self.instances)
        print(instance_id)
        print("Instance doesn't exist mate.")
        return []
    else:
        #Record IP address.
        # Used later to build the client ident for sending the ready msg.
        self.instances[instance_id]["con"]["ip_addr"] = ip_addr
        self.instances[instance_id]["con"]["port"] = port

    #Are the handshake hashes valid?
    # Both sides must have handshaken over the same contract hash.
    if our_handshake["contract_hash"] != their_handshake["contract_hash"]:
        print("errro 234324234")
        return [] #What are you doing?

    # And that hash must match the contract text actually sent.
    if our_handshake["contract_hash"] != contract_hash:
        print("erorr 34545435")
        return [] #Nice try.

    #Check "our handshake" is valid.
    # Try the buyer key first, then the seller key; whichever verifies the
    # signature identifies which actor sent "our" handshake. The matched
    # pair is removed so the remaining one must verify "their" handshake.
    actor = None
    ecdsa_pairs = [
        contract["buyer"]["ecdsa"][0],
        contract["seller"]["ecdsa"][0]
    ]
    our_handshake_msg = our_handshake_msg.split(" ")
    # Signature is the last space-separated field; the rest is the signed text.
    our_handshake_sig = our_handshake_msg.pop()
    for ecdsa_pair in ecdsa_pairs:
        if ecdsa_pair.valid_signature(our_handshake_sig, " ".join(our_handshake_msg)):
            if ecdsa_pair.get_public_key() == contract["buyer"]["ecdsa"][0].get_public_key():
                actor = "buyer"
            else:
                actor = "seller"
            ecdsa_pairs.remove(ecdsa_pair)
            break

    # No key verified the signature -> the list was never reduced.
    if len(ecdsa_pairs) == 2:
        print("Invalid handshake 1.")
        return []

    #Check "their handshake" is valid.
    # The one remaining key (the counter-party's) must verify their handshake.
    their_handshake_msg = their_handshake_msg.split(" ")
    their_handshake_sig = their_handshake_msg.pop()
    if not ecdsa_pairs[0].valid_signature(their_handshake_sig, " ".join(their_handshake_msg)):
        print("Invalid handshake 2.")
        return []

    #Check signed collateral info msg.
    chunk_size, arbiter_pub, sig_1, pub_1, sig_2, pub_2 = collateral_info.split(" ")
    # The declared chunk size must match one side of the contract.
    if C(chunk_size) != contract["seller"]["chunk_size"] and C(chunk_size) != contract["buyer"]["chunk_size"]:
        print("errror 55556")
        return []

    # The first pub key must belong to the actor identified above.
    if pub_1 != contract[actor]["ecdsa"][0].get_public_key():
        print("errorr 55557")
        return []

    # Both of the actor's ECDSA keys must have signed "<chunk_size> <arbiter_pub>".
    collateral_unsigned = "%s %s" % (chunk_size, arbiter_pub)
    if not contract[actor]["ecdsa"][0].valid_signature(sig_1, collateral_unsigned):
        print("errorr 55558")
        return []

    if not contract[actor]["ecdsa"][1].valid_signature(sig_2, collateral_unsigned):
        print("errror 55559")
        return []

    #Record proof they agree on the transfer size.
    print("Storing collateral_info")
    print(contract_hash)
    print(instance_id)
    self.instances[instance_id]["collateral_info"][contract_hash] = collateral_info
    print(self.instances)

    #Check whether contract has any existing satisfied setup TXs.
    #As in, someone who has setup the right outputs in an existing setup tx.
    sides = {
        "buyer": "seller",
        "seller": "buyer"
    }
    found = {}
    for side in list(sides):
        #Have we seen this before?
        deposit_txid = contract[side]["deposit_txid"]
        if not deposit_txid in self.green_addresses:
            continue

        #Find a valid contract output.
        # NOTE(review): `index` starts at 1 because vout 0 is the fee output
        # (see the comment on the `instance["locked"]` block below) — confirm
        # against the setup-TX builder.
        instance = self.green_addresses[deposit_txid]
        index = 1
        for vout in instance["contract"]["vouts"]:
            #Is this our contract?
            # Rebuild the expected P2SH scriptPubKey for this pairing
            # (us + them + arbiter) and compare it to the actual vout.
            ecdsa_us = contract[side]["ecdsa"]
            ecdsa_them = contract[sides[side]]["ecdsa"]
            ecdsa_arbiter = ECDSACrypt(self.config["arbiter_key_pairs"][0]["pub"])
            redeem_script = bond_redeem_script(ecdsa_us, ecdsa_them, ecdsa_arbiter)
            redeem_script_hash160 = hash160_script(redeem_script)
            p2sh_script_pub_key = CScript([OP_HASH160, redeem_script_hash160["bin"], OP_EQUAL])
            if vout.scriptPubKey != p2sh_script_pub_key:
                index += 1
                continue

            #Do the amounts add up?
            # The output must cover the side's total minus one chunk.
            vout_amount = Decimal(str_money_value(vout.nValue))
            if side == "buyer":
                calibrated_amount = contract["trade"].total.as_decimal
                calibrated_amount -= contract["buyer"]["chunk_size"].as_decimal
            else:
                calibrated_amount = contract["trade"].amount.as_decimal
                calibrated_amount -= contract["seller"]["chunk_size"].as_decimal
            if vout_amount < calibrated_amount:
                print("Insufficent vout_amount.")
                index += 1
                continue

            #Is there a valid collateral amount?
            # NOTE(review): this `continue` skips without bumping `index`,
            # unlike the two branches above — confirm that is intentional.
            if instance["collateral_info"] == {}:
                continue

            #Record details.
            found[side] = {
                "instance": instance,
                "vout": vout
            }

            #Save chunk size details.
            instance["chunks"][index] = {}
            instance["chunks"][index]["buyer"] = contract["buyer"]["chunk_size"]
            instance["chunks"][index]["seller"] = contract["seller"]["chunk_size"]
            break

    #Both vouts found -- check they both work.
    if len(list(found)) == 2:
        print("Both vouts found.")
        works = found

        #Lock the outputs in both setups.
        broadcast = []
        for side in list(works):
            instance = works[side]["instance"]
            vout = works[side]["vout"]

            #Lock vouts.
            index = instance["contract"]["vouts"].index(vout) + 1
            if vout not in instance["locked"]:
                """
                The magic + 1 is because vout 0 is already for the fee
                output. Everything after that is the contract followed by
                a possible change -- this keeps the vout index in the
                correct order relative to the TX.
                """
                instance["locked"].append(index)

            #Their index.
            their_instance = works[sides[side]]["instance"]
            their_vout = works[sides[side]]["vout"]
            their_index = their_instance["contract"]["vouts"].index(their_vout) + 1

            #Save instance references for matched contract.
            # Cross-link this vout to the counter-party's instance/vout so
            # the ready message can reference both sides.
            their_instance_id = their_instance["id"]
            instance["contract"]["links"][index] = {
                "instance_id": their_instance_id,
                "index": their_index,
                "contract_hash": contract_hash,
            }

            #This means its ready to broadcast.
            # All contract vouts locked -> queue this side's signed setup TX.
            if len(instance["contract"]["vouts"]) == len(instance["locked"]):
                """
                The old TXID for the unsigned transaction is intentionally
                used to make the client-side logic easier for parsing
                status messages. Basically, it will tell the client which
                signed transaction corresponds to their original unsigned
                transaction and hence which coin client to broadcast it to.
                """
                tx_info = {
                    "tx": instance["setup"]["signed"]["tx"],
                    "txid": instance["setup"]["txid"]
                }
                broadcast.append(tx_info)

        #All inputs locked - broadcast setup TX.
        if len(broadcast) == 2:
            print(broadcast[0])
            print(broadcast[1])
            print("Broadcasting bruh.")

            #Check fee output is correct.
            trade_fee = C(self.config["trade_fee"])
            # valid_fees acts as a boolean flag: 1 until any side fails.
            valid_fees = 1
            for side in list(works):
                instance = works[side]["instance"]
                collateral = C(0)
                index = 1

                #Add up micro-collateral amounts.
                # Walk the contract vouts in order; stop at the first vout
                # with incomplete chunk records.
                for vout in instance["contract"]["vouts"]:
                    #Have the chunk amounts been recorded?
                    if index not in instance["chunks"]:
                        break
                    chunks = instance["chunks"][index]
                    if "buyer" not in chunks:
                        break
                    if "seller" not in chunks:
                        break

                    #Calculate relative chunk size.
                    # NOTE(review): `vout_amount` is computed but unused in
                    # this loop — only the recorded chunk size is summed.
                    vout_amount = Decimal(str_money_value(vout.nValue))
                    collateral += chunks[side]

                    #Next contract vout.
                    index += 1

                #Check trade fee.
                # Expected fee = summed collateral + trade_fee percentage of
                # the coins actually committed (total minus change).
                total_coins = C(instance["fee"]["total"]) + instance["contract"]["total"]
                change = C(instance["change"]["total"])
                expected_fee = collateral + ((total_coins - change) * trade_fee)

                #Check fee output amount.
                if expected_fee != C(instance["fee"]["total"]):
                    print("Invalid fees")
                    print(expected_fee)
                    print(C(instance["fee"]["total"]))
                    valid_fees = 0

            #Broadcast.
            if valid_fees:
                #There's no circuit checks or anything at this point.
                #This is a prototype.
                for side in list(works):
                    #Build ready message.
                    instance = works[side]["instance"]
                    ready_msg = self.new_ready_msg(instance["id"], instance["setup"]["sig_3"], instance["contract"]["links"])
                    print(ready_msg)

                    #Send message to client.
                    # Ident matches the ip:port recorded at the top of this
                    # handler; only currently-connected clients are notified.
                    ident = instance["con"]["ip_addr"] + ":" + str(instance["con"]["port"])
                    if ident in self.clients:
                        print("Successfully broadcast")
                        self.clients[ident].send_line(ready_msg)
            else:
                print("Did not broadcast - invalid fees")

    return []