def __loop(self):
    logging.info("Starting stamper loop")

    journal = Journal(self.calendar.path + "/journal")

    try:
        with open(self.calendar.path + "/journal.known-good", "r") as known_good_fd:
            idx = int(known_good_fd.read().strip())
    except FileNotFoundError as exp:
        idx = 0

    while not self.exit_event.is_set():
        self.__do_bitcoin()

        try:
            commitment = journal[idx]
        except KeyError:
            self.exit_event.wait(1)
            continue

        # Is this commitment already stamped?
        if commitment in self.calendar:
            logging.debug("Commitment %s (idx %d) already stamped" % (b2x(commitment), idx))
            idx += 1
            continue

        self.pending_commitments.add(commitment)
        logging.debug("Added %s (idx %d) to pending commitments; %d total" %
                      (b2x(commitment), idx, len(self.pending_commitments)))

        idx += 1
def test_transaction_deserialization_and_serialization(self):
    # Transaction from Decred unit test.
    raw_tx = x('01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff00ffffffff0200f2052a01000000abab434104d64bdfd09eb1c5fe295abdeb1dca4281be988e2da0b6c1c6a59dc226c28624e18175e851c96b973d81b01cc31f047834bc06d6d6edf620d184241a6aed8b63a6ac00e1f50500000000bcbc434104d64bdfd09eb1c5fe295abdeb1dca4281be988e2da0b6c1c6a59dc226c28624e18175e851c96b973d81b01cc31f047834bc06d6d6edf620d184241a6aed8b63a6ac00000000000000000112121212121212121515151534343434070431dc001b0162')
    tx = Transaction.deserialize(raw_tx)
    self.assertEqual(1, tx.version)
    self.assertEqual(0, tx.locktime)
    self.assertEqual(0, tx.expiry)

    # Test input.
    self.assertEqual(1, len(tx.txins))
    txin = tx.txins[0]
    self.assertEqual(1302123111085380114, txin.value)
    self.assertEqual(353703189, txin.block_height)
    self.assertEqual(875836468, txin.block_index)
    self.assertEqual(x('0431dc001b0162'), txin.sig_script)
    self.assertEqual(4294967295, txin.sequence)

    self.assertEqual(2, len(tx.txouts))

    # Test output 0.
    txout = tx.txouts[0]
    self.assertEqual(5000000000, txout.value)
    self.assertEqual(43947, txout.version)
    self.assertEqual(x('4104d64bdfd09eb1c5fe295abdeb1dca4281be988e2da0b6c1c6a59dc226c28624e18175e851c96b973d81b01cc31f047834bc06d6d6edf620d184241a6aed8b63a6ac'), txout.pk_script)

    # Test output 1.
    txout = tx.txouts[1]
    self.assertEqual(100000000, txout.value)
    self.assertEqual(48316, txout.version)
    self.assertEqual(x('4104d64bdfd09eb1c5fe295abdeb1dca4281be988e2da0b6c1c6a59dc226c28624e18175e851c96b973d81b01cc31f047834bc06d6d6edf620d184241a6aed8b63a6ac'), txout.pk_script)

    self.assertEqual(b2x(raw_tx), b2x(tx.serialize()))
def do_claim(self):
    """Replace unsigned outputs."""
    if not self.tx:
        self.error('Invalid or nonexistent transaction.')
        return
    dest = str(self.destination_edit.text())
    hash160 = dest
    # Try to decode address
    try:
        raw = CBase58Data(dest).to_bytes()
        hash160 = '0x' + b2x(raw)
    except Exception:
        # Try to parse hash160
        if len(dest.replace('0x', '')) == 40:
            hash160 = dest

    if not is_hex(hash160) or len(hash160.replace('0x', '')) != 40:
        self.error('Could not parse destination: %s' % hash160)
        return

    unsigned_outputs = get_unsigned_outputs(self.tx)
    if not unsigned_outputs:
        self.error('There are no unsigned outputs.')
        return

    new_tx = replace_outputs(self.tx, hash160)
    self.result_edit.setPlainText(b2x(new_tx.serialize()))
    self.info('Successfully altered outputs: %s' % unsigned_outputs)
def stream_deserialize(cls, f, protover=PROTO_VERSION):
    recvbuf = ser_read(f, 4 + 12 + 4 + 4)

    # check magic
    if recvbuf[:4] != bitcoin.params.MESSAGE_START:
        raise ValueError("Invalid message start '%s', expected '%s'" %
                         (b2x(recvbuf[:4]), b2x(bitcoin.params.MESSAGE_START)))

    # remaining header fields: command, msg length, checksum
    command = recvbuf[4:4 + 12].split(b"\x00", 1)[0]
    msglen = struct.unpack(b"<i", recvbuf[4 + 12:4 + 12 + 4])[0]
    checksum = recvbuf[4 + 12 + 4:4 + 12 + 4 + 4]

    # read message body
    recvbuf += ser_read(f, msglen)
    msg = recvbuf[4 + 12 + 4 + 4:4 + 12 + 4 + 4 + msglen]

    # verify the double-SHA256 checksum over the payload
    th = hashlib.sha256(msg).digest()
    h = hashlib.sha256(th).digest()
    if checksum != h[:4]:
        raise ValueError("got bad checksum %s" % repr(recvbuf))

    if command in messagemap:
        cls_map = messagemap[command]
        # print("Going to deserialize '%s'" % msg)
        return cls_map.msg_deser(_BytesIO(msg))
    else:
        print("Command '%s' not in messagemap" % repr(command))
        return None
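# Minimal illustrative counterpart (not part of the original module): building the
# 24-byte header that stream_deserialize() above parses. Layout: 4-byte network
# magic, 12-byte null-padded command, little-endian payload length, then the first
# 4 bytes of the double-SHA256 of the payload. The default `magic` here is
# mainnet's and is only an assumption; real code would take it from
# bitcoin.params.MESSAGE_START.
import hashlib
import struct

def build_message_header(command, payload, magic=b'\xf9\xbe\xb4\xd9'):
    checksum = hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4]
    return magic + command.ljust(12, b'\x00') + struct.pack(b'<i', len(payload)) + checksum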
def cmd_witnessinfo(args):
    witness = BitcoinSealWitness.deserialize(args.witness_fd.read())

    print('Hash:\t\t%s' % b2x(witness.hash))
    print('Txid:\t\t%s' % b2lx(witness.txinproof.txproof.txhash))
    print('Seal Hash:\t%s' % b2x(witness.seal.hash))
    print('Seal OutPoint:\t%s:%d' % (b2x(witness.seal.outpoint.hash), witness.seal.outpoint.n))
def data(self, index, role=Qt.DisplayRole):
    if not index.isValid() or self.extkey is None:
        return None

    data = None
    col = index.column()
    key = self.extkey
    if role in [Qt.DisplayRole, Qt.EditRole]:
        if col == self.DEPTH:
            data = key.tree_depth()
        elif col == self.FPRINT:
            data = b2x(key.fingerprint())
        elif col == self.CHILD_NUM:
            data = key.child_index()
        elif col == self.CHAINCODE:
            data = b2x(key.chain_code())
        elif col == self.PUBKEY:
            data = key.sec_as_hex()
        elif col == self.PARENT_FPRINT:
            data = b2x(key.parent_fingerprint())
        elif col == self.IS_PRIVATE:
            data = key.is_private()
        elif col == self.PRIVKEY:
            data = '%x' % key.secret_exponent() if key.is_private() else ''
        elif col == self.BASE58:
            data = key.as_text()

    return QVariant(data)
def __commitment_to_filename(self, commitment):
    assert len(commitment) >= 20
    return (self.path + '/' +
            b2x(commitment[0:1]) + '/' +
            b2x(commitment[1:2]) + '/' +
            b2x(commitment[2:3]) + '/' +
            b2x(commitment[3:4]) + '/' +
            b2x(commitment))
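# Illustrative sketch only (not from the original class): the method above shards
# commitments across four single-byte directory levels so no one directory
# accumulates too many files. A standalone equivalent, with `path` as a
# hypothetical base directory:
from bitcoin.core import b2x

def commitment_to_filename(path, commitment):
    assert len(commitment) >= 20
    parts = [b2x(commitment[i:i + 1]) for i in range(4)]
    return '/'.join([path] + parts + [b2x(commitment)])

# e.g. a commitment whose first bytes are de ad be ef maps to
# '<path>/de/ad/be/ef/<full commitment hex>'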
def stream_deserialize(cls, f, protover=PROTO_VERSION):
    recvbuf = ser_read(f, 4 + 12 + 4 + 4)

    # check magic
    if recvbuf[:4] != bitcoin.params.MESSAGE_START:
        raise ValueError("Invalid message start '%s', expected '%s'" %
                         (b2x(recvbuf[:4]), b2x(bitcoin.params.MESSAGE_START)))

    # remaining header fields: command, msg length, checksum
    command = recvbuf[4:4 + 12].split(b"\x00", 1)[0]
    msglen = struct.unpack(b"<i", recvbuf[4 + 12:4 + 12 + 4])[0]
    checksum = recvbuf[4 + 12 + 4:4 + 12 + 4 + 4]

    return MsgHeader(command, msglen, checksum)
def verify_command(args):
    ctx = StreamDeserializationContext(args.timestamp_fd)

    try:
        detached_timestamp = DetachedTimestampFile.deserialize(ctx)
    except BadMagicError:
        logging.error("Error! %r is not a timestamp file." % args.timestamp_fd.name)
        sys.exit(1)
    except DeserializationError as exp:
        logging.error("Invalid timestamp file %r: %s" % (args.timestamp_fd.name, exp))
        sys.exit(1)

    if args.hex_digest is not None:
        try:
            digest = binascii.unhexlify(args.hex_digest.encode('utf8'))
        except ValueError:
            args.parser.error('Digest must be hexadecimal')

        if not digest == detached_timestamp.file_digest:
            logging.error("Digest provided does not match digest in timestamp, %s (%s)" %
                          (b2x(detached_timestamp.file_digest),
                           detached_timestamp.file_hash_op_class.TAG_NAME))
            sys.exit(1)
    else:
        if args.target_fd is None:
            # Target not specified, so assume it's the same name as the
            # timestamp file minus the .ots extension.
            if not args.timestamp_fd.name.endswith('.ots'):
                args.parser.error('Timestamp filename does not end in .ots')

            target_filename = args.timestamp_fd.name[:-4]
            logging.info("Assuming target filename is %r" % target_filename)

            try:
                args.target_fd = open(target_filename, 'rb')
            except IOError as exp:
                logging.error('Could not open target: %s' % exp)
                sys.exit(1)

        logging.debug("Hashing file, algorithm %s" % detached_timestamp.file_hash_op.TAG_NAME)
        actual_file_digest = detached_timestamp.file_hash_op.hash_fd(args.target_fd)
        logging.debug("Got digest %s" % b2x(actual_file_digest))

        if actual_file_digest != detached_timestamp.file_digest:
            logging.debug("Expected digest %s" % b2x(detached_timestamp.file_digest))
            logging.error("File does not match original!")
            sys.exit(1)

    if not verify_timestamp(detached_timestamp.timestamp, args):
        sys.exit(1)
def main():
    ''' Our main function. '''
    SelectParams('mainnet')

    decoded_transaction = eval(open('transaction_to_sign.txt').read())  # pylint: disable=eval-used

    txin_txid = lx(decoded_transaction['vin'][0]['txid'])
    txin_vout = 0
    tx_in = CMutableTxIn(COutPoint(txin_txid, txin_vout))

    tx_out = []
    for idx in range(len(decoded_transaction['vout'])):
        satoshis = int(COIN * decoded_transaction['vout'][idx]['value'])
        script_pub_key = CScript(bytes.fromhex(decoded_transaction['vout'][idx]['scriptPubKey']['hex']))
        tx_out.append(CMutableTxOut(satoshis, script_pub_key))

    tx_to_spend = CMutableTransaction([tx_in], tx_out)

    priv_1 = CBitcoinSecret.from_secret_bytes(bytes.fromhex(PRIV_HEX_1))
    priv_2 = CBitcoinSecret.from_secret_bytes(bytes.fromhex(PRIV_HEX_2))

    txin_redeem_script = CScript(bytes.fromhex(decoded_transaction['vin'][0]['scriptSig']['hex']))

    # Input 0 is fixed.
    sighash = SignatureHash(txin_redeem_script, tx_to_spend, 0, SIGHASH_ALL)
    signatures = []
    for priv in [priv_1, priv_2]:
        signatures.append(priv.sign(sighash) + bytes([SIGHASH_ALL]))

    tx_in.scriptSig = CScript([CScriptOp(0x00), signatures[0], signatures[1], txin_redeem_script])

    # script_pub_key is defined in the loop above.
    VerifyScript(tx_in.scriptSig, txin_redeem_script, tx_to_spend, 0, (SCRIPT_VERIFY_P2SH,))

    print(b2x(tx_to_spend.serialize()))
def check_refund_works(self, tx_hex, owner_first_sig, owner_second_sig, recipient_sig, actor):
    global error_log_path
    try:
        tx = CTransaction.deserialize(binascii.unhexlify(tx_hex))
        redeem_script = bond_redeem_script(self.ecdsa_us, self.ecdsa_them,
                                           self.factory.ecdsa_arbiters[0], actor)
        redeem_script_hash160 = hash160_script(redeem_script)
        print(tx_hex)
        print(redeem_script)

        tx.vin[0].scriptSig = CScript([OP_0, owner_first_sig, owner_second_sig,
                                       recipient_sig, redeem_script["bin"]])
        p2sh_script_pub_key = CScript([OP_HASH160, redeem_script_hash160["bin"], OP_EQUAL])
        print(redeem_script_hash160)

        VerifyScript(tx.vin[0].scriptSig, p2sh_script_pub_key, tx, 0, (SCRIPT_VERIFY_P2SH,))
        signed_tx_hex = b2x(tx.serialize())
        return {
            "tx_hex": signed_tx_hex,
            "txid": calculate_txid(signed_tx_hex)
        }
    except Exception as e:
        error = parse_exception(e)
        log_exception(error_log_path, error)
        print(error)
        print("Check refund failed.")
        return None
def sign_transaction(self):
    """Sign the transaction."""
    script, txTo, inIdx, hash_type = self.model.get_fields()
    if inIdx >= len(txTo.vin):
        self.set_result_message('Nonexistent input specified for signing.', error=True)
        return
    if not script:
        self.set_result_message('Invalid output script.', error=True)
        return
    privkey = self.get_private_key()
    if not privkey:
        self.set_result_message('Could not parse private key.', error=True)
        return

    sig_hash = chainparams.signature_hash(script, txTo, inIdx, hash_type)

    sig = privkey.sign(sig_hash)
    hash_type_hex = format_hex_string(hex(hash_type), with_prefix=False).decode('hex')
    sig = sig + hash_type_hex
    txTo.vin[inIdx].scriptSig = Script([sig, privkey.pub])

    if self.verify_script.isChecked():
        # Try verify
        try:
            VerifyScript(txTo.vin[inIdx].scriptSig, script, txTo, inIdx, (SCRIPT_VERIFY_P2SH,))
        except Exception as e:
            self.set_result_message('Error when verifying: %s' % str(e), error=True)
            return

    self.dock.deserialize_raw(b2x(txTo.serialize()))
    # Deserializing a tx clears the model, so re-populate.
    self.model.set_fields(script=script.get_human(), inIdx=inIdx, hashType=hash_type)
    self.set_result_message('Successfully set scriptSig for input %d (SigHash type: %s).' %
                            (inIdx, sig_hash_name(hash_type)))
def data(self, index, role=Qt.DisplayRole):
    if not index.isValid():
        return None
    if role not in [Qt.DisplayRole, Qt.ToolTipRole, Qt.EditRole]:
        return None

    data = None
    c = index.column()
    if c == 0:
        if self.utxo_script:
            data = self.utxo_script.get_human()
    elif c == 1:
        if self.tx:
            data = b2x(self.tx.serialize())
    elif c == 2:
        data = self.inIdx
    elif c == 3:
        data = sighash_types_by_value[self.sighash_type]
    elif c == 4:
        if role == Qt.CheckStateRole:
            data = Qt.Checked if self.anyone_can_pay else Qt.Unchecked
        else:
            data = self.anyone_can_pay
    elif c == self.SigHashName:
        data = sig_hash_name(self.sighash_type | SIGHASH_ANYONECANPAY
                             if self.anyone_can_pay else self.sighash_type)
    elif c == self.SigHashExplanation:
        data = sig_hash_explanation(self.sighash_type | SIGHASH_ANYONECANPAY
                                    if self.anyone_can_pay else self.sighash_type)

    return data
def insight_parse_raw_tx(res):
    version = int(res.get('version'))
    locktime = int(res.get('locktime'))
    vin = []
    vout = []

    for i in res.get('vin'):
        prev_txid = i['txid']
        prev_n = int(i['n'])
        seq = int(i['sequence'])
        script_asm = i['scriptSig']['asm']
        script = Script.from_human(script_asm)

        tx_outpoint = COutPoint(lx(prev_txid), prev_n)
        tx_input = CTxIn(tx_outpoint, x(script.get_hex()), seq)
        vin.append(tx_input)

    for o in res.get('vout'):
        value = float(o['value'])
        value = int(value * pow(10, 8))
        script_asm = o['scriptPubKey']['asm']
        script = Script.from_human(script_asm)

        tx_output = CTxOut(value, x(script.get_hex()))
        vout.append(tx_output)

    tx = Transaction(vin, vout, locktime, version)
    return b2x(tx.serialize())
def abe_parse_raw_tx(res):
    version = int(res.get('ver'))
    locktime = int(res.get('lock_time'))
    vin = []
    vout = []

    for i in res.get('in'):
        prev_txid = i['prev_out']['hash']
        prev_n = int(i['prev_out']['n'])
        tx_outpoint = COutPoint(lx(prev_txid), prev_n)

        scriptSig = Script(x(i['raw_scriptSig']))
        sequence = int(i['sequence'])

        tx_input = CTxIn(tx_outpoint, x(scriptSig.get_hex()), sequence)
        vin.append(tx_input)

    for o in res.get('out'):
        value = float(o['value'])
        value = int(value * pow(10, 8))
        script = Script(x(o['raw_scriptPubKey']))

        tx_output = CTxOut(value, x(script.get_hex()))
        vout.append(tx_output)

    tx = Transaction(vin, vout, locktime, version)
    return b2x(tx.serialize())
def cmd_mkclosetx(args):
    seals = collections.OrderedDict()
    for seal_fd in args.seal_fds:
        seal = BitcoinSingleUseSeal.deserialize(seal_fd.read())

        if seal in seals:
            args.parser.error("Duplicate seal: '%s' duplicates '%s'" %
                              (seal_fd.name, seals[seal].name))
        seals[seal] = seal_fd

        logging.debug('Closing seal %s on outpoint %s:%d' %
                      (b2x(seal.hash), b2lx(seal.outpoint.hash), seal.outpoint.n))

    seals = seals.keys()

    close_tx = make_close_seal_tx_template(args.digest, *seals, meth=args.meth)

    print(b2x(close_tx.serialize()))
def fundrawtransaction(self, given_transaction, *args, **kwargs):
    """
    Make up some inputs for the given transaction.
    """
    # just use any txid here
    vintxid = lx("99264749804159db1e342a0c8aa3279f6ef4031872051a1e52fb302e51061bef")

    if isinstance(given_transaction, str):
        given_bytes = x(given_transaction)
    elif isinstance(given_transaction, CMutableTransaction):
        given_bytes = given_transaction.serialize()
    else:
        raise FakeBitcoinProxyException("Wrong type passed to fundrawtransaction.")

    # this is also a clever way to not cause a side-effect in this function
    transaction = CMutableTransaction.deserialize(given_bytes)

    for vout_counter in range(0, self._num_fundrawtransaction_inputs):
        txin = CMutableTxIn(COutPoint(vintxid, vout_counter))
        transaction.vin.append(txin)

    # also allocate a single output (for change)
    txout = make_txout()
    transaction.vout.append(txout)

    transaction_hex = b2x(transaction.serialize())

    return {"hex": transaction_hex, "fee": 5000000}
def as_tx(self):
    sum_in = sum(prevtx.nValue for _, prevtx, _ in self.prevouts)
    sig_size = sum(redeemer.spendbytes for _, _, redeemer in self.prevouts)
    tx_size = (4 +                        # version field
               2 +                        # # of txins
               len(self.prevouts) * 41 +  # txins, excluding sigs
               sig_size +                 # txins, sigs only
               1 +                        # # of txouts
               34 +                       # txout
               4                          # nLockTime field
               )
    feerate = int(self.proxy._call('estimatefee', 1) * COIN)  # satoshis per KB
    if feerate <= 0:
        feerate = 10000
    fees = int(tx_size * feerate / 1000)

    tx = CMutableTransaction(
        [CTxIn(outpoint, nSequence=0) for outpoint, _, _ in self.prevouts],
        [CTxOut(sum_in - fees, self.payto.to_scriptPubKey())],
        0)

    for n, (_, _, redeemer) in enumerate(self.prevouts):
        redeemer.mutate_spend(tx, n)

    unsigned_tx = CTransaction.from_tx(tx)
    for n, (_, _, redeemer) in enumerate(self.prevouts):
        txin = CMutableTxIn.from_txin(tx.vin[n])
        txin.scriptSig = redeemer.sign_spend(unsigned_tx, n)
        tx.vin[n] = CTxIn.from_txin(txin)

    print(b2x(tx.serialize()))
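# Worked example of the size/fee arithmetic above (illustrative numbers only, not
# from the original code): for a single prevout whose redeemer reports
# spendbytes == 107, the estimated size is 4 + 2 + 41 + 107 + 1 + 34 + 4 = 193
# bytes, so with the 10000 satoshi/KB fallback feerate the fee is
# int(193 * 10000 / 1000) = 1930 satoshis, leaving sum_in - 1930 for the single
# payto output.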
def header_from_insight_block(d):
    version = int(d['version'])
    prev_block = lx(d['previousblockhash'])
    merkle_root = lx(d['merkleroot'])
    time = int(d['time'])
    bits = int(d['bits'], 16)
    nonce = int(d['nonce'])
    return b2x(CBlockHeader(version, prev_block, merkle_root, time, bits, nonce).serialize())
def txs_context_menu(self, position):
    menu = QMenu()
    if self.block:
        selected = self.block_widget.txs_widget.view.selectedIndexes()[0]
        r = selected.row()
        tx = self.block.vtx[r]
        raw_tx = b2x(tx.serialize())
        self.handler.add_plugin_actions(self, menu, RAW_TX, raw_tx)

    menu.exec_(self.block_widget.txs_widget.view.viewport().mapToGlobal(position))
def decode_address(self):
    txt = str(self.address_line.text())
    try:
        addr_bytes, version = decode_address(txt)
    except Exception:
        self.hash_line.setText('Could not decode address.')
        self.addr_version.setValue(0)
        return
    self.hash_line.setText(b2x(addr_bytes))
    self.addr_version.setValue(version)
def mutate_tx(tx_hex):
    """
    Mutates a raw transaction using TX malleability in the scriptSig
    (specifically, the OP codes.) This function shouldn't be used beyond
    testing as it uses an ugly eval() hack.
    https://en.bitcoin.it/wiki/Transaction_Malleability
    """
    tx = CTransaction.deserialize(binascii.unhexlify(tx_hex))
    script_sig = repr(tx.vin[0].scriptSig)[9:]
    script_sig = eval("CScript([OP_1, OP_DROP, " + script_sig)
    tx.vin[0].scriptSig = script_sig
    return b2x(tx.serialize())
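# Hedged alternative (not from the original code): the same OP_1/OP_DROP mutation
# can be done without eval(), since CScript is a bytes subclass and the two added
# opcodes are single bytes that leave the rest of the scriptSig parsing unchanged.
import binascii
from bitcoin.core import CMutableTransaction, CTransaction, b2x
from bitcoin.core.script import CScript, OP_1, OP_DROP

def mutate_tx_without_eval(tx_hex):
    tx = CMutableTransaction.from_tx(CTransaction.deserialize(binascii.unhexlify(tx_hex)))
    tx.vin[0].scriptSig = CScript(CScript([OP_1, OP_DROP]) + tx.vin[0].scriptSig)
    return b2x(tx.serialize())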
def str_result(verb, parameter, result):
    rr = ""

    if verb > 0 and result is not None:
        rr += " == "
        result_hex = b2x(result)
        if parameter is not None:
            parameter_hex = b2x(parameter)
            try:
                index = result_hex.index(parameter_hex)
                parameter_hex_highlight = bcolors.BOLD + parameter_hex + bcolors.ENDC
                if index == 0:
                    rr += parameter_hex_highlight + result_hex[index + len(parameter_hex):]
                else:
                    rr += result_hex[0:index] + parameter_hex_highlight
            except ValueError:
                rr += result_hex
        else:
            rr += result_hex

    return rr
def main():
    proxy = bitcoin.rpc.Proxy()

    assert len(sys.argv) > 1

    digests = []
    for f in sys.argv[1:]:
        try:
            with open(f, 'rb') as fd:
                digests.append(Hash(fd.read()))
        except FileNotFoundError as exp:
            if len(f) / 2 in (20, 32):
                digests.append(x(f))
            else:
                raise exp
        except IOError as exp:
            print(exp, file=sys.stderr)
            continue

    for digest in digests:
        unspent = sorted(proxy.listunspent(0), key=lambda _x: hash(_x['amount']))

        txins = [CTxIn(unspent[-1]['outpoint'])]
        value_in = unspent[-1]['amount']

        change_addr = proxy.getnewaddress()
        change_pubkey = proxy.validateaddress(change_addr)['pubkey']
        change_out = CMutableTxOut(params.MAX_MONEY, CScript([change_pubkey, OP_CHECKSIG]))

        digest_outs = [CMutableTxOut(0, CScript([OP_RETURN, digest]))]

        txouts = [change_out] + digest_outs

        tx = CMutableTransaction(txins, txouts)

        FEE_PER_BYTE = 0.00025 * COIN / 1000
        while True:
            tx.vout[0].nValue = int(value_in - max(len(tx.serialize()) * FEE_PER_BYTE, 0.00011 * COIN))

            r = proxy.signrawtransaction(tx)
            assert r['complete']
            tx = r['tx']

            if value_in - tx.vout[0].nValue >= len(tx.serialize()) * FEE_PER_BYTE:
                print(b2x(tx.serialize()))
                print(len(tx.serialize()), 'bytes', file=sys.stderr)
                print(b2lx(proxy.sendrawtransaction(tx)))
                break
def signrawtransaction(self, given_transaction):
    """
    This method does not actually sign the transaction, but it does return
    a transaction based on the given transaction.
    """
    if isinstance(given_transaction, str):
        given_bytes = x(given_transaction)
    elif isinstance(given_transaction, CMutableTransaction):
        given_bytes = given_transaction.serialize()
    else:
        raise FakeBitcoinProxyException("Wrong type passed to signrawtransaction.")

    transaction = CMutableTransaction.deserialize(given_bytes)
    transaction_hex = b2x(transaction.serialize())
    return {"hex": transaction_hex}
def test_tx_valid(self):
    for prevouts, tx, enforceP2SH in load_test_vectors('tx_valid.json'):
        try:
            CheckTransaction(tx)
        except CheckTransactionError:
            self.fail('tx failed CheckTransaction(): ' +
                      str((prevouts, b2x(tx.serialize()), enforceP2SH)))
            continue

        for i in range(len(tx.vin)):
            flags = set()
            if enforceP2SH:
                flags.add(SCRIPT_VERIFY_P2SH)

            VerifyScript(tx.vin[i].scriptSig, prevouts[tx.vin[i].prevout], tx, i, flags=flags)
def parse_script(text):
    try:
        parsed = bitcoin.core.script.CScript(x(text))
    except Exception:
        # Not valid hex / not parseable as a script.
        return False

    if parsed.is_valid():
        # return as array of strings
        as_array = []
        for i in parsed:
            if isinstance(i, int):
                as_array.append(str(i))
            else:
                as_array.append(b2x(i))
        return as_array
    else:
        return False
def choose_address(self):
    active_addresses = self.get_wallet_addresses()
    disp = [x[0] for x in active_addresses]
    active_address = self.prompt(
        disp,
        title='\nLocal wallet (non-zero balance) addresses:\n',
        choicemsg='\nWhich wallet number should I use? '
    )
    self.output('Address {} selected and active'.format(active_addresses[active_address][0]))
    retaddy, retbal = active_addresses[active_address][0], active_addresses[active_address][1]
    self.selected_address = retaddy
    self.selected_address_info = self.access.validateaddress(self.selected_address)
    self.selected_address_info['pubkey'] = b2x(self.selected_address_info['pubkey'])
    return retaddy, retbal
def maketx(tx):
    # txid from blockr. bitcoind does not support tx index!
    txid = lx(tx)
    vout = 0

    outp = COutPoint(txid, vout)
    print("output: %s" % outp)

    # Create the txin structure, which includes the outpoint
    txin = CMutableTxIn(outp)
    print(txin)

    txin_scriptPubKey = CScript([OP_DUP, OP_HASH160, Hash160(seckey.pub), OP_EQUALVERIFY, OP_CHECKSIG])
    print(txin_scriptPubKey)

    amount = 0.001 * COIN
    txout = CMutableTxOut(amount, CBitcoinAddress(a).to_scriptPubKey())
    print(txout)

    # Create the unsigned transaction.
    newtx = CMutableTransaction([txin], [txout])
    print(newtx)

    sighash = SignatureHash(txin_scriptPubKey, newtx, 0, SIGHASH_ALL)
    print(sighash)

    # Now sign it. We have to append the type of signature we want to the end, in
    # this case the usual SIGHASH_ALL.
    sig = seckey.sign(sighash) + bytes([SIGHASH_ALL])
    print(sig)

    # Set the scriptSig of our transaction input appropriately.
    txin.scriptSig = CScript([sig, seckey.pub])

    try:
        VerifyScript(txin.scriptSig, txin_scriptPubKey, newtx, 0, (SCRIPT_VERIFY_P2SH,))
    except Exception:
        pass

    print('*' * 20)
    print(b2x(newtx.serialize()))
def set_fields(self, script=None, txTo=None, inIdx=None, hashType=None):
    """Populate model.

    Args:
        script (str): Human-readable script.
        txTo (Transaction): Transaction.
        inIdx (int): Input index.
        hashType (int): SigHash type.
    """
    if script is not None:
        self.setData(self.index(0, 0), QVariant(script))
    if txTo is not None:
        self.setData(self.index(0, 1), QVariant(b2x(txTo.serialize())))
    if inIdx is not None:
        self.setData(self.index(0, 2), QVariant(inIdx))
    if hashType is not None:
        self.setData(self.index(0, 3), QVariant(hashType & 0x1f), RawRole)
        self.setData(self.index(0, 4), QVariant(hashType & SIGHASH_ANYONECANPAY))
def _repr(o):
    if isinstance(o, bytes):
        return "<{}>".format(b2x(o))
    else:
        return repr(o)
def loop(self):
    logging.info("Starting loop for %s" % self.calendar_url)

    try:
        logging.debug("Opening %s" % self.up_to_path)
        with open(self.up_to_path, 'r') as up_to_fd:
            last_known = int(up_to_fd.read().strip())
    except FileNotFoundError as exp:
        last_known = -1
    logging.info("Checking calendar " + str(self.calendar_url) + ", last_known commitment:" + str(last_known))

    if self.btc_net == 'testnet':
        bitcoin.SelectParams('testnet')
    elif self.btc_net == 'regtest':
        bitcoin.SelectParams('regtest')

    while True:
        start_time = time.time()

        backup_url = urljoin(self.calendar_url, "/experimental/backup/%d" % (last_known + 1))
        logging.debug("Asking " + str(backup_url))
        try:
            r = requests.get(backup_url)
        except Exception as err:
            logging.error("Exception asking %s error message %s, sleeping for %d seconds"
                          % (str(backup_url), str(err), SLEEP_SECS))
            time.sleep(SLEEP_SECS)
            continue

        if r.status_code != 200:
            logging.info("%s not found, sleeping for %d seconds" % (backup_url, SLEEP_SECS))
            time.sleep(SLEEP_SECS)
            continue

        kv_map = Backup.bytes_to_kv_map(r.content)
        attestations = {}
        ops = {}
        for key, value in kv_map.items():
            # print("--- key=" + b2x(key) + " value=" + b2x(value))
            ctx = BytesDeserializationContext(value)

            for _a in range(ctx.read_varuint()):
                attestation = TimeAttestation.deserialize(ctx)
                attestations[key] = attestation

            for _b in range(ctx.read_varuint()):
                op = Op.deserialize(ctx)
                ops[key] = op

        proxy = bitcoin.rpc.Proxy()

        # Verify all bitcoin attestations are valid
        logging.debug("Total attestations: " + str(len(attestations)))
        for key, attestation in attestations.items():
            if attestation.__class__ == BitcoinBlockHeaderAttestation:
                while True:
                    try:
                        blockhash = proxy.getblockhash(attestation.height)
                        block_header = proxy.getblockheader(blockhash)
                        # the following raises an exception and blocks computation if the attestation does not verify
                        attested_time = attestation.verify_against_blockheader(key, block_header)
                        logging.debug("Verifying " + b2x(key) + " result " + str(attested_time))
                        break
                    except Exception as err:
                        logging.info("%s - error contacting bitcoin node, sleeping..." % (err))
                        time.sleep(SLEEP_SECS)
                        proxy = bitcoin.rpc.Proxy()

        # verify all ops connect to an attestation
        logging.debug("Total ops: " + str(len(ops)))
        for key, op in ops.items():
            current_key = key
            current_op = op
            while True:
                next_key = current_op(current_key)
                if next_key in ops:
                    current_key = next_key
                    current_op = ops[next_key]
                else:
                    break
            assert next_key in attestations

        batch = leveldb.WriteBatch()
        for key, value in kv_map.items():
            batch.Put(key, value)
        self.db.db.Write(batch, sync=True)

        last_known = last_known + 1
        try:
            with open(self.up_to_path, 'w') as up_to_fd:
                up_to_fd.write('%d\n' % last_known)
        except FileNotFoundError as exp:
            logging.error(str(exp))
            break

        elapsed_time = time.time() - start_time
        logging.info("Took %ds for %s" % (elapsed_time, str(backup_url)))
def __loop(self):
    logging.info("Starting stamper loop")

    journal = Journal(self.calendar.path + '/journal')

    try:
        with open(self.calendar.path + '/journal.known-good', 'r') as known_good_fd:
            idx = int(known_good_fd.read().strip())
    except FileNotFoundError as exp:
        idx = 0

    while not self.exit_event.is_set():
        # Get all pending commitments
        while len(self.pending_commitments) < self.max_pending:
            try:
                commitment = journal[idx]
            except KeyError:
                break

            # Is this commitment already stamped?
            if commitment not in self.calendar:
                self.pending_commitments.add(commitment)
                if idx % 1000 == 0:
                    logging.debug('Added %s (idx %d) to pending commitments; %d total' %
                                  (b2x(commitment), idx, len(self.pending_commitments)))
            else:
                if idx % 10000 == 0:
                    logging.debug('Commitment at idx %d already stamped' % idx)

            idx += 1

        self.journal_cursor = idx

        try:
            self.__do_bitcoin()
        except bitcoin.rpc.InWarmupError as warmuperr:
            logging.info("Bitcoincore is warming up: %r" % warmuperr)
            time.sleep(5)
        except ValueError as err:
            # If not caused by misconfiguration this error in bitcoinlib
            # usually occurs when bitcoincore is not started
            if str(err).startswith('Cookie file unusable'):
                logging.error("Proxy Authentication Error: Is bitcoincore running?: %r" % err)
                time.sleep(5)
            else:
                logging.error("__do_bitcoin() failed: %r" % err, exc_info=True)
        except Exception as exp:
            # !@#$ Python.
            #
            # Just logging errors like this is garbage, but we don't really
            # know all the ways that __do_bitcoin() will raise an exception
            # so easiest just to ignore and continue onwards.
            #
            # Mainly Bitcoin Core has been hanging up on our RPC
            # connection, and python-bitcoinlib doesn't have great handling
            # of that. In our case we should be safe to just retry as
            # __do_bitcoin() is fairly self-contained.
            logging.error("__do_bitcoin() failed: %r" % exp, exc_info=True)

        self.exit_event.wait(1)
def __str__(self):
    return "TxOut({}, {}, {}, {})".format(b2x(self.tx), self.nout, self.addr,
                                          str_money_value(self.value))
value_in += new_amount
change_txout.nValue += new_amount
value_out += new_amount

tx.vin.append(new_txin)

# re-sign the tx so we can figure out how large the new input's scriptSig will be.
r = rpc.signrawtransaction(tx)
assert r['complete']

tx.vin[-1].scriptSig = r['tx'].vin[-1].scriptSig

logging.debug('New size: %.3f KB, New fees: %s, %s BTC/KB' %
              (len(tx.serialize()) / 1000,
               str_money_value(value_in - value_out),
               str_money_value((value_in - value_out) / len(tx.serialize()) * 1000)))

r = rpc.signrawtransaction(tx)
assert r['complete']
tx = r['tx']

if args.dryrun:
    print(b2x(tx.serialize()))
else:
    logging.debug('Sending tx %s' % b2x(tx.serialize()))
    txid = rpc.sendrawtransaction(tx)
    print(b2lx(txid))
def to_s(o):
    if isinstance(o, bytes):
        return b2x(o)
    else:
        return repr(o)
def test_audit_contract_empty_transaction():
    btc_network = BitcoinTestNet()
    tx = b2x(CTransaction().serialize())

    with raises(ValueError, match='Given transaction has no outputs.'):
        btc_network.audit_contract('', tx)
def T(base58_privkey, expected_hex_pubkey, expected_is_compressed_value):
    key = CBitcoinSecret(base58_privkey)
    self.assertEqual(b2x(key.pub), expected_hex_pubkey)
    self.assertEqual(key.is_compressed, expected_is_compressed_value)
def T(str_addr, expected_scriptPubKey_hexbytes):
    addr = CBitcoinAddress(str_addr)
    actual_scriptPubKey = addr.to_scriptPubKey()
    self.assertEqual(b2x(actual_scriptPubKey), expected_scriptPubKey_hexbytes)
def build_cmdline(self):
    return self.path + [
        '--nonces', str(self.rounds),
        '-i', b2x(self.header.serialize())
    ]
def set_tx(self, tx):
    self.setData(self.index(0, 1), QVariant(b2x(tx.serialize())))
def raw_transaction(self):
    return b2x(self.tx.serialize())
tx.vin.append(new_txin)

value_in += new_amount
change_txout.nValue += new_amount
value_out += new_amount

r = rpc.signrawtransaction(tx)
assert r['complete']
tx.vin[-1].scriptSig = r['tx'].vin[-1].scriptSig

r = rpc.signrawtransaction(tx)
assert r['complete']
tx = CMutableTransaction.from_tx(r['tx'])

print('Payment raw transaction %s' % b2x(tx.serialize()))
print('Payment raw transaction size: %.3f KB, fees: %s, %s BTC/KB' %
      (len(tx.serialize()) / 1000,
       str_money_value(value_in - value_out),
       str_money_value((value_in - value_out) / len(tx.serialize()) * 1000)))

txid = rpc.sendrawtransaction(tx)
print('Sent payment with txid: %s' % b2lx(txid))

print('Waiting for %d seconds before double spending' % 10)
time.sleep(10)
#
# Here we'll create that scriptPubKey from scratch using the pubkey that
# corresponds to the secret key we generated above.
txin_scriptPubKey = CScript([OP_DUP, OP_HASH160, Hash160(seckey.pub), OP_EQUALVERIFY, OP_CHECKSIG])

# Create the txout. This time we create the scriptPubKey from a Bitcoin
# address.
txout = CMutableTxOut(0.001 * COIN, CBitcoinAddress('1C7zdTfnkzmr13HfA2vNm5SJYRK6nEKyq8').to_scriptPubKey())

# Create the unsigned transaction.
tx = CMutableTransaction([txin], [txout])

# Calculate the signature hash for that transaction.
sighash = SignatureHash(txin_scriptPubKey, tx, 0, SIGHASH_ALL)

# Now sign it. We have to append the type of signature we want to the end, in
# this case the usual SIGHASH_ALL.
sig = seckey.sign(sighash) + bytes([SIGHASH_ALL])

# Set the scriptSig of our transaction input appropriately.
txin.scriptSig = CScript([sig, seckey.pub])

# Verify the signature worked. This calls EvalScript() and actually executes
# the opcodes in the scripts to see if everything worked out. If it doesn't an
# exception will be raised.
VerifyScript(txin.scriptSig, txin_scriptPubKey, tx, 0, (SCRIPT_VERIFY_P2SH,))

# Done! Print the transaction to standard output with the bytes-to-hex
# function.
print(b2x(tx.serialize()))
def copy_serialized():
    row = self.view.selectedIndexes()[0].row()
    out = self.model.tx.vout[row]
    data = b2x(out.serialize())
    QApplication.clipboard().setText(data)
try:
    r = proxy.signrawtransaction(tx, [], None, 'NONE|ANYONECANPAY')
except bitcoin.rpc.JSONRPCException as exp:
    if exp.error['code'] == -13:
        pwd = getpass.getpass('Please enter the wallet passphrase with walletpassphrase first: ')
        proxy.walletpassphrase(pwd, 10)
        r = proxy.signrawtransaction(tx, [], None, 'NONE|ANYONECANPAY')
    else:
        raise exp

if not r['complete']:
    print("Error! Couldn't sign transaction:")
    print(b2x(r['tx'].serialize()))
    sys.exit(1)

signed_tx = r['tx']

# Do a sanity check on the transaction
sum_value_discarded = 0
for txin in signed_tx.vin:
    r = proxy.gettxout(txin.prevout)
    sum_value_discarded += r['txout'].nValue

# Abort if the amount is excessively large
if sum_value_discarded > 0.10 * COIN:
    print('Aborting due to excessively large value being discarded. (>0.10 BTC)')
    sys.exit(1)
def __do_bitcoin(self):
    """Do Bitcoin-related maintenance"""

    # FIXME: we shouldn't have to create a new proxy each time, but with
    # current python-bitcoinlib and the RPC implementation it seems that
    # the proxy connection can timeout w/o recovering properly.
    proxy = bitcoin.rpc.Proxy()

    new_blocks = self.known_blocks.update_from_proxy(proxy)

    for (block_height, block_hash) in new_blocks:
        logging.info("New block %s at height %d" % (b2lx(block_hash), block_height))

        # Save commitments to disk that have reached min_confirmations
        confirmed_tx = self.txs_waiting_for_confirmation.pop(block_height - self.min_confirmations + 1, None)
        if confirmed_tx is not None:
            self.__save_confirmed_timestamp_tx(confirmed_tx)

        # If there already are txs waiting for confirmation at this
        # block_height, there was a reorg and those pending commitments now
        # need to be added back to the pool
        reorged_tx = self.txs_waiting_for_confirmation.pop(block_height, None)
        if reorged_tx is not None:
            # FIXME: the reorged transaction might get mined in another
            # block, so just adding the commitments for it back to the pool
            # isn't ideal, but it is safe
            logging.info('tx %s at height %d removed by reorg, adding %d commitments back to pending' %
                         (b2lx(reorged_tx.tx.GetHash()), block_height, len(reorged_tx.commitment_timestamps)))
            for reorged_commitment_timestamp in reorged_tx.commitment_timestamps:
                self.pending_commitments.add(reorged_commitment_timestamp.msg)

        # Check if this block contains any of the pending transactions
        try:
            block = proxy.getblock(block_hash)
        except KeyError:
            # Must have been a reorg or something, return
            logging.error("Failed to get block")
            return

        # Check all potential pending txs against this block.
        for tx in self.unconfirmed_txs:
            block_timestamp = make_timestamp_from_block(tx.tip_timestamp.msg, block, block_height)

            if block_timestamp is None:
                continue

            # Success!
            tx.tip_timestamp.merge(block_timestamp)

            for commitment_timestamp in tx.commitment_timestamps:
                self.pending_commitments.remove(commitment_timestamp.msg)
                logging.debug("Removed commitment %s from pending" % b2x(commitment_timestamp.msg))

            assert self.min_confirmations > 1
            logging.info("Success! %d commitments timestamped, now waiting for %d more confirmations" %
                         (len(tx.commitment_timestamps), self.min_confirmations - 1))

            # Add pending_tx to the list of timestamp transactions that
            # have been mined, and are waiting for confirmations.
            self.txs_waiting_for_confirmation[block_height] = tx

            # Since all unconfirmed txs conflict with each other, we can clear the entire lot
            self.unconfirmed_txs.clear()

            # And finally, we can reset the last time a timestamp
            # transaction was mined to right now.
            self.last_timestamp_tx = time.time()

    time_to_next_tx = int(self.last_timestamp_tx + self.min_tx_interval - time.time())
    if time_to_next_tx > 0:
        # Minimum interval between transactions hasn't been reached, so do nothing
        logging.debug("Waiting %ds before next tx" % time_to_next_tx)
        return

    prev_tx = None
    if self.pending_commitments and not self.unconfirmed_txs:
        # Find the biggest unspent output that's confirmed
        unspent = find_unspent(proxy)

        if not len(unspent):
            logging.error("Can't timestamp; no spendable outputs")
            return

        # For the change scriptPubKey, we can save a few bytes by using
        # a pay-to-pubkey rather than the usual pay-to-pubkeyhash
        change_addr = proxy.getnewaddress()
        change_pubkey = proxy.validateaddress(change_addr)['pubkey']
        change_scriptPubKey = CScript([change_pubkey, OP_CHECKSIG])

        prev_tx = self.__create_new_timestamp_tx_template(unspent[-1]['outpoint'], unspent[-1]['amount'],
                                                          change_scriptPubKey)

        logging.debug('New timestamp tx, spending output %r, value %s' %
                      (unspent[-1]['outpoint'], str_money_value(unspent[-1]['amount'])))

    elif self.unconfirmed_txs:
        (prev_tx, prev_tip_timestamp, prev_commitment_timestamps) = self.unconfirmed_txs[-1]

    # Send the first transaction even if we don't have a new block
    if prev_tx and (new_blocks or not self.unconfirmed_txs):
        # Update the most recent timestamp transaction with new commitments
        commitment_timestamps = [Timestamp(commitment) for commitment in self.pending_commitments]

        # Remember that commitment_timestamps contains raw commitments,
        # which are longer than necessary, so we sha256 them before passing
        # them to make_merkle_tree, which concatenates whatever it gets (or
        # for the matter, returns what it gets if there's only one item for
        # the tree!)
        commitment_digest_timestamps = [stamp.ops.add(OpSHA256()) for stamp in commitment_timestamps]

        tip_timestamp = make_merkle_tree(commitment_digest_timestamps)

        sent_tx = None
        relay_feerate = self.relay_feerate
        while sent_tx is None:
            unsigned_tx = self.__update_timestamp_tx(prev_tx, tip_timestamp.msg,
                                                     proxy.getblockcount(), relay_feerate)

            fee = _get_tx_fee(unsigned_tx, proxy)
            if fee is None:
                logging.debug("Can't determine txfee of transaction; skipping")
                return
            if fee > self.max_fee:
                logging.error("Maximum txfee reached!")
                return

            r = proxy.signrawtransaction(unsigned_tx)
            if not r['complete']:
                logging.error("Failed to sign transaction! r = %r" % r)
                return
            signed_tx = r['tx']

            try:
                txid = proxy.sendrawtransaction(signed_tx)
            except bitcoin.rpc.JSONRPCError as err:
                if err.error['code'] == -26:
                    logging.debug("Err: %r" % err.error)
                    # Insufficient priority - basically means we didn't
                    # pay enough, so try again with a higher feerate
                    relay_feerate *= 2
                    continue
                else:
                    raise err  # something else, fail!

            sent_tx = signed_tx

        if self.unconfirmed_txs:
            logging.info("Sent timestamp tx %s, replacing %s; %d total commitments" %
                         (b2lx(sent_tx.GetHash()), b2lx(prev_tx.GetHash()), len(commitment_timestamps)))
        else:
            logging.info("Sent timestamp tx %s; %d total commitments" %
                         (b2lx(sent_tx.GetHash()), len(commitment_timestamps)))

        self.unconfirmed_txs.append(TimestampTx(sent_tx, tip_timestamp, commitment_timestamps))
def raw(self):
    return b2x(self.value.to_bytes())
def __do_bitcoin(self):
    """Do Bitmark-related maintenance"""

    # FIXME: we shouldn't have to create a new proxy each time, but with
    # current python-bitmarklib and the RPC implementation it seems that
    # the proxy connection can timeout w/o recovering properly.
    proxy = bitcoin.rpc.Proxy(btc_conf_file=coin_conf_file)

    new_blocks = self.known_blocks.update_from_proxy(proxy)

    # The code after this `if` runs only when we have new blocks; this simplifies
    # reasoning at the cost of not having a broadcast tx immediately after a new
    # cycle starts (the calendar waits for the next block).
    if not new_blocks:
        return

    for (block_height, block_hash) in new_blocks:
        logging.info("New block %s at height %d" % (b2lx(block_hash), block_height))

        # Save commitments to disk that have reached min_confirmations
        confirmed_tx = self.txs_waiting_for_confirmation.pop(block_height - self.min_confirmations + 1, None)
        if confirmed_tx is not None:
            self.__save_confirmed_timestamp_tx(confirmed_tx)

        # If there already are txs waiting for confirmation at this
        # block_height, there was a reorg and those pending commitments now
        # need to be added back to the pool
        reorged_tx = self.txs_waiting_for_confirmation.pop(block_height, None)
        if reorged_tx is not None:
            # FIXME: the reorged transaction might get mined in another
            # block, so just adding the commitments for it back to the pool
            # isn't ideal, but it is safe
            logging.info('tx %s at height %d removed by reorg, adding %d commitments back to pending' %
                         (b2lx(reorged_tx.tx.GetTxid()), block_height, len(reorged_tx.commitment_timestamps)))
            for reorged_commitment_timestamp in reorged_tx.commitment_timestamps:
                self.pending_commitments.add(reorged_commitment_timestamp.msg)

        # Check if this block contains any of the pending transactions
        block = None
        while block is None:
            try:
                block = proxy.getblock(block_hash)
            except KeyError:
                # Must have been a reorg or something, return
                logging.error("Failed to get block")
                return
            except BrokenPipeError:
                logging.error("BrokenPipeError to get block")
                time.sleep(5)
                proxy = bitcoin.rpc.Proxy(btc_conf_file=coin_conf_file)

        # the following is an optimization: by pre-computing the tx_id we rapidly
        # check if our unconfirmed tx is in the block
        block_txids = set(tx.GetTxid() for tx in block.vtx)

        # Check all potential pending txs against this block.
        # Iterating in reverse order to prioritize the most recent digest, which
        # commits to a bigger merkle tree.
        for unconfirmed_tx in self.unconfirmed_txs[::-1]:
            if unconfirmed_tx.tx.GetTxid() not in block_txids:
                continue
            confirmed_tx = unconfirmed_tx  # Success! Found tx
            block_timestamp = make_timestamp_from_block_tx(confirmed_tx, block, block_height)
            logging.info("Found commitment %s in tx %s" %
                         (b2x(confirmed_tx.tip_timestamp.msg), b2lx(confirmed_tx.tx.GetTxid())))

            # Success!
            (tip_timestamp, commitment_timestamps) = self.__pending_to_merkle_tree(confirmed_tx.n)
            mined_tx = TimestampTx(confirmed_tx.tx, tip_timestamp, commitment_timestamps)
            assert tip_timestamp.msg == unconfirmed_tx.tip_timestamp.msg

            mined_tx.tip_timestamp.merge(block_timestamp)

            for commitment in tuple(self.pending_commitments)[0:unconfirmed_tx.n]:
                self.pending_commitments.remove(commitment)
                logging.debug("Removed commitment %s from pending" % b2x(commitment))

            assert self.min_confirmations > 1
            logging.info("Success! %d commitments timestamped, now waiting for %d more confirmations" %
                         (len(mined_tx.commitment_timestamps), self.min_confirmations - 1))

            # Add pending_tx to the list of timestamp transactions that
            # have been mined, and are waiting for confirmations.
            self.txs_waiting_for_confirmation[block_height] = mined_tx

            # Erase all unconfirmed txs, as they all conflict with each other
            self.unconfirmed_txs.clear()

            # And finally, we can reset the last time a timestamp
            # transaction was mined to right now.
            self.last_timestamp_tx = time.time()

            break

    time_to_next_tx = int(self.last_timestamp_tx + self.min_tx_interval - time.time())
    if time_to_next_tx > 0:
        # Minimum interval between transactions hasn't been reached, so do nothing
        logging.debug("Waiting %ds before next tx" % time_to_next_tx)
        return

    if not self.pending_commitments:
        logging.debug("No pending commitments, no tx needed")
        return

    if self.unconfirmed_txs:
        (prev_tx, prev_tip_timestamp, prev_commitment_timestamps) = self.unconfirmed_txs[-1]
    else:  # first tx of a new cycle
        # Find the biggest unspent output that's confirmed
        unspent = find_unspent(proxy)
        if not len(unspent):
            logging.error("Can't timestamp; no spendable outputs")
            return

        change_addr = proxy.getnewaddress()
        prev_tx = self.__create_new_timestamp_tx_template(unspent[-1]['outpoint'], unspent[-1]['amount'],
                                                          change_addr.to_scriptPubKey())

        logging.debug('New timestamp tx, spending output %r, value %s' %
                      (unspent[-1]['outpoint'], str_money_value(unspent[-1]['amount'])))

    (tip_timestamp, commitment_timestamps) = self.__pending_to_merkle_tree(len(self.pending_commitments))
    logging.debug("New tip is %s" % b2x(tip_timestamp.msg))

    # make_merkle_tree() seems to take long enough on really big adds
    # that the proxy dies
    proxy = bitcoin.rpc.Proxy(btc_conf_file=coin_conf_file)

    sent_tx = None
    relay_feerate = self.relay_feerate
    while sent_tx is None:
        unsigned_tx = self.__update_timestamp_tx(prev_tx, tip_timestamp.msg,
                                                 proxy.getblockcount(), relay_feerate)

        fee = _get_tx_fee(unsigned_tx, proxy)
        if fee is None:
            logging.debug("Can't determine txfee of transaction; skipping")
            return
        if fee > self.max_fee:
            logging.error("Maximum txfee reached!")
            return

        r = proxy.signrawtransaction(unsigned_tx)
        if not r['complete']:
            logging.error("Failed to sign transaction! r = %r" % r)
            return
        signed_tx = r['tx']

        try:
            proxy.sendrawtransaction(signed_tx)
        except bitcoin.rpc.JSONRPCError as err:
            if err.error['code'] == -26:
                logging.debug("Err: %r" % err.error)
                # Insufficient priority - basically means we didn't
                # pay enough, so try again with a higher feerate
                relay_feerate *= 2
                continue
            else:
                raise err  # something else, fail!

        sent_tx = signed_tx

    if self.unconfirmed_txs:
        logging.info("Sent timestamp tx %s, replacing %s; %d total commitments; %d prior tx versions" %
                     (b2lx(sent_tx.GetTxid()), b2lx(prev_tx.GetTxid()), len(commitment_timestamps),
                      len(self.unconfirmed_txs)))
    else:
        logging.info("Sent timestamp tx %s; %d total commitments" %
                     (b2lx(sent_tx.GetTxid()), len(commitment_timestamps)))

    self.unconfirmed_txs.append(UnconfirmedTimestampTx(sent_tx, tip_timestamp, len(commitment_timestamps)))
print(user)
print(sgx)

wallet = Wallet(users, sgx)
print(wallet)

feerate = 10000

dust_tx_hex = "02000000000101305b59e3c4fd570247468e723f92fec0e59fa8836155b862a2edd3ef72476013010000001716001457c96285147bf0f667f7b32eb17c4619d7bf39c4fdffffff0250c30000000000001976a914567827d4bedca8a476fc0d6ab47dad54ad52379688ac7c2df1050000000017a914b566c90701c841abd200b9b83274ecd46039a8e6870247304402200896d6255b232221cae6cb1939bd1cb71e13cfb1eb337c00de279aeee143276d0220343eed9b30efdbe2f1b79a5d123485905acae71e91a7b403e3e8b2fedf1a5338012102a4e429c0092ba1640f94bce2462a172284bcf90cd071b67bb4b9f2b9245fb9a928010000"
dust_outpoint = OutPointWithTx(dust_tx_hex, sgx.P2PKHScriptPubkey)

wallet_deposit_tx_hex = "020000000001011bcaa178b7474cb54d352c9cc257b520f88a26a3c64a0fb32f139a6c6f12ce1000000000171600140fd1a7567003391dc2f794fc72078fe309100657fdffffff0300ca9a3b0000000017a91400593b17f9ff1e272c0086b46ec4161de2e89b4387b418d0b20000000017a91487dc561b8ce369d4d738d2fcd8297e29ec1398be8710270000000000001976a914567827d4bedca8a476fc0d6ab47dad54ad52379688ac0247304402200e92ab18c321831d0311628d710064eb01284ad7b36ff370b81e05b69c23d554022022f733925438860ae0e42f53951ac29450e5bbd96b13bfc5a21f357dc32198bc01210321b60b99cc75039de4fa75247a987dcb805edbde8fd903f2e475314e80b3c17198000000"
wallet_depo_outpoint = OutPointWithTx(wallet_deposit_tx_hex, wallet.scriptPubkey())

# try accuse Alice
life_signal, tx1, tx2 = wallet.accuse(dust_outpoint, wallet_depo_outpoint, 0, sgx_seckey)

print('life signal redeem script: {}'.format(b2x(life_signal.redeemScript)))
print('tx1 (hex):', b2x(tx1.serialize()))
print('tx2 (hex):', b2x(tx2.serialize()))

# life_signal_tx_hex = "0100000001b4fc5d489c3b238d98718b9a3ade94921f7cd4bf9a9e5962594e064fab626fa2000000006b48304502210092118be3693405f8ab2476ae4f9b28981c60b014cd8118d33d748dda6eb1651d0220162e0e1d59f790f7ac2582df86aef9dc5b33bd90695f86d800e28b434a813600012102f820895591103d4fa7c7bcb30f8c2a994641be4c8d8587415e70ae0a92fccf99ffffffff02102700000000000017a914eeaf2e143a578e234331ed362faa5d99080d9f5a87e0b0f505000000001976a914567827d4bedca8a476fc0d6ab47dad54ad52379688ac00000000"
life_signal_tx_hex = b2x(tx1.serialize())

tx_appeal = wallet.appeal(
    0,
    user_seckeys[0],
    OutPointWithTx(tx_hex=life_signal_tx_hex,
                   targetScriptPubkey=life_signal.redeemScript.to_p2sh_scriptPubKey()))

print('tx_appeal (hex):', b2x(tx_appeal.serialize()))
def to_raw_tx(self):
    """ return the raw, serialized transaction """
    return b2x(self.tx.serialize())
def __str__(self):
    return "name={}, addr={}, pubkey={}, keyhash={}".format(
        self.name, self.P2PKHScriptAddress, b2x(self.pubkey), b2x(self.keyhash)).replace(",", "\t")
def do_GET(self):
    if self.path == '/':
        self.send_response(200)
        self.send_header('Content-type', 'text/html')

        # Humans are likely to be refreshing this, so keep it up-to-date
        self.send_header('Cache-Control', 'public, max-age=1')

        self.end_headers()

        proxy = bitcoin.rpc.Proxy()

        # FIXME: Unfortunately getbalance() doesn't return the right thing;
        # need to investigate further, but this seems to work.
        str_wallet_balance = str(proxy._call("getbalance"))

        welcome_page = """\
<html>
<head>
    <title>OpenTimestamps Calendar Server</title>
</head>
<body>
<p>This is an <a href="https://opentimestamps.org">OpenTimestamps</a> <a href="https://github.com/opentimestamps/opentimestamps-server">Calendar Server</a> (v%s)</p>

<p>
Pending commitments: %d</br>
Transactions waiting for confirmation: %d</br>
Most recent timestamp tx: %s (%d prior versions)</br>
Most recent merkle tree tip: %s</br>
Best-block: %s, height %d</br>
</br>
Wallet balance: %s BTC</br>
</p>

<p>
You can donate to the wallet by sending funds to: %s</br>
This address changes after every donation.
</p>

</body>
</html>
""" % (otsserver.__version__,
       len(self.calendar.stamper.pending_commitments),
       len(self.calendar.stamper.txs_waiting_for_confirmation),
       b2lx(self.calendar.stamper.unconfirmed_txs[-1].tx.GetTxid()) if self.calendar.stamper.unconfirmed_txs else 'None',
       max(0, len(self.calendar.stamper.unconfirmed_txs) - 1),
       b2x(self.calendar.stamper.unconfirmed_txs[-1].tip_timestamp.msg) if self.calendar.stamper.unconfirmed_txs else 'None',
       bitcoin.core.b2lx(proxy.getbestblockhash()),
       proxy.getblockcount(),
       str_wallet_balance,
       str(proxy.getaccountaddress('')))

        self.wfile.write(welcome_page.encode())

    elif self.path.startswith('/timestamp/'):
        self.get_timestamp()
    elif self.path == '/tip':
        self.get_tip()
    else:
        self.send_response(404)
        self.send_header('Content-type', 'text/plain')

        # a 404 is only going to become not a 404 if the server is upgraded
        self.send_header('Cache-Control', 'public, max-age=3600')

        self.end_headers()
        self.wfile.write(b'Not found')
def upgrade_timestamp(timestamp, args):
    """Attempt to upgrade an incomplete timestamp to make it verifiable

    Returns True if the timestamp has changed, False otherwise.

    Note that this means that if the timestamp is already complete, False will
    be returned as nothing has changed.
    """

    def directly_verified(stamp):
        if stamp.attestations:
            yield stamp
        else:
            for result_stamp in stamp.ops.values():
                yield from directly_verified(result_stamp)
        yield from ()

    def get_attestations(stamp):
        return set(attest for msg, attest in stamp.all_attestations())

    changed = False

    # First, check the cache for upgrades to this timestamp. Since the cache is
    # local, we do this very aggressively, checking every single sub-timestamp
    # against the cache.
    def walk_stamp(stamp):
        yield stamp
        for sub_stamp in stamp.ops.values():
            yield from walk_stamp(sub_stamp)

    existing_attestations = get_attestations(timestamp)
    for sub_stamp in walk_stamp(timestamp):
        try:
            cached_stamp = args.cache[sub_stamp.msg]
        except KeyError:
            continue
        sub_stamp.merge(cached_stamp)

    new_attestations_from_cache = get_attestations(timestamp).difference(existing_attestations)
    if len(new_attestations_from_cache):
        changed = True
        logging.info("Got %d attestation(s) from cache" % len(new_attestations_from_cache))
        existing_attestations.update(new_attestations_from_cache)
        for new_att in new_attestations_from_cache:
            logging.debug("    %r" % new_att)

    while not is_timestamp_complete(timestamp, args):
        # Check remote calendars for upgrades.
        #
        # This time we only check PendingAttestations - we can't be as
        # aggressive.
        found_new_attestations = False
        for sub_stamp in directly_verified(timestamp):
            for attestation in sub_stamp.attestations:
                if attestation.__class__ == PendingAttestation:
                    calendar_urls = args.calendar_urls
                    if calendar_urls:
                        # FIXME: this message is incorrectly displayed, disabling for now.
                        #
                        # logging.debug("Attestation URI %s overridden by user-specified remote calendar(s)" % attestation.uri)
                        pass
                    else:
                        if attestation.uri in args.whitelist:
                            calendar_urls = [attestation.uri]
                        else:
                            logging.warning(
                                "Ignoring attestation from calendar %s: Calendar not in whitelist" % attestation.uri)
                            continue

                    commitment = sub_stamp.msg
                    for calendar_url in calendar_urls:
                        logging.debug("Checking calendar %s for %s" % (attestation.uri, b2x(commitment)))
                        calendar = remote_calendar(calendar_url)

                        try:
                            upgraded_stamp = calendar.get_timestamp(commitment)
                        except opentimestamps.calendar.CommitmentNotFoundError as exp:
                            logging.warning("Calendar %s: %s" % (attestation.uri, exp.reason))
                            continue
                        except urllib.error.URLError as exp:
                            logging.warning("Calendar %s: %s" % (attestation.uri, exp.reason))
                            continue

                        atts_from_remote = get_attestations(upgraded_stamp)
                        if atts_from_remote:
                            logging.info("Got %d attestation(s) from %s" % (len(atts_from_remote), calendar_url))
                            for att in get_attestations(upgraded_stamp):
                                logging.debug("    %r" % att)

                        new_attestations = get_attestations(upgraded_stamp).difference(existing_attestations)
                        if new_attestations:
                            changed = True
                            found_new_attestations = True
                            existing_attestations.update(new_attestations)

                            # FIXME: need to think about DoS attacks here
                            args.cache.merge(upgraded_stamp)
                            sub_stamp.merge(upgraded_stamp)

        if not args.wait:
            break

        elif found_new_attestations:
            # We got something new, so loop around immediately to check if
            # we're now complete
            continue

        else:
            # Nothing new, so wait
            logging.info("Timestamp not complete; waiting %d sec before trying again" % args.wait_interval)
            time.sleep(args.wait_interval)

    return changed
def do_GET(self):
    if self.path == '/':
        self.send_response(200)
        self.send_header('Content-type', 'text/html')

        # Humans are likely to be refreshing this, so keep it up-to-date
        self.send_header('Cache-Control', 'public, max-age=1')

        self.end_headers()

        proxy = bitcoin.rpc.Proxy()

        # FIXME: Unfortunately getbalance() doesn't return the right thing;
        # need to investigate further, but this seems to work.
        str_wallet_balance = str(proxy._call("getbalance"))

        transactions = proxy._call("listtransactions", "", 1000)
        # We want only the confirmed txs containing an OP_RETURN, from most to least recent
        transactions = list(filter(lambda x: x["confirmations"] > 0 and x["amount"] == 0, transactions))

        a_week_ago = (datetime.date.today() - datetime.timedelta(days=7)).timetuple()
        a_week_ago_posix = time.mktime(a_week_ago)
        transactions_in_last_week = list(filter(lambda x: x["time"] > a_week_ago_posix, transactions))

        fees_in_last_week = reduce(lambda a, b: a - b["fee"], transactions_in_last_week, 0)
        try:
            time_between_transactions = str(round(168 / len(transactions_in_last_week), 2))  # in hours based on 168 hours in a week
            time_between_transactions += " hours"
        except ZeroDivisionError:
            time_between_transactions = "N/A"

        transactions.sort(key=lambda x: x["confirmations"])

        homepage_template = """<html>
<head>
    <title>OpenTimestamps Calendar Server</title>
</head>
<body>
<p>This is an <a href="https://opentimestamps.org">OpenTimestamps</a> <a href="https://github.com/opentimestamps/opentimestamps-server">Calendar Server</a> (v{{ version }})</p>

<p>
Pending commitments: {{ pending_commitments }}</br>
Transactions waiting for confirmation: {{ txs_waiting_for_confirmation }}</br>
Most recent timestamp tx: {{ most_recent_tx }} ({{ prior_versions }} prior versions)</br>
Most recent merkle tree tip: {{ tip }}</br>
Best-block: {{ best_block }}, height {{ block_height }}</br>
</br>
Wallet balance: {{ balance }} BTC</br>
</p>

<p>
You can donate to the wallet by sending funds to: {{ address }}</br>
This address changes after every donation.
</p>

<p>
Average time between transactions in the last week: {{ time_between_transactions }} </br>
Fees used in the last week: {{ fees_in_last_week }} BTC</br>
Latest transactions: </br>
{{#transactions}}
    {{txid}} </br>
{{/transactions}}
</p>

</body>
</html>"""

        stats = {
            'version': otsserver.__version__,
            'pending_commitments': len(self.calendar.stamper.pending_commitments),
            'txs_waiting_for_confirmation': len(self.calendar.stamper.txs_waiting_for_confirmation),
            'most_recent_tx': b2lx(self.calendar.stamper.unconfirmed_txs[-1].tx.GetTxid())
                              if self.calendar.stamper.unconfirmed_txs else 'None',
            'prior_versions': max(0, len(self.calendar.stamper.unconfirmed_txs) - 1),
            'tip': b2x(self.calendar.stamper.unconfirmed_txs[-1].tip_timestamp.msg)
                   if self.calendar.stamper.unconfirmed_txs else 'None',
            'best_block': bitcoin.core.b2lx(proxy.getbestblockhash()),
            'block_height': proxy.getblockcount(),
            'balance': str_wallet_balance,
            'address': str(proxy.getaccountaddress('')),
            'transactions': transactions[:5],
            'time_between_transactions': time_between_transactions,
            'fees_in_last_week': fees_in_last_week,
        }
        welcome_page = renderer.render(homepage_template, stats)

        self.wfile.write(str.encode(welcome_page))

    elif self.path.startswith('/timestamp/'):
        self.get_timestamp()
    elif self.path == '/tip':
        self.get_tip()
    elif self.path.startswith('/experimental/backup/'):
        self.get_backup()
    else:
        self.send_response(404)
        self.send_header('Content-type', 'text/plain')

        # a 404 is only going to become not a 404 if the server is upgraded
        self.send_header('Cache-Control', 'public, max-age=3600')

        self.end_headers()
        self.wfile.write(b'Not found')
def return_coins_tx(amount_to_send, last_tx, lock_time, script):
    txin = create_txin(b2x(last_tx.GetTxid()), 0)
    txout = create_txout(amount_to_send, P2PKH_scriptPubKey(alice_address_BTC))
    tx = CMutableTransaction([txin], [txout], nLockTime=lock_time)
    return tx
def broadcast_transaction(tx):
    raw_transaction = b2x(tx.serialize())
    headers = {'content-type': 'application/x-www-form-urlencoded'}
    return requests.post('https://api.blockcypher.com/v1/btc/test3/txs/push',
                         headers=headers,
                         data='{"tx": "%s"}' % raw_transaction)
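# Hedged variant (not from the original code): letting the json module build the
# request body avoids hand-escaping the hex string; the endpoint is the same
# BlockCypher testnet push URL used above.
import json
import requests
from bitcoin.core import b2x

def broadcast_transaction_json(tx):
    raw_transaction = b2x(tx.serialize())
    return requests.post('https://api.blockcypher.com/v1/btc/test3/txs/push',
                         headers={'content-type': 'application/json'},
                         data=json.dumps({'tx': raw_transaction}))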
def is_valid(priv):
    n = 'FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFE BAAEDCE6 AF48A03B BFD25E8C D0364141'
    priv = int(b2x(priv), 16)
    n = int(n.replace(' ', ''), 16)
    assert priv > 0
    assert priv < n


device = get_transport()
client = TrezorClient(transport=device, ui=ClickUI())
n_path = parse_path(path)
info = get_public_node(client, n_path, coin_name=coin)
side, pubkey = (info.node.public_key[0], info.node.public_key[1:])
left = True if side == 2 else False
print("seed", b2x(pubkey), side)
priv = decrypt_keyvalue(client, n_path, path, pubkey, ask_on_decrypt=False, ask_on_encrypt=side)
is_valid(priv)
print("priv", b2x(priv), left)
client.close()

SelectParams(coin.lower())
seckey = CBitcoinSecret.from_secret_bytes(priv)

# Create a witnessScript and corresponding redeemScript. Similar to a scriptPubKey
def is_valid(priv):
    n = 'FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFE BAAEDCE6 AF48A03B BFD25E8C D0364141'
    priv = int(b2x(priv), 16)
    n = int(n.replace(' ', ''), 16)
    assert priv > 0
    assert priv < n
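# Illustrative usage of is_valid() above (the byte strings here are made-up test
# values, not from the original code): any 32-byte secret strictly between 0 and
# the secp256k1 group order passes, everything else trips an assert.
is_valid(bytes.fromhex('01'.rjust(64, '0')))   # ok: the integer value 1
try:
    is_valid(bytes(32))                        # all-zero key: `priv > 0` fails
except AssertionError:
    pass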
SelectParams('testnet')

MONGOCONNECTION = pymongo.Connection('52.1.141.196', 27017)
MONGODB = MONGOCONNECTION.escrow.demo

escrow = MONGODB.find_one(
    {'buyerurlhash': "906618b107da70ed301d701ce8dbff533f35812d"})

phrase = "sample core fitness wrong unusual inch hurry chaos myself credit welcome margin"
seed = mnemonic.Mnemonic.to_seed(phrase)
wallet = BIP32Node.from_master_secret(seed, 'XTN')

# Note: this snippet is Python 2 (bare print statements).
toddkeys = []
keys = []
for k in escrow['keys']:
    print k['subkey']
    hdkey = wallet.subkey_for_path(k['subkey'])
    print b2x(CKey(hdkey.sec()).pub)
    print hdkey.sec_as_hex()
    print k['publickey']
    print ""
    toddkeys.append(CKey(hdkey.sec()))
    keys.append(CPubKey(hdkey.address()))

"""
keys = []
for pubkey in escrow['keys']:
    print "PUBLIC KEY", pubkey['publickey']
    keys.append(CPubKey(pubkey['publickey']))
"""

# Create a redeemScript. Similar to a scriptPubKey the redeemScript must be
# satisfied for the funds to be spent.
redeemScript = CScript(keys)