Example no. 1
            chan_ba=chan_ba,
            chan_bd=chan_bd,
            chan_ca=chan_ca,
            chan_cd=chan_cd,
            chan_db=chan_db,
            chan_dc=chan_dc,
        )

    @staticmethod
    async def prepare_invoice(
        w2: MockLNWallet,  # receiver
        *,
        amount_msat=100_000_000,
        include_routing_hints=False,
    ):
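        # 1 BTC == COIN satoshis == COIN * 1000 msat, so this converts the msat amount to BTC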
        amount_btc = amount_msat / Decimal(COIN * 1000)
        payment_preimage = os.urandom(32)
        RHASH = sha256(payment_preimage)
        info = PaymentInfo(RHASH, amount_msat, RECEIVED, PR_UNPAID)
        w2.save_preimage(RHASH, payment_preimage)
        w2.save_payment_info(info)
        if include_routing_hints:
            routing_hints = await w2._calc_routing_hints_for_invoice(
                amount_msat)
        else:
            routing_hints = []
        lnaddr = LnAddr(paymenthash=RHASH,
                        amount=amount_btc,
                        tags=[('c', lnutil.MIN_FINAL_CLTV_EXPIRY_FOR_INVOICE),
                              ('d', 'coffee')] + routing_hints)
        return lnencode(lnaddr, w2.node_keypair.privkey)
Example no. 2
    def skip_mine(self, node, txid, sign, redeem_script=""):
        send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
        block = node.generate(1)
        assert_equal(len(node.getblock(block[0])["tx"]), 1)
        sync_blocks(self.nodes)
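    # success_mine() is invoked in the next example but its definition is not part of
    # this snippet. A plausible counterpart to skip_mine() above, assuming the same
    # helpers (send_to_witness, getutxo, assert_equal, sync_blocks), would expect the
    # mined block to contain the witness tx in addition to the coinbase. This is only a
    # sketch, not necessarily the project's exact implementation:
    def success_mine(self, node, txid, sign, redeem_script=""):
        send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
        block = node.generate(1)
        assert_equal(len(node.getblock(block[0])["tx"]), 2)  # coinbase plus the witness tx
        sync_blocks(self.nodes)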
Example no. 3
    def run_test(self):
        self.nodes[0].generate(161)  # block 161

        self.log.info("Verify sigops are counted in GBT with pre-BIP141 rules before the fork")
        txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
        tmpl = self.nodes[0].getblocktemplate({'rules': ['segwit']})
        assert(tmpl['sizelimit'] == 1000000)
        assert('weightlimit' not in tmpl)
        assert(tmpl['sigoplimit'] == 20000)
        assert(tmpl['transactions'][0]['hash'] == txid)
        assert(tmpl['transactions'][0]['sigops'] == 2)
        tmpl = self.nodes[0].getblocktemplate({'rules': ['segwit']})
        assert(tmpl['sizelimit'] == 1000000)
        assert('weightlimit' not in tmpl)
        assert(tmpl['sigoplimit'] == 20000)
        assert(tmpl['transactions'][0]['hash'] == txid)
        assert(tmpl['transactions'][0]['sigops'] == 2)
        self.nodes[0].generate(1)  # block 162

        balance_presetup = self.nodes[0].getbalance()
        self.pubkey = []
        p2sh_ids = []  # p2sh_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE embedded in p2sh
        wit_ids = []  # wit_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE via bare witness
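        # Illustration of the indexing scheme above: wit_ids[0][1] is the list of txids
        # whose outputs pay a version-1 witness program for node 0 via a bare witness
        # output, so wit_ids[0][1][2] is the third such txid.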
        for i in range(3):
            newaddress = self.nodes[i].getnewaddress()
            self.pubkey.append(self.nodes[i].getaddressinfo(newaddress)["pubkey"])
            multiscript = CScript([OP_1, hex_str_to_bytes(self.pubkey[-1]), OP_1, OP_CHECKMULTISIG])
            p2sh_ms_addr = self.nodes[i].addmultisigaddress(1, [self.pubkey[-1]], '', 'p2sh-segwit')['address']
            bip173_ms_addr = self.nodes[i].addmultisigaddress(1, [self.pubkey[-1]], '', 'bech32')['address']
            assert_equal(p2sh_ms_addr, script_to_p2sh_p2wsh(multiscript))
            assert_equal(bip173_ms_addr, script_to_p2wsh(multiscript))
            p2sh_ids.append([])
            wit_ids.append([])
            for v in range(2):
                p2sh_ids[i].append([])
                wit_ids[i].append([])

        for i in range(5):
            for n in range(3):
                for v in range(2):
                    wit_ids[n][v].append(send_to_witness(v, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[n], False, Decimal("49.999")))
                    p2sh_ids[n][v].append(send_to_witness(v, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[n], True, Decimal("49.999")))

        self.nodes[0].generate(1)  # block 163
        sync_blocks(self.nodes)

        # Make sure all nodes recognize the transactions as theirs
        assert_equal(self.nodes[0].getbalance(), balance_presetup - 60 * 50 + 20 * Decimal("49.999") + 50)
        assert_equal(self.nodes[1].getbalance(), 20 * Decimal("49.999"))
        assert_equal(self.nodes[2].getbalance(), 20 * Decimal("49.999"))

        self.nodes[0].generate(260)  # block 423
        sync_blocks(self.nodes)

        self.log.info("Verify witness txs are skipped for mining before the fork")
        self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][0], True)  # block 424
        self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][0], True)  # block 425
        self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][0], True)  # block 426
        self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][0], True)  # block 427

        self.log.info("Verify unsigned p2sh witness txs without a redeem script are invalid")
        self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V0][1], False)
        self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V1][1], False)

        self.nodes[2].generate(4)  # blocks 428-431

        self.log.info("Verify previous witness txs skipped for mining can now be mined")
        assert_equal(len(self.nodes[2].getrawmempool()), 4)
        blockhash = self.nodes[2].generate(1)[0]  # block 432 (first block with new rules; 432 = 144 * 3)
        sync_blocks(self.nodes)
        assert_equal(len(self.nodes[2].getrawmempool()), 0)
        segwit_tx_list = self.nodes[2].getblock(blockhash)["tx"]
        assert_equal(len(segwit_tx_list), 5)

        self.log.info("Verify default node can't accept txs with missing witness")
        # unsigned, no scriptsig
        self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", wit_ids[NODE_0][WIT_V0][0], False)
        self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", wit_ids[NODE_0][WIT_V1][0], False)
        self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V0][0], False)
        self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V1][0], False)
        # unsigned with redeem script
        self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V0][0], False, witness_script(False, self.pubkey[0]))
        self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V1][0], False, witness_script(True, self.pubkey[0]))

        self.log.info("Verify block and transaction serialization rpcs return differing serializations depending on rpc serialization flag")
        assert(self.nodes[2].getblock(blockhash, False) != self.nodes[0].getblock(blockhash, False))
        assert(self.nodes[1].getblock(blockhash, False) == self.nodes[2].getblock(blockhash, False))

        for tx_id in segwit_tx_list:
            tx = FromHex(CTransaction(), self.nodes[2].gettransaction(tx_id)["hex"])
            assert(self.nodes[2].getrawtransaction(tx_id, False, blockhash) != self.nodes[0].getrawtransaction(tx_id, False, blockhash))
            assert(self.nodes[1].getrawtransaction(tx_id, False, blockhash) == self.nodes[2].getrawtransaction(tx_id, False, blockhash))
            assert(self.nodes[0].getrawtransaction(tx_id, False, blockhash) != self.nodes[2].gettransaction(tx_id)["hex"])
            assert(self.nodes[1].getrawtransaction(tx_id, False, blockhash) == self.nodes[2].gettransaction(tx_id)["hex"])
            assert(self.nodes[0].getrawtransaction(tx_id, False, blockhash) == bytes_to_hex_str(tx.serialize_without_witness()))

        self.log.info("Verify witness txs without witness data are invalid after the fork")
        self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch) (code 64)', wit_ids[NODE_2][WIT_V0][2], sign=False)
        self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness) (code 64)', wit_ids[NODE_2][WIT_V1][2], sign=False)
        self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch) (code 64)', p2sh_ids[NODE_2][WIT_V0][2], sign=False, redeem_script=witness_script(False, self.pubkey[2]))
        self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness) (code 64)', p2sh_ids[NODE_2][WIT_V1][2], sign=False, redeem_script=witness_script(True, self.pubkey[2]))

        self.log.info("Verify default node can now use witness txs")
        self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V0][0], True)  # block 433
        self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V1][0], True)  # block 434
        self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V0][0], True)  # block 435
        self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V1][0], True)  # block 436

        self.log.info("Verify sigops are counted in GBT with BIP141 rules after the fork")
        txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
        tmpl = self.nodes[0].getblocktemplate({'rules': ['segwit']})
        assert(tmpl['sizelimit'] >= 3999577)  # actual maximum size is lower due to minimum mandatory non-witness data
        assert(tmpl['weightlimit'] == 4000000)
        assert(tmpl['sigoplimit'] == 80000)
        assert(tmpl['transactions'][0]['txid'] == txid)
        assert(tmpl['transactions'][0]['sigops'] == 8)

        self.nodes[0].generate(1)  # Mine a block to clear the gbt cache

        self.log.info("Non-segwit miners are able to use GBT response after activation.")
        # Create a 3-tx chain: tx1 (non-segwit input, paying to a segwit output) ->
        #                      tx2 (segwit input, paying to a non-segwit output) ->
        #                      tx3 (non-segwit input, paying to a non-segwit output).
        # tx1 is allowed to appear in the block, but no others.
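        # Schematically:
        #   legacy coinbase UTXO --tx1--> witness output --tx2--> legacy output --tx3--> legacy output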
        txid1 = send_to_witness(1, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[0], False, Decimal("49.996"))
        hex_tx = self.nodes[0].gettransaction(txid1)['hex']
        tx = FromHex(CTransaction(), hex_tx)
        assert(tx.wit.is_null())  # This should not be a segwit input
        assert(txid1 in self.nodes[0].getrawmempool())

        # Now create tx2, which will spend from txid1.
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(int(txid1, 16), 0), b''))
        tx.vout.append(CTxOut(int(49.99 * COIN), CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE])))
        tx2_hex = self.nodes[0].signrawtransactionwithwallet(ToHex(tx))['hex']
        txid2 = self.nodes[0].sendrawtransaction(tx2_hex)
        tx = FromHex(CTransaction(), tx2_hex)
        assert(not tx.wit.is_null())

        # Now create tx3, which will spend from txid2
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(int(txid2, 16), 0), b""))
        tx.vout.append(CTxOut(int(49.95 * COIN), CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE])))  # Huge fee
        tx.calc_sha256()
        txid3 = self.nodes[0].sendrawtransaction(ToHex(tx))
        assert(tx.wit.is_null())
        assert(txid3 in self.nodes[0].getrawmempool())

        # Check that getblocktemplate includes all transactions.
        template = self.nodes[0].getblocktemplate({"rules": ["segwit"]})
        template_txids = [t['txid'] for t in template['transactions']]
        assert(txid1 in template_txids)
        assert(txid2 in template_txids)
        assert(txid3 in template_txids)

        # Check that wtxid is properly reported in mempool entry
        assert_equal(int(self.nodes[0].getmempoolentry(txid3)["wtxid"], 16), tx.calc_sha256(True))

        # Mine a block to clear the gbt cache again.
        self.nodes[0].generate(1)

        self.log.info("Verify behaviour of importaddress and listunspent")

        # Some public keys to be used later
        pubkeys = [
            "0363D44AABD0F1699138239DF2F042C3282C0671CC7A76826A55C8203D90E39242",  # cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb
            "02D3E626B3E616FC8662B489C123349FECBFC611E778E5BE739B257EAE4721E5BF",  # cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97
            "04A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538A62F5BD8EC85C2477F39650BD391EA6250207065B2A81DA8B009FC891E898F0E",  # 91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV
            "02A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538",  # cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd
            "036722F784214129FEB9E8129D626324F3F6716555B603FFE8300BBCB882151228",  # cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66
            "0266A8396EE936BF6D99D17920DB21C6C7B1AB14C639D5CD72B300297E416FD2EC",  # cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K
            "0450A38BD7F0AC212FEBA77354A9B036A32E0F7C81FC4E0C5ADCA7C549C4505D2522458C2D9AE3CEFD684E039194B72C8A10F9CB9D4764AB26FCC2718D421D3B84",  # 92h2XPssjBpsJN5CqSP7v9a7cf2kgDunBC6PDFwJHMACM1rrVBJ
        ]

        # Import a compressed key and an uncompressed key, generate some multisig addresses
        self.nodes[0].importprivkey("92e6XLo5jVAVwrQKPNTs93oQco8f8sDNBcpv73Dsrs397fQtFQn")
        uncompressed_spendable_address = ["mvozP4UwyGD2mGZU4D2eMvMLPB9WkMmMQu"]
        self.nodes[0].importprivkey("cNC8eQ5dg3mFAVePDX4ddmPYpPbw41r9bm2jd1nLJT77e6RrzTRR")
        compressed_spendable_address = ["mmWQubrDomqpgSYekvsU7HWEVjLFHAakLe"]
        assert not self.nodes[0].getaddressinfo(uncompressed_spendable_address[0])['iscompressed']
        assert self.nodes[0].getaddressinfo(compressed_spendable_address[0])['iscompressed']

        self.nodes[0].importpubkey(pubkeys[0])
        compressed_solvable_address = [key_to_p2pkh(pubkeys[0])]
        self.nodes[0].importpubkey(pubkeys[1])
        compressed_solvable_address.append(key_to_p2pkh(pubkeys[1]))
        self.nodes[0].importpubkey(pubkeys[2])
        uncompressed_solvable_address = [key_to_p2pkh(pubkeys[2])]

        spendable_anytime = []                      # These outputs should be seen anytime after importprivkey and addmultisigaddress
        spendable_after_importaddress = []          # These outputs should be seen after importaddress
        solvable_after_importaddress = []           # These outputs should be seen after importaddress but not spendable
        unsolvable_after_importaddress = []         # These outputs should be unsolvable after importaddress
        solvable_anytime = []                       # These outputs should be solvable after importpubkey
        unseen_anytime = []                         # These outputs should never be seen

        uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], compressed_spendable_address[0]])['address'])
        uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], uncompressed_spendable_address[0]])['address'])
        compressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_spendable_address[0]])['address'])
        uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], uncompressed_solvable_address[0]])['address'])
        compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_solvable_address[0]])['address'])
        compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_solvable_address[0], compressed_solvable_address[1]])['address'])

        # Test multisig_without_privkey
        # We have 2 public keys without private keys, use addmultisigaddress to add to wallet.
        # Money sent to P2SH of multisig of this should only be seen after importaddress with the BASE58 P2SH address.

        multisig_without_privkey_address = self.nodes[0].addmultisigaddress(2, [pubkeys[3], pubkeys[4]])['address']
        script = CScript([OP_2, hex_str_to_bytes(pubkeys[3]), hex_str_to_bytes(pubkeys[4]), OP_2, OP_CHECKMULTISIG])
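        # The script appended below is the standard P2SH form of the 2-of-2 redeem script
        # above: OP_HASH160 <hash160(redeem script)> OP_EQUAL.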
        solvable_after_importaddress.append(CScript([OP_HASH160, hash160(script), OP_EQUAL]))

        for i in compressed_spendable_address:
            v = self.nodes[0].getaddressinfo(i)
            if (v['isscript']):
                [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
                # p2sh multisig with compressed keys should always be spendable
                spendable_anytime.extend([p2sh])
                # bare multisig can be watched and signed, but is not treated as ours
                solvable_after_importaddress.extend([bare])
                # P2WSH and P2SH(P2WSH) multisig with compressed keys are spendable after direct importaddress
                spendable_after_importaddress.extend([p2wsh, p2sh_p2wsh])
            else:
                [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
                # normal P2PKH and P2PK with compressed keys should always be spendable
                spendable_anytime.extend([p2pkh, p2pk])
                # P2SH_P2PK, P2SH_P2PKH with compressed keys are spendable after direct importaddress
                spendable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
                # P2WPKH and P2SH_P2WPKH with compressed keys should always be spendable
                spendable_anytime.extend([p2wpkh, p2sh_p2wpkh])

        for i in uncompressed_spendable_address:
            v = self.nodes[0].getaddressinfo(i)
            if (v['isscript']):
                [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
                # p2sh multisig with uncompressed keys should always be spendable
                spendable_anytime.extend([p2sh])
                # bare multisig can be watched and signed, but is not treated as ours
                solvable_after_importaddress.extend([bare])
                # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
                unseen_anytime.extend([p2wsh, p2sh_p2wsh])
            else:
                [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
                # normal P2PKH and P2PK with uncompressed keys should always be spendable
                spendable_anytime.extend([p2pkh, p2pk])
                # P2SH_P2PK and P2SH_P2PKH are spendable after direct importaddress
                spendable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh])
                # Witness output types with uncompressed keys are never seen
                unseen_anytime.extend([p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])

        for i in compressed_solvable_address:
            v = self.nodes[0].getaddressinfo(i)
            if (v['isscript']):
                # Multisig without private is not seen after addmultisigaddress, but seen after importaddress
                [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
                solvable_after_importaddress.extend([bare, p2sh, p2wsh, p2sh_p2wsh])
            else:
                [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
                # normal P2PKH, P2PK, P2WPKH and P2SH_P2WPKH with compressed keys should always be seen
                solvable_anytime.extend([p2pkh, p2pk, p2wpkh, p2sh_p2wpkh])
                # P2SH_P2PK, P2SH_P2PKH with compressed keys are seen after direct importaddress
                solvable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])

        for i in uncompressed_solvable_address:
            v = self.nodes[0].getaddressinfo(i)
            if (v['isscript']):
                [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
                # Base uncompressed multisig without private is not seen after addmultisigaddress, but seen after importaddress
                solvable_after_importaddress.extend([bare, p2sh])
                # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
                unseen_anytime.extend([p2wsh, p2sh_p2wsh])
            else:
                [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
                # normal P2PKH and P2PK with uncompressed keys should always be seen
                solvable_anytime.extend([p2pkh, p2pk])
                # P2SH_P2PK, P2SH_P2PKH with uncompressed keys are seen after direct importaddress
                solvable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh])
                # Witness output types with uncompressed keys are never seen
                unseen_anytime.extend([p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])

        op1 = CScript([OP_1])
        op0 = CScript([OP_0])
        # 2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe is the P2SH(P2PKH) version of mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V
        unsolvable_address_key = hex_str_to_bytes("02341AEC7587A51CDE5279E0630A531AEA2615A9F80B17E8D9376327BAEAA59E3D")
        unsolvablep2pkh = CScript([OP_DUP, OP_HASH160, hash160(unsolvable_address_key), OP_EQUALVERIFY, OP_CHECKSIG])
        unsolvablep2wshp2pkh = CScript([OP_0, sha256(unsolvablep2pkh)])
        p2shop0 = CScript([OP_HASH160, hash160(op0), OP_EQUAL])
        p2wshop1 = CScript([OP_0, sha256(op1)])
        unsolvable_after_importaddress.append(unsolvablep2pkh)
        unsolvable_after_importaddress.append(unsolvablep2wshp2pkh)
        unsolvable_after_importaddress.append(op1)  # OP_1 will be imported as script
        unsolvable_after_importaddress.append(p2wshop1)
        unseen_anytime.append(op0)  # OP_0 will be imported as P2SH address with no script provided
        unsolvable_after_importaddress.append(p2shop0)

        spendable_txid = []
        solvable_txid = []
        spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime, 2))
        solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime, 1))
        self.mine_and_test_listunspent(spendable_after_importaddress + solvable_after_importaddress + unseen_anytime + unsolvable_after_importaddress, 0)

        importlist = []
        for i in compressed_spendable_address + uncompressed_spendable_address + compressed_solvable_address + uncompressed_solvable_address:
            v = self.nodes[0].getaddressinfo(i)
            if (v['isscript']):
                bare = hex_str_to_bytes(v['hex'])
                importlist.append(bytes_to_hex_str(bare))
                importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(bare)])))
            else:
                pubkey = hex_str_to_bytes(v['pubkey'])
                p2pk = CScript([pubkey, OP_CHECKSIG])
                p2pkh = CScript([OP_DUP, OP_HASH160, hash160(pubkey), OP_EQUALVERIFY, OP_CHECKSIG])
                importlist.append(bytes_to_hex_str(p2pk))
                importlist.append(bytes_to_hex_str(p2pkh))
                importlist.append(bytes_to_hex_str(CScript([OP_0, hash160(pubkey)])))
                importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(p2pk)])))
                importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(p2pkh)])))

        importlist.append(bytes_to_hex_str(unsolvablep2pkh))
        importlist.append(bytes_to_hex_str(unsolvablep2wshp2pkh))
        importlist.append(bytes_to_hex_str(op1))
        importlist.append(bytes_to_hex_str(p2wshop1))

        for i in importlist:
            # import all generated addresses. The wallet already has the private keys for some of these, so catch JSON RPC
            # exceptions and continue.
            try_rpc(-4, "The wallet already contains the private key for this address or script", self.nodes[0].importaddress, i, "", False, True)

        self.nodes[0].importaddress(script_to_p2sh(op0))  # import OP_0 as address only
        self.nodes[0].importaddress(multisig_without_privkey_address)  # Test multisig_without_privkey

        spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime + spendable_after_importaddress, 2))
        solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime + solvable_after_importaddress, 1))
        self.mine_and_test_listunspent(unsolvable_after_importaddress, 1)
        self.mine_and_test_listunspent(unseen_anytime, 0)

        spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime + spendable_after_importaddress, 2))
        solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime + solvable_after_importaddress, 1))
        self.mine_and_test_listunspent(unsolvable_after_importaddress, 1)
        self.mine_and_test_listunspent(unseen_anytime, 0)

        # Repeat some tests. This time we don't add witness scripts with importaddress
        # Import a compressed key and an uncompressed key, generate some multisig addresses
        self.nodes[0].importprivkey("927pw6RW8ZekycnXqBQ2JS5nPyo1yRfGNN8oq74HeddWSpafDJH")
        uncompressed_spendable_address = ["mguN2vNSCEUh6rJaXoAVwY3YZwZvEmf5xi"]
        self.nodes[0].importprivkey("cMcrXaaUC48ZKpcyydfFo8PxHAjpsYLhdsp6nmtB3E2ER9UUHWnw")
        compressed_spendable_address = ["n1UNmpmbVUJ9ytXYXiurmGPQ3TRrXqPWKL"]

        self.nodes[0].importpubkey(pubkeys[5])
        compressed_solvable_address = [key_to_p2pkh(pubkeys[5])]
        self.nodes[0].importpubkey(pubkeys[6])
        uncompressed_solvable_address = [key_to_p2pkh(pubkeys[6])]

        solvable_anytime = []                       # These outputs should be solvable after importpubkey
        unseen_anytime = []                         # These outputs should never be seen

        uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], compressed_spendable_address[0]])['address'])
        uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], uncompressed_spendable_address[0]])['address'])
        compressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_spendable_address[0]])['address'])
        uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_solvable_address[0], uncompressed_solvable_address[0]])['address'])
        compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_solvable_address[0]])['address'])

        premature_witaddress = []

        for i in compressed_spendable_address:
            v = self.nodes[0].getaddressinfo(i)
            if (v['isscript']):
                [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
                premature_witaddress.append(script_to_p2sh(p2wsh))
            else:
                [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
                # P2WPKH, P2SH_P2WPKH are always spendable
                spendable_anytime.extend([p2wpkh, p2sh_p2wpkh])

        for i in uncompressed_spendable_address + uncompressed_solvable_address:
            v = self.nodes[0].getaddressinfo(i)
            if (v['isscript']):
                [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
                # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
                unseen_anytime.extend([p2wsh, p2sh_p2wsh])
            else:
                [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
                # P2WPKH, P2SH_P2WPKH with uncompressed keys are never seen
                unseen_anytime.extend([p2wpkh, p2sh_p2wpkh])

        for i in compressed_solvable_address:
            v = self.nodes[0].getaddressinfo(i)
            if (v['isscript']):
                [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
                premature_witaddress.append(script_to_p2sh(p2wsh))
            else:
                [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
                # P2WPKH, P2SH_P2WPKH with compressed keys are always solvable
                solvable_anytime.extend([p2wpkh, p2sh_p2wpkh])

        self.mine_and_test_listunspent(spendable_anytime, 2)
        self.mine_and_test_listunspent(solvable_anytime, 1)
        self.mine_and_test_listunspent(unseen_anytime, 0)

        # Check that createrawtransaction/decoderawtransaction with non-v0 Bech32 works
        v1_addr = program_to_witness(1, [3, 5])
        v1_tx = self.nodes[0].createrawtransaction([getutxo(spendable_txid[0])], {v1_addr: 1})
        v1_decoded = self.nodes[1].decoderawtransaction(v1_tx)
        assert_equal(v1_decoded['vout'][0]['scriptPubKey']['addresses'][0], v1_addr)
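        # "51020305" decodes as OP_1 (0x51) followed by a 2-byte push (0x02) of the
        # witness program bytes 0x03 0x05, i.e. a version-1 witness output.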
        assert_equal(v1_decoded['vout'][0]['scriptPubKey']['hex'], "51020305")

        # Check that spendable outputs are really spendable
        self.create_and_mine_tx_from_txids(spendable_txid)

        # import all the private keys so solvable addresses become spendable
        self.nodes[0].importprivkey("cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb")
        self.nodes[0].importprivkey("cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97")
        self.nodes[0].importprivkey("91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV")
        self.nodes[0].importprivkey("cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd")
        self.nodes[0].importprivkey("cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66")
        self.nodes[0].importprivkey("cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K")
        self.create_and_mine_tx_from_txids(solvable_txid)

        # Test that importing native P2WPKH/P2WSH scripts works
        for use_p2wsh in [False, True]:
            if use_p2wsh:
                scriptPubKey = "00203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a"
                transaction = "01000000000100e1f505000000002200203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a00000000"
            else:
                scriptPubKey = "a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d87"
                transaction = "01000000000100e1f5050000000017a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d8700000000"

            self.nodes[1].importaddress(scriptPubKey, "", False)
            rawtxfund = self.nodes[1].fundrawtransaction(transaction)['hex']
            rawtxfund = self.nodes[1].signrawtransactionwithwallet(rawtxfund)["hex"]
            txid = self.nodes[1].sendrawtransaction(rawtxfund)

            assert_equal(self.nodes[1].gettransaction(txid, True)["txid"], txid)
            assert_equal(self.nodes[1].listtransactions("*", 1, 0, True)[0]["txid"], txid)

            # Assert it is properly saved
            self.stop_node(1)
            self.start_node(1)
            assert_equal(self.nodes[1].gettransaction(txid, True)["txid"], txid)
            assert_equal(self.nodes[1].listtransactions("*", 1, 0, True)[0]["txid"], txid)
Example no. 4
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.test_framework import MINIMAL_SC_HEIGHT, MINER_REWARD_POST_H200
from test_framework.authproxy import JSONRPCException
from test_framework.util import assert_equal, initialize_chain_clean, get_epoch_data, \
    start_nodes, sync_blocks, sync_mempools, connect_nodes_bi, mark_logs, swap_bytes
from test_framework.mc_test.mc_test import *
import os
from decimal import Decimal
import pprint
import time

DEBUG_MODE = 1
NUMB_OF_NODES = 4
EPOCH_LENGTH = 5
FT_SC_FEE = Decimal('0')
MBTR_SC_FEE = Decimal('0')
CERT_FEE = Decimal('0.00015')


class sc_cert_change(BitcoinTestFramework):

    alert_filename = None

    def setup_chain(self, split=False):
        print("Initializing test directory " + self.options.tmpdir)
        initialize_chain_clean(self.options.tmpdir, NUMB_OF_NODES)
        self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
        with open(self.alert_filename, 'w'):
            pass  # Just open then close to create zero-length file
Example no. 5
    def run_test(self):
        '''
        1) node0 creates a sidechain with 10.0 coins
           reach epoch 0
        2) node0 creates cert_ep0, funding node1 with 1.0 coins
           reach epoch 1
        3) node0 creates cert_ep1, funding node2 with 2.0 coins
           reach epoch 2
        4) node1 creates cert_ep2, funding node3 with 3.0 coins: it uses cert_ep0 as input and gets the change back after paying a fee of 0.0001
           node0 mines a new block
        5) node1 has just one UTXO, the change of cert_ep0, which should be 0.999; it sends 0.5 to node3
           node0 mines a new block
        6) node3 has a balance of 0.5 plus 3.0 immature coins from cert_ep2
        '''

        # cross chain transfer amounts
        creation_amount = Decimal("10.0")

        mark_logs("Node 0 generates {} block".format(MINIMAL_SC_HEIGHT),
                  self.nodes, DEBUG_MODE)
        self.nodes[0].generate(MINIMAL_SC_HEIGHT)
        self.sync_all()

        # (1) node0 create sidechain with 10.0 coins
        mcTest = CertTestUtils(self.options.tmpdir, self.options.srcdir)
        vk = mcTest.generate_params("sc1")
        constant = generate_random_field_element_hex()
        cmdInput = {
            "version": 0,
            "withdrawalEpochLength": EPOCH_LENGTH,
            "toaddress": "dada",
            "amount": creation_amount,
            "wCertVk": vk,
            "constant": constant
        }

        ret = self.nodes[0].sc_create(cmdInput)
        creating_tx = ret['txid']
        scid = ret['scid']
        scid_swapped = str(swap_bytes(scid))
        mark_logs(
            "Node 0 created the SC spending {} coins via tx {}.".format(
                creation_amount, creating_tx), self.nodes, DEBUG_MODE)
        self.sync_all()

        decoded_tx = self.nodes[0].getrawtransaction(creating_tx, 1)
        assert_equal(scid, decoded_tx['vsc_ccout'][0]['scid'])
        mark_logs("created SC id: {}".format(scid), self.nodes, DEBUG_MODE)

        mark_logs("Node0 generates 5 blocks to achieve end of epoch",
                  self.nodes, DEBUG_MODE)
        prev_epoch_block_hash = self.nodes[0].getblockhash(
            self.nodes[0].getblockcount())
        self.nodes[0].generate(5)
        self.sync_all()
        epoch_number, epoch_cum_tree_hash = get_epoch_data(
            scid, self.nodes[0], EPOCH_LENGTH)
        mark_logs(
            "epoch_number = {}, epoch_cum_tree_hash = {}".format(
                epoch_number, epoch_cum_tree_hash), self.nodes, DEBUG_MODE)

        # (2) node0 create a cert_ep0 for funding node1 1.0 coins
        addr_node1 = self.nodes[1].getnewaddress()
        bwt_amount = Decimal("1.0")
        amounts = [{"address": addr_node1, "amount": bwt_amount}]

        quality = 0
        proof = mcTest.create_test_proof("sc1", scid_swapped, epoch_number,
                                         quality, MBTR_SC_FEE, FT_SC_FEE,
                                         epoch_cum_tree_hash, constant,
                                         [addr_node1], [bwt_amount])

        mark_logs(
            "Node 0 performs a bwd transfer of {} coins to Node1 address {}".
            format(bwt_amount, addr_node1), self.nodes, DEBUG_MODE)
        try:
            cert_ep0 = self.nodes[0].sc_send_certificate(
                scid, epoch_number, quality, epoch_cum_tree_hash, proof,
                amounts, FT_SC_FEE, MBTR_SC_FEE, CERT_FEE)
            assert (len(cert_ep0) > 0)
            mark_logs("Certificate is {}".format(cert_ep0), self.nodes,
                      DEBUG_MODE)
            self.sync_all()
        except JSONRPCException as e:
            errorString = e.error['message']
            mark_logs(
                "Send certificate failed with reason {}".format(errorString),
                self.nodes, DEBUG_MODE)
            assert (False)
Example no. 6
    def prepare_lines(self):
        pool = Pool()
        Period = pool.get('account.period')
        Move = pool.get('account.move')

        move_lines = []
        line_move_ids = []
        move, = Move.create([{
            'period':
            Period.find(self.company.id, date=self.date),
            'journal':
            self.journal.id,
            'date':
            self.date,
            'origin':
            str(self),
        }])
        self.write([self], {
            'move': move.id,
        })

        for line in self.lines:
            if line.account_new:
                account_new = line.account_new
            else:
                self.raise_user_error("No ha ingresado la cuenta de Bancos")

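            # Each check line produces a balancing pair of move lines: a receipt credits
            # the line's original account and debits the new (bank) account; any other
            # type does the reverse.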
            if self.post_check_type == 'receipt':
                debit = Decimal('0.00')
                credit = line.amount
                total = line.amount
            else:
                debit = line.amount
                credit = Decimal('0.00')
                total = line.amount

            move_lines.append({
                'description':
                self.number,
                'debit':
                debit,
                'credit':
                credit,
                'account':
                line.account.id,
                'move':
                move.id,
                'journal':
                self.journal.id,
                'period':
                Period.find(self.company.id, date=self.date),
            })

            if self.post_check_type == 'receipt':
                debit = total
                credit = Decimal('0.00')
            else:
                debit = Decimal('0.00')
                credit = total

            move_lines.append({
                'description':
                self.number,
                'debit':
                debit,
                'credit':
                credit,
                'account':
                line.account_new.id,
                'move':
                move.id,
                'journal':
                self.journal.id,
                'period':
                Period.find(self.company.id, date=self.date),
                'date':
                self.date,
            })
        return move_lines
Example no. 7
    def run_test(self):
        # Check that there's no UTXO on any of the nodes
        assert_equal(len(self.nodes[0].listunspent()), 0)
        assert_equal(len(self.nodes[1].listunspent()), 0)
        assert_equal(len(self.nodes[2].listunspent()), 0)

        self.log.info("Mining blocks...")

        self.nodes[0].generate(1)

        walletinfo = self.nodes[0].getwalletinfo()
        assert_equal(walletinfo['immature_balance'], 50)
        assert_equal(walletinfo['balance'], 0)

        self.sync_all([self.nodes[0:3]])
        self.nodes[1].generate(101)
        self.sync_all([self.nodes[0:3]])

        assert_equal(self.nodes[0].getbalance(), 50)
        assert_equal(self.nodes[1].getbalance(), 50)
        assert_equal(self.nodes[2].getbalance(), 0)

        # Check that only first and second nodes have UTXOs
        utxos = self.nodes[0].listunspent()
        assert_equal(len(utxos), 1)
        assert_equal(len(self.nodes[1].listunspent()), 1)
        assert_equal(len(self.nodes[2].listunspent()), 0)

        self.log.info("test gettxout")
        confirmed_txid, confirmed_index = utxos[0]["txid"], utxos[0]["vout"]
        # First, outputs that are unspent both in the chain and in the
        # mempool should appear with or without include_mempool
        txout = self.nodes[0].gettxout(txid=confirmed_txid, n=confirmed_index, include_mempool=False)
        assert_equal(txout['value'], 50)
        txout = self.nodes[0].gettxout(txid=confirmed_txid, n=confirmed_index, include_mempool=True)
        assert_equal(txout['value'], 50)

        # Send 21 PTC from 0 to 2 using sendtoaddress call.
        # Locked memory should use at least 32 bytes to sign each transaction
        self.log.info("test getmemoryinfo")
        memory_before = self.nodes[0].getmemoryinfo()
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
        mempool_txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
        memory_after = self.nodes[0].getmemoryinfo()
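        # Two transactions were signed above, so at least 2 * 32 = 64 additional bytes of
        # locked memory should now be in use.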
        assert(memory_before['locked']['used'] + 64 <= memory_after['locked']['used'])

        self.log.info("test gettxout (second part)")
        # utxo spent in mempool should be visible if you exclude mempool
        # but invisible if you include mempool
        txout = self.nodes[0].gettxout(confirmed_txid, confirmed_index, False)
        assert_equal(txout['value'], 50)
        txout = self.nodes[0].gettxout(confirmed_txid, confirmed_index, True)
        assert txout is None
        # new utxo from mempool should be invisible if you exclude mempool
        # but visible if you include mempool
        txout = self.nodes[0].gettxout(mempool_txid, 0, False)
        assert txout is None
        txout1 = self.nodes[0].gettxout(mempool_txid, 0, True)
        txout2 = self.nodes[0].gettxout(mempool_txid, 1, True)
        # note the mempool tx will have randomly assigned indices
        # but 10 will go to node2 and the rest will go to node0
        balance = self.nodes[0].getbalance()
        assert_equal(set([txout1['value'], txout2['value']]), set([10, balance]))
        walletinfo = self.nodes[0].getwalletinfo()
        assert_equal(walletinfo['immature_balance'], 0)

        # Have node0 mine a block, thus it will collect its own fee.
        self.nodes[0].generate(1)
        self.sync_all([self.nodes[0:3]])

        # Exercise locking of unspent outputs
        unspent_0 = self.nodes[2].listunspent()[0]
        unspent_0 = {"txid": unspent_0["txid"], "vout": unspent_0["vout"]}
        assert_raises_rpc_error(-8, "Invalid parameter, expected locked output", self.nodes[2].lockunspent, True, [unspent_0])
        self.nodes[2].lockunspent(False, [unspent_0])
        assert_raises_rpc_error(-8, "Invalid parameter, output already locked", self.nodes[2].lockunspent, False, [unspent_0])
        assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].sendtoaddress, self.nodes[2].getnewaddress(), 20)
        assert_equal([unspent_0], self.nodes[2].listlockunspent())
        self.nodes[2].lockunspent(True, [unspent_0])
        assert_equal(len(self.nodes[2].listlockunspent()), 0)
        assert_raises_rpc_error(-8, "Invalid parameter, unknown transaction",
                                self.nodes[2].lockunspent, False,
                                [{"txid": "0000000000000000000000000000000000", "vout": 0}])
        assert_raises_rpc_error(-8, "Invalid parameter, vout index out of bounds",
                                self.nodes[2].lockunspent, False,
                                [{"txid": unspent_0["txid"], "vout": 999}])

        # Have node1 generate 100 blocks (so node0 can recover the fee)
        self.nodes[1].generate(100)
        self.sync_all([self.nodes[0:3]])

        # node0 should end up with 100 PTC in block rewards plus fees, but
        # minus the 21 plus fees sent to node2
        assert_equal(self.nodes[0].getbalance(), 100 - 21)
        assert_equal(self.nodes[2].getbalance(), 21)

        # Node0 should have two unspent outputs.
        # Create a couple of transactions to send them to node2, submit them through
        # node1, and make sure both node0 and node2 pick them up properly:
        node0utxos = self.nodes[0].listunspent(1)
        assert_equal(len(node0utxos), 2)

        # create both transactions
        txns_to_send = []
        for utxo in node0utxos:
            inputs = []
            outputs = {}
            inputs.append({"txid": utxo["txid"], "vout": utxo["vout"]})
            outputs[self.nodes[2].getnewaddress()] = utxo["amount"] - 3
            raw_tx = self.nodes[0].createrawtransaction(inputs, outputs)
            txns_to_send.append(self.nodes[0].signrawtransactionwithwallet(raw_tx))

        # Have node 1 (miner) send the transactions
        self.nodes[1].sendrawtransaction(txns_to_send[0]["hex"], True)
        self.nodes[1].sendrawtransaction(txns_to_send[1]["hex"], True)

        # Have node1 mine a block to confirm transactions:
        self.nodes[1].generate(1)
        self.sync_all([self.nodes[0:3]])

        assert_equal(self.nodes[0].getbalance(), 0)
        assert_equal(self.nodes[2].getbalance(), 94)

        # Verify that a spent output cannot be locked anymore
        spent_0 = {"txid": node0utxos[0]["txid"], "vout": node0utxos[0]["vout"]}
        assert_raises_rpc_error(-8, "Invalid parameter, expected unspent output", self.nodes[0].lockunspent, False, [spent_0])

        # Send 10 PTC normal
        address = self.nodes[0].getnewaddress("test")
        fee_per_byte = Decimal('0.001') / 1000
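        # settxfee expects a fee rate per kB, so the per-byte rate is scaled back up by 1000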
        self.nodes[2].settxfee(fee_per_byte * 1000)
        txid = self.nodes[2].sendtoaddress(address, 10, "", "", False)
        self.nodes[2].generate(1)
        self.sync_all([self.nodes[0:3]])
        node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), Decimal('84'), fee_per_byte, self.get_vsize(self.nodes[2].getrawtransaction(txid)))
        assert_equal(self.nodes[0].getbalance(), Decimal('10'))

        # Send 10 PTC with subtract fee from amount
        txid = self.nodes[2].sendtoaddress(address, 10, "", "", True)
        self.nodes[2].generate(1)
        self.sync_all([self.nodes[0:3]])
        node_2_bal -= Decimal('10')
        assert_equal(self.nodes[2].getbalance(), node_2_bal)
        node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), Decimal('20'), fee_per_byte, self.get_vsize(self.nodes[2].getrawtransaction(txid)))

        # Sendmany 10 PTC
        txid = self.nodes[2].sendmany('', {address: 10}, 0, "", [])
        self.nodes[2].generate(1)
        self.sync_all([self.nodes[0:3]])
        node_0_bal += Decimal('10')
        node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), node_2_bal - Decimal('10'), fee_per_byte, self.get_vsize(self.nodes[2].getrawtransaction(txid)))
        assert_equal(self.nodes[0].getbalance(), node_0_bal)

        # Sendmany 10 PTC with subtract fee from amount
        txid = self.nodes[2].sendmany('', {address: 10}, 0, "", [address])
        self.nodes[2].generate(1)
        self.sync_all([self.nodes[0:3]])
        node_2_bal -= Decimal('10')
        assert_equal(self.nodes[2].getbalance(), node_2_bal)
        node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), node_0_bal + Decimal('10'), fee_per_byte, self.get_vsize(self.nodes[2].getrawtransaction(txid)))

        # Test ResendWalletTransactions:
        # Create a couple of transactions, then start up a fourth
        # node (nodes[3]) and ask nodes[0] to rebroadcast.
        # EXPECT: nodes[3] should have those transactions in its mempool.
        txid1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1)
        txid2 = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
        sync_mempools(self.nodes[0:2])

        self.start_node(3)
        connect_nodes_bi(self.nodes, 0, 3)
        sync_blocks(self.nodes)

        relayed = self.nodes[0].resendwallettransactions()
        assert_equal(set(relayed), {txid1, txid2})
        sync_mempools(self.nodes)

        assert(txid1 in self.nodes[3].getrawmempool())

        # Exercise balance rpcs
        assert_equal(self.nodes[0].getwalletinfo()["unconfirmed_balance"], 1)
        assert_equal(self.nodes[0].getunconfirmedbalance(), 1)

        # check if we can list zero value tx as available coins
        # 1. create raw_tx
        # 2. hex-changed one output to 0.0
        # 3. sign and send
        # 4. check if recipient (node0) can list the zero value tx
        usp = self.nodes[1].listunspent()
        inputs = [{"txid": usp[0]['txid'], "vout": usp[0]['vout']}]
        outputs = {self.nodes[1].getnewaddress(): 49.998, self.nodes[0].getnewaddress(): 11.11}

        raw_tx = self.nodes[1].createrawtransaction(inputs, outputs).replace("c0833842", "00000000")  # replace 11.11 with 0.0 (int32)
        signed_raw_tx = self.nodes[1].signrawtransactionwithwallet(raw_tx)
        decoded_raw_tx = self.nodes[1].decoderawtransaction(signed_raw_tx['hex'])
        zero_value_txid = decoded_raw_tx['txid']
        self.nodes[1].sendrawtransaction(signed_raw_tx['hex'])

        self.sync_all()
        self.nodes[1].generate(1)  # mine a block
        self.sync_all()

        unspent_txs = self.nodes[0].listunspent()  # zero value tx must be in listunspents output
        found = False
        for uTx in unspent_txs:
            if uTx['txid'] == zero_value_txid:
                found = True
                assert_equal(uTx['amount'], Decimal('0'))
        assert(found)

        # do some -walletbroadcast tests
        self.stop_nodes()
        self.start_node(0, ["-walletbroadcast=0"])
        self.start_node(1, ["-walletbroadcast=0"])
        self.start_node(2, ["-walletbroadcast=0"])
        connect_nodes_bi(self.nodes, 0, 1)
        connect_nodes_bi(self.nodes, 1, 2)
        connect_nodes_bi(self.nodes, 0, 2)
        self.sync_all([self.nodes[0:3]])

        txid_not_broadcast = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2)
        tx_obj_not_broadcast = self.nodes[0].gettransaction(txid_not_broadcast)
        self.nodes[1].generate(1)  # mine a block, tx should not be in there
        self.sync_all([self.nodes[0:3]])
        assert_equal(self.nodes[2].getbalance(), node_2_bal)  # should not be changed because tx was not broadcasted

        # now broadcast from another node, mine a block, sync, and check the balance
        self.nodes[1].sendrawtransaction(tx_obj_not_broadcast['hex'])
        self.nodes[1].generate(1)
        self.sync_all([self.nodes[0:3]])
        node_2_bal += 2
        tx_obj_not_broadcast = self.nodes[0].gettransaction(txid_not_broadcast)
        assert_equal(self.nodes[2].getbalance(), node_2_bal)

        # create another tx
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2)

        # restart the nodes with -walletbroadcast=1
        self.stop_nodes()
        self.start_node(0)
        self.start_node(1)
        self.start_node(2)
        connect_nodes_bi(self.nodes, 0, 1)
        connect_nodes_bi(self.nodes, 1, 2)
        connect_nodes_bi(self.nodes, 0, 2)
        sync_blocks(self.nodes[0:3])

        self.nodes[0].generate(1)
        sync_blocks(self.nodes[0:3])
        node_2_bal += 2

        # tx should be added to balance because after restarting the nodes tx should be broadcast
        assert_equal(self.nodes[2].getbalance(), node_2_bal)

        # send a tx with value in a string (PR#6380 +)
        txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "2")
        tx_obj = self.nodes[0].gettransaction(txid)
        assert_equal(tx_obj['amount'], Decimal('-2'))

        txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "0.0001")
        tx_obj = self.nodes[0].gettransaction(txid)
        assert_equal(tx_obj['amount'], Decimal('-0.0001'))

        # check if JSON parser can handle scientific notation in strings
        txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "1e-4")
        tx_obj = self.nodes[0].gettransaction(txid)
        assert_equal(tx_obj['amount'], Decimal('-0.0001'))

        # This will raise an exception because the amount type is wrong
        assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].sendtoaddress, self.nodes[2].getnewaddress(), "1f-4")

        # This will raise an exception since generate does not accept a string
        assert_raises_rpc_error(-1, "not an integer", self.nodes[0].generate, "2")

        # Import address and private key to check correct behavior of spendable unspents
        # 1. Send some coins to generate new UTXO
        address_to_import = self.nodes[2].getnewaddress()
        txid = self.nodes[0].sendtoaddress(address_to_import, 1)
        self.nodes[0].generate(1)
        self.sync_all([self.nodes[0:3]])

        # 2. Import address from node2 to node1
        self.nodes[1].importaddress(address_to_import)

        # 3. Validate that the imported address is watch-only on node1
        assert(self.nodes[1].getaddressinfo(address_to_import)["iswatchonly"])

        # 4. Check that the unspents after import are not spendable
        assert_array_result(self.nodes[1].listunspent(),
                            {"address": address_to_import},
                            {"spendable": False})

        # 5. Import private key of the previously imported address on node1
        priv_key = self.nodes[2].dumpprivkey(address_to_import)
        self.nodes[1].importprivkey(priv_key)

        # 6. Check that the unspents are now spendable on node1
        assert_array_result(self.nodes[1].listunspent(),
                            {"address": address_to_import},
                            {"spendable": True})

        # Mine a block from node0 to an address from node1
        coinbase_addr = self.nodes[1].getnewaddress()
        block_hash = self.nodes[0].generatetoaddress(1, coinbase_addr)[0]
        coinbase_txid = self.nodes[0].getblock(block_hash)['tx'][0]
        self.sync_all([self.nodes[0:3]])

        # Check that the txid and balance is found by node1
        self.nodes[1].gettransaction(coinbase_txid)

        # check if wallet or blockchain maintenance changes the balance
        self.sync_all([self.nodes[0:3]])
        blocks = self.nodes[0].generate(2)
        self.sync_all([self.nodes[0:3]])
        balance_nodes = [self.nodes[i].getbalance() for i in range(3)]
        block_count = self.nodes[0].getblockcount()

        # Check modes:
        #   - True: unicode escaped as \u....
        #   - False: unicode directly as UTF-8
        for mode in [True, False]:
            self.nodes[0].rpc.ensure_ascii = mode
            # unicode check: Basic Multilingual Plane, Supplementary Plane respectively
            for label in [u'рыба', u'𝅘𝅥𝅯']:
                addr = self.nodes[0].getnewaddress()
                self.nodes[0].setlabel(addr, label)
                assert_equal(self.nodes[0].getaddressinfo(addr)['label'], label)
                assert(label in self.nodes[0].listlabels())
        self.nodes[0].rpc.ensure_ascii = True  # restore to default

        # maintenance tests
        maintenance = [
            '-rescan',
            '-reindex',
            '-zapwallettxes=1',
            '-zapwallettxes=2',
            # disabled until issue is fixed: https://github.com/bitcoin/bitcoin/issues/7463
            # '-salvagewallet',
        ]
        chainlimit = 6
        for m in maintenance:
            self.log.info("check " + m)
            self.stop_nodes()
            # set lower ancestor limit for later
            self.start_node(0, [m, "-limitancestorcount=" + str(chainlimit)])
            self.start_node(1, [m, "-limitancestorcount=" + str(chainlimit)])
            self.start_node(2, [m, "-limitancestorcount=" + str(chainlimit)])
            if m == '-reindex':
                # reindex will leave rpc warm up "early"; Wait for it to finish
                wait_until(lambda: [block_count] * 3 == [self.nodes[i].getblockcount() for i in range(3)])
            assert_equal(balance_nodes, [self.nodes[i].getbalance() for i in range(3)])

        # Exercise listsinceblock with the last two blocks
        coinbase_tx_1 = self.nodes[0].listsinceblock(blocks[0])
        assert_equal(coinbase_tx_1["lastblock"], blocks[1])
        assert_equal(len(coinbase_tx_1["transactions"]), 1)
        assert_equal(coinbase_tx_1["transactions"][0]["blockhash"], blocks[1])
        assert_equal(len(self.nodes[0].listsinceblock(blocks[1])["transactions"]), 0)

        # ==Check that wallet prefers to use coins that don't exceed mempool limits =====

        # Get all non-zero utxos together
        chain_addrs = [self.nodes[0].getnewaddress(), self.nodes[0].getnewaddress()]
        singletxid = self.nodes[0].sendtoaddress(chain_addrs[0], self.nodes[0].getbalance(), "", "", True)
        self.nodes[0].generate(1)
        node0_balance = self.nodes[0].getbalance()
        # Split into two chains
        rawtx = self.nodes[0].createrawtransaction([{"txid": singletxid, "vout": 0}], {chain_addrs[0]: node0_balance / 2 - Decimal('0.01'), chain_addrs[1]: node0_balance / 2 - Decimal('0.01')})
        signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx)
        singletxid = self.nodes[0].sendrawtransaction(signedtx["hex"])
        self.nodes[0].generate(1)

        # Make a long chain of unconfirmed payments without hitting mempool limit
        # Each tx we make leaves only one output of change on a chain 1 longer
        # Since the amount to send is always much less than the outputs, we only ever need one output
        # So we should be able to generate exactly chainlimit txs for each original output
        sending_addr = self.nodes[1].getnewaddress()
        txid_list = []
        for i in range(chainlimit * 2):
            txid_list.append(self.nodes[0].sendtoaddress(sending_addr, Decimal('0.0001')))
        assert_equal(self.nodes[0].getmempoolinfo()['size'], chainlimit * 2)
        assert_equal(len(txid_list), chainlimit * 2)

        # Without walletrejectlongchains, we will still generate a txid
        # The tx will be stored in the wallet but not accepted to the mempool
        extra_txid = self.nodes[0].sendtoaddress(sending_addr, Decimal('0.0001'))
        assert(extra_txid not in self.nodes[0].getrawmempool())
        assert(extra_txid in [tx["txid"] for tx in self.nodes[0].listtransactions()])
        self.nodes[0].abandontransaction(extra_txid)
        total_txs = len(self.nodes[0].listtransactions("*", 99999))

        # Try with walletrejectlongchains
        # Double chain limit but require combining inputs, so we pass SelectCoinsMinConf
        self.stop_node(0)
        self.start_node(0, extra_args=["-walletrejectlongchains", "-limitancestorcount=" + str(2 * chainlimit)])

        # wait for loadmempool
        timeout = 10
        while (timeout > 0 and len(self.nodes[0].getrawmempool()) < chainlimit * 2):
            time.sleep(0.5)
            timeout -= 0.5
        assert_equal(len(self.nodes[0].getrawmempool()), chainlimit * 2)

        node0_balance = self.nodes[0].getbalance()
        # With walletrejectlongchains we will not create the tx and store it in our wallet.
        assert_raises_rpc_error(-4, "Transaction has too long of a mempool chain", self.nodes[0].sendtoaddress, sending_addr, node0_balance - Decimal('0.01'))

        # Verify nothing new in wallet
        assert_equal(total_txs, len(self.nodes[0].listtransactions("*", 99999)))

        # Test getaddressinfo. Note that these addresses are taken from disablewallet.py
        assert_raises_rpc_error(-5, "Invalid address", self.nodes[0].getaddressinfo, "3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy")
        address_info = self.nodes[0].getaddressinfo("mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ")
        assert_equal(address_info['address'], "mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ")
        assert_equal(address_info["scriptPubKey"], "76a9144e3854046c7bd1594ac904e4793b6a45b36dea0988ac")
        assert not address_info["ismine"]
        assert not address_info["iswatchonly"]
        assert not address_info["isscript"]
Esempio n. 8
0
def a(x):
    return Decimal(x) / (1 - Decimal(x) - Decimal(x)**2)
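# Usage sketch for a(x): it is the generating function of the Fibonacci numbers,
# a(x) = x / (1 - x - x**2) = F1*x + F2*x**2 + F3*x**3 + ..., so evaluating it at a
# small x makes the early Fibonacci numbers visible (nothing beyond the stdlib assumed).
from decimal import Decimal, getcontext

getcontext().prec = 30
print(a(Decimal("0.1")))  # 0.112359550561797752808988764045 = 10/89; 1, 1, 2, 3, 5 appear before carries blur the pattern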
Esempio n. 9
0
def decimal_round(value, num=2):
    save_num = '{:.' + str(num) + 'f}'
    return save_num.format(Decimal(str(value)))
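# Usage sketch for decimal_round: it returns a string with `value` shown to `num`
# decimal places, going through Decimal(str(value)) so binary-float noise such as
# 0.1 + 0.2 == 0.30000000000000004 does not leak into the output.
print(decimal_round(3.14159))    # '3.14'
print(decimal_round(0.1 + 0.2))  # '0.30'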
Esempio n. 10
0
    rs.append(rs_state)
    #print selected_dynamics['step-number'].iloc[j],":", rs
    proc_table=pd.read_table(state_main_dir+str(rs_state)+'/processtable', delimiter = r'\s+', skiprows = [0], names=['proc-id', 'saddle-e', 'prefactor', 'product-id', 'product-e', 'product-prefactor', 'barrier', 'rate', 'repeats'])
    barrier.append(proc_table[proc_table['proc-id']==selected_dynamics['process-id'].iloc[j]]['saddle-e'].iloc[0])
    try:
      index = rs.index(selected_dynamics['product-id'].iloc[j])
      del rs[index:]
      del barrier[index:]
    except ValueError:  # product-id not previously on this trajectory
      pass
    if selected_dynamics['product-id'].iloc[j]==end:
       rs.append(selected_dynamics['product-id'].iloc[j])
       t_akmc = selected_dynamics['total-time'].iloc[j]
       #print "reached", end
       break
 overall_barrier.write("%10s %6d%3s%-6d %12.4f %12.4f %12.4f %.6E\n"%(str(start_coord)+'-->'+str(end_coord), rs_state,'-->', end, minima[start_coord][1], max(barrier)-minima[start_coord][1], states_e[end], Decimal(str(t_akmc))))
 overall_barrier.flush()
 #find and output trajectories and energy profile for each transition
 for j in range(len(rs)):
     state_n = rs[j]
     try:
       output.write("%8d  %4.2f %12.4f\n"%(state_n, float(j), states_e[state_n]))
       output.write("%8d  %4.2f %12.4f\n"%(state_n+0.5, float(j+0.5), barrier[j]))
     except (KeyError, IndexError):  # missing state energy or barrier entry
       pass
     os.chdir(state_main_dir+str(state_n))
     atoms = read('reactant.con',index=0)
     Au_coord, surface_Au, surface_atom = get_coord(atoms)
     log_structures.write(atoms)
     del atoms[surface_atom]
     log_cores.write(atoms)
Esempio n. 11
0
def phi(n):
    cf = [1] * (n + 1)
    return Decimal(h(n, cf)) / Decimal(k(n, cf))
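# phi(n) above relies on helpers h and k that are not shown; one plausible reading
# (an assumption, not recovered code) is the standard continued-fraction convergent
# recurrence for [a0; a1, a2, ...]:
def h(i, cf):
    # numerator of the i-th convergent: h(i) = a_i * h(i-1) + h(i-2)
    if i == -2:
        return 0
    if i == -1:
        return 1
    return cf[i] * h(i - 1, cf) + h(i - 2, cf)

def k(i, cf):
    # denominator of the i-th convergent: k(i) = a_i * k(i-1) + k(i-2)
    if i == -2:
        return 1
    if i == -1:
        return 0
    return cf[i] * k(i - 1, cf) + k(i - 2, cf)

# With cf = [1] * (n + 1) this makes phi(n) the n-th convergent of [1; 1, 1, ...],
# i.e. a ratio of consecutive Fibonacci numbers approaching the golden ratio 1.618...;
# the naive recursion above is exponential and only meant for small n.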
Esempio n. 12
0
def test_bumpfee_metadata(rbf_node, dest_address):
    rbfid = rbf_node.sendtoaddress(dest_address, Decimal("0.00100000"), "comment value", "to value")
    bumped_tx = rbf_node.bumpfee(rbfid)
    bumped_wtx = rbf_node.gettransaction(bumped_tx["txid"])
    assert_equal(bumped_wtx["comment"], "comment value")
    assert_equal(bumped_wtx["to"], "to value")
Esempio n. 13
0
def test_nonrbf_bumpfee_fails(peer_node, dest_address):
    # cannot replace a non RBF transaction (from node which did not enable RBF)
    not_rbfid = peer_node.sendtoaddress(dest_address, Decimal("0.00090000"))
    assert_raises_rpc_error(-4, "not BIP 125 replaceable", peer_node.bumpfee, not_rbfid)
Esempio n. 14
0
 def get_point_on_line(self):
     # solve for y when x=1
     y = (self.constant_term - Decimal(self.normal_vector[0]))/Decimal(self.normal_vector[1])
     return [1, y]
Esempio n. 15
0
def check_json_precision():
    """Make sure json library being used does not lose precision converting BTC values"""
    n = Decimal("20000000.00000003")
    satoshis = int(json.loads(json.dumps(float(n))) * 1.0e8)
    if satoshis != 2000000000000003:
        raise RuntimeError("JSON encode/decode loses precision")
Esempio n. 16
0
 def parse_amount(self, x):
     if x.strip() == '!':
         return '!'
     p = pow(10, self.amount_edit.decimal_point())
     return int(p * Decimal(x.strip()))
Esempio n. 17
0
def satoshi_round(amount):
    return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
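# Usage sketch: quantizing with ROUND_DOWN truncates toward zero at eight decimal
# places, so sub-satoshi dust never rounds an amount upward (satoshi_round itself
# assumes Decimal and ROUND_DOWN are imported from decimal).
from decimal import Decimal, ROUND_DOWN

assert satoshi_round("0.123456789") == Decimal("0.12345678")
assert satoshi_round(Decimal("49.9999999999")) == Decimal("49.99999999")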
Esempio n. 18
0
 def __init__(self, profiling_dir, device_id):
     self._profiling_dir = profiling_dir
     self._device_id = device_id
     self._op_time_cache = {}
     self._total_time = Decimal('0.0')
Esempio n. 19
0
from decimal import Decimal

from corehq.apps.accounting.models import (
    FeatureType,
    SoftwarePlanEdition,
    UNLIMITED_FEATURE_USAGE,
)

BOOTSTRAP_CONFIG = {
    (SoftwarePlanEdition.COMMUNITY, False, False): {
        'role': 'community_plan_v0',
        'product_rate': dict(),
        'feature_rates': {
            FeatureType.USER: dict(monthly_limit=50, per_excess_fee=Decimal('1.00')),
            FeatureType.SMS: dict(monthly_limit=0),
        }
    },
    (SoftwarePlanEdition.STANDARD, False, False): {
        'role': 'standard_plan_v0',
        'product_rate': dict(monthly_fee=Decimal('100.00')),
        'feature_rates': {
            FeatureType.USER: dict(monthly_limit=100, per_excess_fee=Decimal('1.00')),
            FeatureType.SMS: dict(monthly_limit=100),
        }
    },
    (SoftwarePlanEdition.PRO, False, False): {
        'role': 'pro_plan_v0',
        'product_rate': dict(monthly_fee=Decimal('500.00')),
        'feature_rates': {
            FeatureType.USER: dict(monthly_limit=500, per_excess_fee=Decimal('1.00')),
            FeatureType.SMS: dict(monthly_limit=500),
Esempio n. 20
0
 def test_base_json_conv(self):
     assert isinstance(base_json_conv(numpy.bool_(1)), bool) is True
     assert isinstance(base_json_conv(numpy.int64(1)), int) is True
     assert isinstance(base_json_conv(set([1])), list) is True
     assert isinstance(base_json_conv(Decimal('1.0')), float) is True
     assert isinstance(base_json_conv(uuid.uuid4()), str) is True
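# The test above only pins down behavior; a converter consistent with those
# assertions could look like the sketch below (an illustration, not the library's
# actual base_json_conv implementation).
import uuid
from decimal import Decimal

import numpy

def base_json_conv_sketch(obj):
    # map non-JSON-native types onto JSON-friendly Python builtins
    if isinstance(obj, numpy.bool_):
        return bool(obj)
    if isinstance(obj, numpy.integer):
        return int(obj)
    if isinstance(obj, set):
        return list(obj)
    if isinstance(obj, Decimal):
        return float(obj)
    if isinstance(obj, uuid.UUID):
        return str(obj)
    return obj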
Esempio n. 21
0
def generate_mask_pattern(model, dl, target_class):
    mask_tanh_t = Variable(torch.tensor(mask_tanh.copy()).to(device=device),
                           requires_grad=True)
    pattern_tanh_t = Variable(torch.tensor(
        pattern_tanh.copy()).to(device=device),
                              requires_grad=True)
    mask_upsampler = nn.Upsample(scale_factor=UPSAMPLE_SIZE, mode="nearest")

    # Define optimizer
    # if args.dataset == "mnist":
    #     criterion = nn.NLLLoss()
    # else:
    criterion = nn.CrossEntropyLoss()

    opt = Adam([mask_tanh_t, pattern_tanh_t], lr=LR, betas=BETAS)

    cost = INIT_COST

    # best optimization results
    mask_best = None
    mask_upsample_best = None
    pattern_best = None
    reg_best = float("inf")

    # logs and counters for adjusting balance cost
    logs = []
    cost_set_counter = 0
    cost_up_counter = 0
    cost_down_counter = 0
    cost_up_flag = False
    cost_down_flag = False

    # counter for early stop
    early_stop_counter = 0
    early_stop_reg_best = reg_best

    # loop start
    for step in range(STEPS):
        # record loss for all mini-batches
        loss_ce_list = []
        loss_reg_list = []
        loss_list = []
        loss_acc_list = []

        for img, _ in dl:
            # Forward
            label = torch.Tensor([target_class] *
                                 img.shape[0]).long().to(device=device)
            img = img.permute(0, 3, 1, 2).to(device=device)

            mask_t = torch.tanh(mask_tanh_t) / (2 - EPSILON) + 0.5
            mask_t = mask_t.repeat(1, 1, IMG_COLOR).unsqueeze(0)
            mask_t_t = mask_t.permute(0, 3, 1, 2)
            mask_t = mask_upsampler(mask_t_t)
            mask_t = mask_t[:, :, :IMG_ROWS, :IMG_COLS]
            rev_mask_t = 1 - mask_t

            pattern_t = (torch.tanh(pattern_tanh_t) /
                         (2 - EPSILON) + 0.5) * 255.0
            pattern_t = pattern_t.unsqueeze(0)
            pattern_t = pattern_t.permute(0, 3, 1, 2)

            X_t = rev_mask_t * img + mask_t * pattern_t
            if NORMALIZE:
                X_t = X_t / 255.0

            if args.dataset == "mnist":
                _ = model(X_t.float())
                out = hook_fn_feat_layer.outputs
            else:
                out = model(X_t.float())
            loss_ce = criterion(out, label)

            if REGULARIZATION is None:
                # keep a tensor so loss_reg.item() below does not fail
                loss_reg = torch.tensor(0.0, device=device)
            elif REGULARIZATION == "l1":  # compare strings with ==, not `is`
                loss_reg = mask_t.abs().sum() / IMG_COLOR
            elif REGULARIZATION == "l2":
                loss_reg = torch.sqrt(torch.square(mask_t).sum()) / IMG_COLOR

            loss = loss_ce + cost * loss_reg
            loss_acc = (out.argmax(-1) == label).float().sum() / len(label)

            model.zero_grad()
            loss.backward()
            opt.step()

            loss_ce_list.append(loss_ce.item())
            loss_reg_list.append(loss_reg.item())
            loss_list.append(loss.item())
            loss_acc_list.append(loss_acc.item())

        avg_loss_ce = np.mean(loss_ce_list)
        avg_loss_reg = np.mean(loss_reg_list)
        avg_loss = np.mean(loss_list)
        avg_loss_acc = np.mean(loss_acc_list)

        # check to save best mask or not
        if avg_loss_acc >= ATTACK_SUCC_THRESHOLD and avg_loss_reg < reg_best:
            mask_best = mask_t_t[0, 0, ...].data.cpu().numpy()
            mask_upsample_best = mask_t[0, 0, ...].data.cpu().numpy()
            pattern_best = pattern_t.data.cpu().squeeze(0).permute(1, 2,
                                                                   0).numpy()
            reg_best = avg_loss_reg

        _log_txt = (
            "step: %3d, cost: %.2E, attack: %.3f, loss: %f, ce: %f, reg: %f, reg_best: %f"
            % (
                step,
                Decimal(cost),
                avg_loss_acc,
                avg_loss,
                avg_loss_ce,
                avg_loss_reg,
                reg_best,
            ))
        # verbose
        if VERBOSE != 0:
            if VERBOSE == 2 or step % (STEPS // 10) == 0:
                print(_log_txt)

        # save log
        logs.append(_log_txt)

        # check early stop
        if EARLY_STOP:
            # only terminate if a valid attack has been found
            if reg_best < float("inf"):
                if reg_best >= EARLY_STOP_THRESHOLD * early_stop_reg_best:
                    early_stop_counter += 1
                else:
                    early_stop_counter = 0
            early_stop_reg_best = min(reg_best, early_stop_reg_best)

            if (cost_down_flag and cost_up_flag
                    and early_stop_counter >= EARLY_STOP_PATIENCE):
                print("early stop")
                break

        # check cost modification
        if cost == 0 and avg_loss_acc >= ATTACK_SUCC_THRESHOLD:
            cost_set_counter += 1
            if cost_set_counter >= PATIENCE:
                cost = INIT_COST
                cost_up_counter = 0
                cost_down_counter = 0
                cost_up_flag = False
                cost_down_flag = False
                print("initialize cost to %.2E" % Decimal(self.cost))
        else:
            cost_set_counter = 0

        if avg_loss_acc >= ATTACK_SUCC_THRESHOLD:
            cost_up_counter += 1
            cost_down_counter = 0
        else:
            cost_up_counter = 0
            cost_down_counter += 1

        if cost_up_counter >= PATIENCE:
            cost_up_counter = 0
            if VERBOSE == 2:
                print("up cost from %.2E to %.2E" %
                      (Decimal(cost), Decimal(cost * COST_MULTIPLIER_UP)))
            cost *= COST_MULTIPLIER_UP  # match the multiplier reported just above
            cost_up_flag = True
        elif cost_down_counter >= PATIENCE:  # the down branch waits on patience, like the up branch
            cost_down_counter = 0
            if VERBOSE == 2:
                print("down cost from %.2E to %.2E" %
                      (Decimal(cost), Decimal(cost / COST_MULTIPLIER_DOWN)))
            cost /= COST_MULTIPLIER_DOWN
            cost_down_flag = True

    #         if self.save_tmp:
    #             self.save_tmp_func(step)

    # save the final version
    if mask_best is None:
        mask_best = mask_t_t[0, 0, ...].data.cpu().numpy()
        mask_upsample_best = mask_t[0, 0, ...].data.cpu().numpy()
        pattern_best = pattern_t.data.cpu().squeeze(0).permute(1, 2, 0).numpy()

    return pattern_best, mask_best, mask_upsample_best, logs
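# The cost schedule above, in isolation, as a toy sketch with made-up constants
# (PATIENCE_DEMO, UP, DOWN are illustrative, not the script's globals): the
# regularization weight ramps up only after PATIENCE consecutive successful steps
# and backs off after PATIENCE consecutive misses.
PATIENCE_DEMO, UP, DOWN = 5, 2.0, 2.0 ** 0.5
cost_demo, up_ctr, down_ctr = 1e-3, 0, 0
for attack_succeeded in [True] * 12 + [False] * 12:
    if attack_succeeded:
        up_ctr, down_ctr = up_ctr + 1, 0
    else:
        up_ctr, down_ctr = 0, down_ctr + 1
    if up_ctr >= PATIENCE_DEMO:
        up_ctr, cost_demo = 0, cost_demo * UP
    elif down_ctr >= PATIENCE_DEMO:
        down_ctr, cost_demo = 0, cost_demo / DOWN
print("final cost: %.2E" % cost_demo)  # 1e-3 * 2 * 2 / sqrt(2) / sqrt(2) = 2.00E-03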
Esempio n. 22
0
 def set_btc_per_usd(cls, usd_per_btc: str):
   cls.usd_per_btc = Decimal(usd_per_btc)
Esempio n. 23
0
class sc_cert_change(BitcoinTestFramework):

    alert_filename = None

    def setup_chain(self, split=False):
        print("Initializing test directory " + self.options.tmpdir)
        initialize_chain_clean(self.options.tmpdir, NUMB_OF_NODES)
        self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
        with open(self.alert_filename, 'w'):
            pass  # Just open then close to create zero-length file

    def setup_network(self, split=False):
        self.nodes = []

        self.nodes = start_nodes(
            NUMB_OF_NODES,
            self.options.tmpdir,
            extra_args=[[
                '-debug=py', '-debug=sc', '-debug=mempool', '-debug=net',
                '-debug=cert', '-debug=zendoo_mc_cryptolib',
                '-scproofqueuesize=0', '-logtimemicros=1'
            ]] * NUMB_OF_NODES)

        for k in range(0, NUMB_OF_NODES - 1):
            connect_nodes_bi(self.nodes, k, k + 1)

        sync_blocks(self.nodes[1:NUMB_OF_NODES])
        sync_mempools(self.nodes[1:NUMB_OF_NODES])
        self.is_network_split = split
        self.sync_all()

    def run_test(self):
        '''
        1) node0 creates a sidechain with 10.0 coins
           reach epoch 0
        2) node0 creates a cert_ep0 funding node1 with 1.0 coins
           reach epoch 1
        3) node0 creates a cert_ep1 funding node2 with 2.0 coins
           reach epoch 2
        4) node1 creates a cert_ep2 funding node3 with 3.0 coins: it uses cert_ep0 as input, and the change is what remains after a fee of 0.0001
           node0 mines a new block
        5) node1 has just one UTXO, the change of cert_ep0, which should be 0.999; it sends 0.5 to node3
           node0 mines a new block
        6) node3 has a 0.5 balance plus 3.0 immature coins from cert_ep2
        '''

        # cross chain transfer amounts
        creation_amount = Decimal("10.0")

        mark_logs("Node 0 generates {} block".format(MINIMAL_SC_HEIGHT),
                  self.nodes, DEBUG_MODE)
        self.nodes[0].generate(MINIMAL_SC_HEIGHT)
        self.sync_all()

        # (1) node0 creates a sidechain with 10.0 coins
        mcTest = CertTestUtils(self.options.tmpdir, self.options.srcdir)
        vk = mcTest.generate_params("sc1")
        constant = generate_random_field_element_hex()
        cmdInput = {
            "version": 0,
            "withdrawalEpochLength": EPOCH_LENGTH,
            "toaddress": "dada",
            "amount": creation_amount,
            "wCertVk": vk,
            "constant": constant
        }

        ret = self.nodes[0].sc_create(cmdInput)
        creating_tx = ret['txid']
        scid = ret['scid']
        scid_swapped = str(swap_bytes(scid))
        mark_logs(
            "Node 0 created the SC spending {} coins via tx {}.".format(
                creation_amount, creating_tx), self.nodes, DEBUG_MODE)
        self.sync_all()

        decoded_tx = self.nodes[0].getrawtransaction(creating_tx, 1)
        assert_equal(scid, decoded_tx['vsc_ccout'][0]['scid'])
        mark_logs("created SC id: {}".format(scid), self.nodes, DEBUG_MODE)

        mark_logs("Node0 generates 5 blocks to achieve end of epoch",
                  self.nodes, DEBUG_MODE)
        prev_epoch_block_hash = self.nodes[0].getblockhash(
            self.nodes[0].getblockcount())
        self.nodes[0].generate(5)
        self.sync_all()
        epoch_number, epoch_cum_tree_hash = get_epoch_data(
            scid, self.nodes[0], EPOCH_LENGTH)
        mark_logs(
            "epoch_number = {}, epoch_cum_tree_hash = {}".format(
                epoch_number, epoch_cum_tree_hash), self.nodes, DEBUG_MODE)

        # (2) node0 creates a cert_ep0 funding node1 with 1.0 coins
        addr_node1 = self.nodes[1].getnewaddress()
        bwt_amount = Decimal("1.0")
        amounts = [{"address": addr_node1, "amount": bwt_amount}]

        quality = 0
        proof = mcTest.create_test_proof("sc1", scid_swapped, epoch_number,
                                         quality, MBTR_SC_FEE, FT_SC_FEE,
                                         epoch_cum_tree_hash, constant,
                                         [addr_node1], [bwt_amount])

        mark_logs(
            "Node 0 performs a bwd transfer of {} coins to Node1 address {}".
            format(bwt_amount, addr_node1), self.nodes, DEBUG_MODE)
        try:
            cert_ep0 = self.nodes[0].sc_send_certificate(
                scid, epoch_number, quality, epoch_cum_tree_hash, proof,
                amounts, FT_SC_FEE, MBTR_SC_FEE, CERT_FEE)
            assert (len(cert_ep0) > 0)
            mark_logs("Certificate is {}".format(cert_ep0), self.nodes,
                      DEBUG_MODE)
            self.sync_all()
        except JSONRPCException as e:
            errorString = e.error['message']
            mark_logs(
                "Send certificate failed with reason {}".format(errorString),
                self.nodes, DEBUG_MODE)
            assert (False)

        mark_logs("Node0 generates 5 blocks to achieve end of epoch",
                  self.nodes, DEBUG_MODE)
        self.nodes[0].generate(5)
        self.sync_all()
        epoch_number, epoch_cum_tree_hash = get_epoch_data(
            scid, self.nodes[0], EPOCH_LENGTH)
        mark_logs(
            "epoch_number = {}, epoch_cum_tree_hash = {}".format(
                epoch_number, epoch_cum_tree_hash), self.nodes, DEBUG_MODE)

        # (3) node0 creates a cert_ep1 funding node2 with 2.0 coins
        addr_node2 = self.nodes[2].getnewaddress()
        bwt_amount = Decimal("2.0")
        amounts = [{"address": addr_node2, "amount": bwt_amount}]

        quality = 1
        proof = mcTest.create_test_proof("sc1", scid_swapped, epoch_number,
                                         quality, MBTR_SC_FEE, FT_SC_FEE,
                                         epoch_cum_tree_hash, constant,
                                         [addr_node2], [bwt_amount])

        mark_logs(
            "Node 0 performs a bwd transfer of {} coins to Node2 address {}".
            format(bwt_amount, addr_node2), self.nodes, DEBUG_MODE)
        try:
            cert_ep1 = self.nodes[0].sc_send_certificate(
                scid, epoch_number, quality, epoch_cum_tree_hash, proof,
                amounts, FT_SC_FEE, MBTR_SC_FEE, CERT_FEE)
            assert (len(cert_ep1) > 0)
            mark_logs("Certificate is {}".format(cert_ep1), self.nodes,
                      DEBUG_MODE)
            self.sync_all()
        except JSONRPCException as e:
            errorString = e.error['message']
            mark_logs(
                "Send certificate failed with reason {}".format(errorString),
                self.nodes, DEBUG_MODE)
            assert (False)
Esempio n. 24
0
def get_trade(
    trade_id: int = 3132964,
    product: Pair = Pair.ETH_BTC,
    side: Side = Side.SELL,
    created_at: datetime = datetime(2018, 1, 2, 1, 18, 26, 406, pytz.UTC),
    size: Decimal = Decimal("0.00768977"),
    price: Decimal = Decimal("0.0575"),
    fee: Decimal = Decimal("0"),
    usd_per_btc: Decimal = Decimal("13815.04"),
    wash: bool = False
) -> Series:
  """
  Default values added for both example and ease to create a trade
  :param trade_id: int trade id
  :param product: Pair
  :param side: Side
  :param created_at: time, e.g. 2018-01-02T01:18:26.406Z
  :param size: Decimal
  :param price: Decimal
  :param fee: Decimal
  :param usd_per_btc: Decimal
  :param wash: bool

  size_unit: ETH
  total: 0.000442161775
  price/fee/total unit: BTC
  total in usd: 6.10

  :return Series representing the trade

  """
  # Values are assumed to be passed in as positive; this is a sanity check
  if price < 0 or size < 0 or fee < 0 or (
    not usd_per_btc.is_nan() and usd_per_btc < 0
  ):
    raise ValueError("Passed negative value to helper method.")
  quote: Asset = product.get_quote_asset()
  total = -price * size - fee if Side.BUY == side else price * size - fee
  if quote == Asset.USD:
    usd_per_btc = Decimal("nan")
    value = abs(total)
  elif quote == Asset.BTC:
    value = abs(USD_ROUNDER(total * usd_per_btc))
  else:
    raise ValueError("Pair not supported: " + str(product))

  trade_series = Series({
    ID: trade_id,
    PAIR: product,
    SIDE: side,
    TIME: created_at,
    SIZE: size,
    SIZE_UNIT: product.get_base_asset(),
    PRICE: price,
    FEE: fee,
    P_F_T_UNIT: quote,
    TOTAL: total,
    USD_PER_BTC: usd_per_btc,
    VALUE_IN_USD: value
  })
  if wash:
    trade_series[ADJUSTED_VALUE] = value
    trade_series[ADJUSTED_SIZE] = Decimal(0)
    trade_series[WASH_P_L_IDS] = []
  return trade_series
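# Usage sketch with the defaults above (assumes the module constants used in the
# function, such as TOTAL, are importable alongside get_trade): a SELL of
# 0.00768977 ETH at 0.0575 BTC/ETH with zero fee nets exactly
# 0.0575 * 0.00768977 = 0.000442161775 BTC, the docstring's "total", which is
# roughly 6.10 USD at 13815.04 USD/BTC after rounding.
default_trade = get_trade()
assert default_trade[TOTAL] == Decimal("0.000442161775")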
               default=[["ETH", "coin_gecko_api"],
                        ["DAI", "coin_gecko_api"]]),
 "kill_switch_enabled":
     ConfigVar(key="kill_switch_enabled",
               prompt="Would you like to enable the kill switch? (Yes/No) >>> ",
               required_if=paper_trade_disabled,
               type_str="bool",
               default=False,
               validator=validate_bool),
 "kill_switch_rate":
     ConfigVar(key="kill_switch_rate",
               prompt="At what profit/loss rate would you like the bot to stop? "
                      "(e.g. -5 equals 5 percent loss) >>> ",
               type_str="decimal",
               default=-100,
               validator=lambda v: validate_decimal(v, Decimal(-100), Decimal(100)),
               required_if=lambda: global_config_map["kill_switch_enabled"].value),
 "telegram_enabled":
     ConfigVar(key="telegram_enabled",
               prompt="Would you like to enable telegram? >>> ",
               type_str="bool",
               default=False,
               required_if=lambda: False),
 "telegram_token":
     ConfigVar(key="telegram_token",
               prompt="What is your telegram token? >>> ",
               required_if=lambda: False),
 "telegram_chat_id":
     ConfigVar(key="telegram_chat_id",
               prompt="What is your telegram chat id? >>> ",
               required_if=lambda: False),
Esempio n. 26
0
class Migration(migrations.Migration):

    dependencies = [
        ("order", "0064_auto_20181016_0819"),
        ("payment", "0002_transfer_payment_to_payment_method"),
    ]

    operations = [
        migrations.AlterField(
            model_name="order",
            name="discount_amount",
            field=django_prices.models.MoneyField(
                currency=settings.DEFAULT_CURRENCY,
                decimal_places=2,
                default=saleor.core.taxes.zero_money,
                max_digits=12,
            ),
        ),
        migrations.AlterField(
            model_name="order",
            name="total_gross",
            field=django_prices.models.MoneyField(
                currency=settings.DEFAULT_CURRENCY,
                decimal_places=2,
                default=saleor.core.taxes.zero_money,
                max_digits=12,
            ),
        ),
        migrations.AlterField(
            model_name="order",
            name="total_net",
            field=django_prices.models.MoneyField(
                currency=settings.DEFAULT_CURRENCY,
                decimal_places=2,
                default=saleor.core.taxes.zero_money,
                max_digits=12,
            ),
        ),
        migrations.RemoveField(model_name="payment", name="order"),
        migrations.DeleteModel(name="Payment"),
        migrations.AlterField(
            model_name="orderevent",
            name="type",
            field=models.CharField(
                choices=[
                    ("PLACED", "placed"),
                    ("PLACED_FROM_DRAFT", "draft_placed"),
                    ("OVERSOLD_ITEMS", "oversold_items"),
                    ("ORDER_MARKED_AS_PAID", "marked_as_paid"),
                    ("CANCELED", "canceled"),
                    ("ORDER_FULLY_PAID", "order_paid"),
                    ("UPDATED", "updated"),
                    ("EMAIL_SENT", "email_sent"),
                    ("PAYMENT_CAPTURED", "captured"),
                    ("PAYMENT_REFUNDED", "refunded"),
                    ("PAYMENT_VOIDED", "voided"),
                    ("FULFILLMENT_CANCELED", "fulfillment_canceled"),
                    ("FULFILLMENT_RESTOCKED_ITEMS", "restocked_items"),
                    ("FULFILLMENT_FULFILLED_ITEMS", "fulfilled_items"),
                    ("TRACKING_UPDATED", "tracking_updated"),
                    ("NOTE_ADDED", "note_added"),
                    ("OTHER", "other"),
                ],
                max_length=255,
            ),
        ),
        migrations.AlterField(
            model_name="orderline",
            name="tax_rate",
            field=models.DecimalField(
                decimal_places=2, default=Decimal("0.0"), max_digits=5
            ),
        ),
    ]
Esempio n. 27
0
 def fail_accept(self, node, error_msg, txid, sign, redeem_script=""):
     assert_raises_rpc_error(-26, error_msg, send_to_witness, use_p2wsh=1, node=node, utxo=getutxo(txid), pubkey=self.pubkey[0], encode_p2sh=False, amount=Decimal("49.998"), sign=sign, insert_redeem_script=redeem_script)
Esempio n. 28
0
 def discount_net(self, count):
     if self.discount_enabled:
         discount = queryAdapter(self.context, ICartItemDiscount)
         if discount:
             return discount.net(self.net, self.vat, count)
     return Decimal(0)
Esempio n. 29
0
def test_product_category_discount_amount_with_minimum_price(rf):
    # Buy X amount of Y get Z discount from Y
    request, shop, _ = initialize_test(rf, False)

    basket = get_basket(request)
    supplier = get_default_supplier()

    single_product_price = Decimal("50")
    single_product_min_price = Decimal("40")
    discount_amount_value = Decimal("200")  # will exceed the minimum price
    quantity = 2

    # the expected discount amount should not be greater than the products
    expected_discount_amount = basket.create_price(
        single_product_price) * quantity

    category = CategoryFactory()

    # create basket rule that requires 2 products in basket
    product = create_product(printable_gibberish(),
                             shop=shop,
                             supplier=supplier,
                             default_price=single_product_price)
    shop_product = ShopProduct.objects.get(shop=shop, product=product)
    shop_product.minimum_price_value = single_product_min_price
    shop_product.save()
    shop_product.categories.add(category)

    basket.add_product(supplier=supplier,
                       shop=shop,
                       product=product,
                       quantity=quantity)
    basket.shipping_method = get_shipping_method(shop=shop)
    basket.save()

    rule = ProductsInBasketCondition.objects.create(quantity=2)
    rule.products.add(product)
    rule.save()

    campaign = BasketCampaign.objects.create(active=True,
                                             shop=shop,
                                             name="test",
                                             public_name="test")
    campaign.conditions.add(rule)

    DiscountFromCategoryProducts.objects.create(
        campaign=campaign,
        discount_amount=discount_amount_value,
        category=category)
    assert rule.matches(basket, [])
    basket.uncache()

    # the discounted price must not fall below the product's minimum price; since the
    # configured discount would exceed that, the discount amount should be capped
    final_lines = basket.get_final_lines()
    expected_discount_amount = basket.create_price(
        (single_product_price - single_product_min_price) * quantity)
    original_price = basket.create_price(single_product_price) * quantity
    line = final_lines[0]
    assert line.discount_amount == expected_discount_amount
    assert basket.total_price == original_price - expected_discount_amount
Esempio n. 30
0
def factor(fee, sign, direction):
    return ((1000000 - Decimal(fee)) / 1000000) ** ((sign + direction) // 2) * sign
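# Worked values, reading fee as parts-per-million and sign/direction as +/-1
# (an interpretation sketch based only on the expression above):
assert factor(1000, 1, 1) == Decimal("0.999")  # exponent 1: scaled down by the 0.1% fee
assert factor(1000, 1, -1) == 1                # exponent 0: left unchanged
assert factor(1000, -1, 1) == -1               # exponent 0, negated by sign
# factor(1000, -1, -1) == -(1 / Decimal("0.999")), the inverse scaling on the negative leg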