Example #1
def verifyBlkTx(tx2, blkid, idx, coinbase):
    # Compare the parsed transaction dict tx2 against the rows stored for
    # block blkid at position idx (Tx, TxIn and TxOut tables).
    blktx = BlockTx.query.filter(
        and_(BlockTx.blk_id == blkid, BlockTx.idx == idx)).first()
    tx1 = Tx.query.filter(Tx.id == blktx.tx_id).first()

    assert tx1.hash == double_sha256(tx2['__data__'])[::-1]
    assert tx1.version == tx2['version']
    assert tx1.lock_time == tx2['lockTime']
    assert tx1.tx_size == len(tx2['__data__'])

    assert tx1.in_count == len(tx2['txIn'])
    assert tx1.out_count == len(tx2['txOut'])
    assert tx1.coinbase == coinbase

    invalue = 0
    for i, txin2 in enumerate(tx2['txIn']):
        txin1 = TxIn.query.filter(and_(TxIn.tx_id == tx1.id,
                                       TxIn.tx_idx == i)).first()
        txin2['tx_idx'] = i
        verifyTxIn(txin1, txin2, coinbase)
        if not coinbase:
            # Sum the values of the spent outputs to cross-check fee accounting.
            prevtx = read_tx(txin2['prevout_hash'])
            invalue += prevtx['txOut'][txin2['prevout_n']]['value']

    outvalue = 0
    for j, txout2 in enumerate(tx2['txOut']):
        txout1 = TxOut.query.filter(
            and_(TxOut.tx_id == tx1.id, TxOut.tx_idx == j)).first()
        txout2['tx_idx'] = j
        verifyTxOut(txout1, txout2)
        outvalue += txout2['value']

    assert tx1.out_value == outvalue
    assert tx1.in_value == invalue
    if not coinbase:
        assert tx1.fee == (invalue - outvalue)
    return tx1
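All of these examples rely on a double_sha256 helper, and the first assert compares against tx1.hash with the byte order reversed, because Bitcoin txids are conventionally displayed little-endian relative to the raw digest. A minimal sketch of what that helper and the reversal amount to (assuming hashlib; the project's actual util module may differ, and txid_hex is purely illustrative):

import binascii
import hashlib

def double_sha256(data):
    # SHA-256 applied twice, as used for Bitcoin block and transaction hashes.
    return hashlib.sha256(hashlib.sha256(data).digest()).digest()

def txid_hex(raw_tx):
    # Byte-reversing the raw digest before hex-encoding yields the txid as
    # shown by block explorers, which is what the [::-1] above accounts for.
    return binascii.hexlify(double_sha256(raw_tx)[::-1])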
Example #2
def parse_Transaction(vds, has_nTime=False):
  # For the serialization format see
  # https://bitcoincore.org/en/segwit_wallet_dev/
  d = {}
  flag = 0
  # We need to exclude witness and flag data
  # for the txid calculation
  tx_data = ''
  # bitcointools uses len(vds.input), but this assumes vds contains a single tx!
  start_pos = vds.read_cursor

  tx_data_pos = vds.read_cursor
  d['version'] = vds.read_int32()
  if has_nTime:
    d['nTime'] = vds.read_uint32()
  tx_data += vds.input[tx_data_pos:vds.read_cursor]

  tx_data_pos = vds.read_cursor
  n_vin = vds.read_compact_size()
  if (n_vin == 0):
    flag = vds.read_compact_size()
    if (flag != 1):
      raise Exception('Segwit error: expecting 1 got {}'.format(flag))

  if (flag):
    tx_data_pos = vds.read_cursor
    n_vin = vds.read_compact_size()

  d['txIn'] = []
  for i in xrange(n_vin):
    d['txIn'].append(parse_TxIn(vds))
    d['txIn'][i]['txWitness'] = []
  n_vout = vds.read_compact_size()
  d['txOut'] = []
  for i in xrange(n_vout):
    d['txOut'].append(parse_TxOut(vds))

  tx_data += vds.input[tx_data_pos:vds.read_cursor]

  if (flag):
    read_witness_data(vds, n_vin, d)

  tx_data_pos = vds.read_cursor
  d['lockTime'] = vds.read_uint32()
  tx_data += vds.input[tx_data_pos:vds.read_cursor]

  d['size'] = vds.read_cursor - start_pos

  # Now we know the tx end pos, read the actual tx hash (not txid)
  d['__hash__'] = double_sha256(vds.input[start_pos:vds.read_cursor])
  d['__data__'] = tx_data
  # Get SegWit vsize too
  d['vsize'] = int(math.ceil((len(tx_data) * 3 + d['size']) / 4.0))

  return d
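The closing vsize line implements the BIP 141 virtual size: the weight is three times the serialization without witness data (tx_data here) plus the full serialization, and vsize is that weight divided by four, rounded up. A standalone sketch of the same arithmetic (the function name is illustrative):

import math

def virtual_size(base_size, total_size):
    # BIP 141: weight = 3 * base_size + total_size, vsize = ceil(weight / 4),
    # where base_size excludes the marker, flag and witness bytes.
    return int(math.ceil((3 * base_size + total_size) / 4.0))

# A 200-byte non-witness serialization with 100 bytes of witness overhead:
# weight = 3 * 200 + 300 = 900, so vsize = 225.
assert virtual_size(200, 300) == 225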
Example #3
def block_header_hash(chain, header):
    return util.double_sha256(header)
Example #4
def transaction_hash(chain, binary_tx):
    return util.double_sha256(binary_tx)
Example #5
def mixup_blocks(store, ds, count, datadir_chain_id=None, seed=None):
    # Read `count` block records from ds, reorder their offsets according to
    # `seed`, then import the blocks into `store` in the shuffled order.
    bytes_done = 0
    offsets = []

    for i in xrange(count):
        if ds.read_cursor + 8 <= len(ds.input):
            offsets.append(ds.read_cursor)
            magic = ds.read_bytes(4)
            length = ds.read_int32()
            ds.read_cursor += length
            if ds.read_cursor <= len(ds.input):
                continue
        raise IOError("End of input after %d blocks" % i)

    if seed > 1 and seed <= count:
        for i in xrange(0, seed * int(count / seed), seed):
            offsets[i:i + seed] = offsets[i:i + seed][::-1]
    elif seed == -3:
        for i in xrange(0, 3 * int(count / 3), 3):
            offsets[i:i + 3] = offsets[i + 1:i + 3] + [offsets[i]]
        print offsets
    elif seed:
        offsets = offsets[::-1]  # XXX want random

    for offset in offsets:
        ds.read_cursor = offset
        magic = ds.read_bytes(4)
        length = ds.read_int32()

        # Assume blocks obey the respective policy if they get here.
        chain_id = datadir_chain_id
        if chain_id is None:
            rows = store.selectall(
                """
                SELECT chain.chain_id
                  FROM chain
                  JOIN magic ON (chain.magic_id = magic.magic_id)
                 WHERE magic.magic = ?""", (store.binin(magic), ))
            if len(rows) == 1:
                chain_id = rows[0][0]
        if chain_id is None:
            ds.read_cursor = offset
            raise ValueError(
                "Chain not found for magic number %s in block file at"
                " offset %d." % (repr(magic), offset))

        # XXX pasted out of DataStore.import_blkdat
        end = ds.read_cursor + length

        hash = util.double_sha256(ds.input[ds.read_cursor:ds.read_cursor + 80])
        # XXX should decode target and check hash against it to
        # avoid loading garbage data.  But not for merged-mined or
        # CPU-mined chains that use different proof-of-work
        # algorithms.  Time to resurrect policy_id?

        block_row = store.selectrow(
            """
            SELECT block_id, block_height, block_chain_work,
                   block_nTime, block_total_seconds,
                   block_total_satoshis, block_satoshi_seconds
              FROM block
             WHERE block_hash = ?
        """, (store.hashin(hash), ))

        if block_row:
            # Block header already seen.  Don't import the block,
            # but try to add it to the chain.
            if chain_id is not None:
                b = {
                    "block_id": block_row[0],
                    "height": block_row[1],
                    "chain_work": store.binout_int(block_row[2]),
                    "nTime": block_row[3],
                    "seconds": block_row[4],
                    "satoshis": block_row[5],
                    "ss": block_row[6]
                }
                if store.selectrow(
                        """
                    SELECT 1
                      FROM chain_candidate
                     WHERE block_id = ?
                       AND chain_id = ?""", (b['block_id'], chain_id)):
                    store.log.info("block %d already in chain %d",
                                   b['block_id'], chain_id)
                    b = None
                else:
                    if b['height'] == 0:
                        b['hashPrev'] = GENESIS_HASH_PREV
                    else:
                        b['hashPrev'] = 'dummy'  # Fool adopt_orphans.
                    store.offer_block_to_chains(b, frozenset([chain_id]))
        else:
            b = store.parse_block(ds, chain_id, magic, length)
            b["hash"] = hash
            chain_ids = frozenset([] if chain_id is None else [chain_id])
            store.import_block(b, chain_ids=chain_ids)
            if ds.read_cursor != end:
                store.log.debug("Skipped %d bytes at block end",
                                end - ds.read_cursor)

        bytes_done += length
        if bytes_done >= store.commit_bytes:
            store.log.debug("commit")
            store.commit()
            bytes_done = 0

    if bytes_done > 0:
        store.commit()
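mixup_blocks walks the blk*.dat record framing: a 4-byte network magic, a 4-byte little-endian length, then the serialized block, whose first 80 bytes are the header that gets double-SHA256 hashed. A minimal standalone sketch of the same framing over a plain bytes buffer (no BCDataStream), assuming the buffer contains only well-formed records:

import hashlib
import struct

def iter_block_records(buf):
    # Yield (offset, magic, length, header_hash) for each record in a
    # blk*.dat-style buffer: 4-byte magic, uint32 length, then the block.
    cursor = 0
    while cursor + 8 <= len(buf):
        magic = buf[cursor:cursor + 4]
        (length,) = struct.unpack('<I', buf[cursor + 4:cursor + 8])
        block = buf[cursor + 8:cursor + 8 + length]
        header_hash = hashlib.sha256(hashlib.sha256(block[:80]).digest()).digest()
        yield cursor, magic, length, header_hash
        cursor += 8 + length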
Example #6
def merkle_hash(chain, hashes):
    return util.double_sha256(hashes)
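merkle_hash is applied while climbing the merkle tree, presumably over the 64-byte concatenation of two child hashes. A sketch of a full merkle-root computation built on the same primitive, assuming raw 32-byte transaction hashes in internal byte order and Bitcoin's rule of duplicating the last hash on odd levels (merkle_root here is illustrative, not the project's API):

import hashlib

def double_sha256(data):
    return hashlib.sha256(hashlib.sha256(data).digest()).digest()

def merkle_root(tx_hashes):
    # tx_hashes: list of raw 32-byte transaction hashes (internal byte order).
    level = list(tx_hashes)
    while len(level) > 1:
        if len(level) % 2:
            level.append(level[-1])  # duplicate the last hash on odd levels
        level = [double_sha256(level[i] + level[i + 1])
                 for i in range(0, len(level), 2)]
    return level[0]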
Example #7
def block_header_hash(chain, ds):
    return util.double_sha256(
        ds.input[ds.read_cursor:ds.read_cursor + 80])
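This variant hashes the fixed 80-byte header directly out of the datastream. For reference, a sketch of assembling and hashing such a header from its individual fields (field order per the Bitcoin header format; the helper name is illustrative):

import hashlib
import struct

def hash_block_header(version, prev_hash, merkle_root, timestamp, bits, nonce):
    # int32 version, 32-byte previous block hash, 32-byte merkle root,
    # then uint32 time, bits and nonce, all little-endian: exactly 80 bytes.
    header = struct.pack('<i32s32sIII', version, prev_hash, merkle_root,
                         timestamp, bits, nonce)
    assert len(header) == 80
    return hashlib.sha256(hashlib.sha256(header).digest()).digest()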