Example no. 1
0
def validate(db, source, timestamp, value, fee_fraction_int, text,
             block_index):
    """Validate a broadcast; return a list of problem strings (empty = valid).

    :param db: database connection (must provide ``cursor()``)
    :param source: broadcasting (feed) address
    :param timestamp: feed timestamp; must be non-negative and strictly
        greater than the previous valid broadcast's timestamp for this feed
    :param value: numeric broadcast value
    :param fee_fraction_int: fee fraction scaled to an integer
    :param text: broadcast text (may carry an 'options <int>' command)
    :param block_index: current block index (not used by this function;
        kept for interface parity with the other validators)
    """
    problems = []

    # For SQLite3: values beyond MAX_INT would overflow the integer columns.
    if timestamp > config.MAX_INT or value > config.MAX_INT or fee_fraction_int > config.MAX_INT:
        problems.append('integer overflow')

    if util.enabled('max_fee_fraction'):
        if fee_fraction_int >= config.UNIT:
            problems.append('fee fraction greater than or equal to 1')
    else:
        if fee_fraction_int > 4294967295:
            problems.append('fee fraction greater than 42.94967295')

    if timestamp < 0:
        problems.append('negative timestamp')

    if not source:
        problems.append('null source address')

    # Check previous broadcast in this feed.
    cursor = db.cursor()
    broadcasts = list(
        cursor.execute(
            '''SELECT * FROM broadcasts WHERE (status = ? AND source = ?) ORDER BY tx_index ASC''',
            ('valid', source)))
    cursor.close()
    if broadcasts:
        last_broadcast = broadcasts[-1]
        if last_broadcast['locked']:
            problems.append('locked feed')
        elif timestamp <= last_broadcast['timestamp']:
            problems.append('feed timestamps not monotonically increasing')

    # 'options <int>' broadcasts set address options; the value must be a
    # non-negative integer within the allowed range.
    if util.enabled('options_require_memo') and text and text.lower(
    ).startswith('options'):
        ops_spl = text.split(" ")
        if len(ops_spl) == 2:
            try:
                options_int = int(ops_spl.pop())

                if (options_int > config.MAX_INT) or (options_int < 0):
                    problems.append('integer overflow')
                elif options_int > config.ADDRESS_OPTION_MAX_VALUE:
                    problems.append('options out of range')
            except ValueError:
                # Narrowed from a bare `except:`; int() on a str can only
                # raise ValueError, so behavior is unchanged for bad input.
                problems.append('options not an integer')

    return problems
Example no. 2
0
def compose(db, source, timestamp, value, fee_fraction, text):
    """Compose a broadcast transaction; returns ``(source, [], data)``."""

    # The fee fraction travels on-chain as an integer number of 1e-8 units.
    fee_fraction_int = int(fee_fraction * 1e8)

    problems = validate(db, source, timestamp, value, fee_fraction_int, text,
                        util.CURRENT_BLOCK_INDEX)
    if problems:
        raise exceptions.ComposeError(problems)

    data = message_type.pack(ID)
    encoded_text = text.encode('utf-8')

    if util.enabled('broadcast_pack_text'):
        # Always use a custom (varint) length prefix instead of the
        # problematic '52p' format; the length is counted in UTF-8 bytes.
        data += struct.pack(FORMAT, timestamp, value, fee_fraction_int)
        data += VarIntSerializer.serialize(len(encoded_text))
        data += encoded_text
    else:
        # Legacy packing: short texts as a Pascal string, longer as raw bytes.
        if len(text) <= 52:
            legacy_format = FORMAT + '{}p'.format(len(text) + 1)
        else:
            legacy_format = FORMAT + '{}s'.format(len(text))
        data += struct.pack(legacy_format, timestamp, value, fee_fraction_int,
                            encoded_text)

    return (source, [], data)
Example no. 3
0
def pack(message_type_id, block_index=None):
    """Serialize a message-type ID: one byte when the short form is enabled
    and the ID fits in 1..255, otherwise the full 4-byte form."""
    if util.enabled('short_tx_type_id',
                    block_index) and 0 < message_type_id < 256:
        return struct.pack(config.SHORT_TXTYPE_FORMAT, message_type_id)

    # Zero (and out-of-range) IDs always use the 4-byte format.
    return struct.pack(config.TXTYPE_FORMAT, message_type_id)
Example no. 4
0
def exectracer(cursor, sql, bindings):
    """APSW execution tracer: log each row-altering statement so that block
    message hashes can be computed.

    Always returns True so the traced statement still executes.
    (An unused local, ``skip_tables_block_messages``, was removed.)
    """
    # This means that all changes to database must use a very simple syntax.
    # TODO: Need sanity checks here.
    sql = sql.lower()

    if sql.startswith('create trigger') or sql.startswith('drop trigger'):
        # CREATE TRIGGER stmts may include an "insert" or "update" as part of them
        return True

    if isinstance(bindings, tuple):
        # Positional bindings carry no column names, so they cannot be logged.
        return True

    # Parse SQL: the first word is the command; the table name's position
    # differs between INSERT and UPDATE statements.
    array = sql.split('(')[0].split(' ')
    command = array[0]
    if 'insert' in sql:
        category = array[2]
    elif 'update' in sql:
        category = array[1]
    else:
        # CREATE TABLE, etc
        return True

    db = cursor.getconnection()
    skip_tables = [
        'blocks',
        'transactions',
        'balances',
        'messages',
        'mempool',
        'assets',
        'suicides',
        'postqueue',  # These tables are ephemeral.
        'nonces',
        'storage'  # List message manually.
    ]
    if command == 'update':
        # List message manually.
        skip_tables += ['contracts', 'proofofwork']

    # Record alteration in database.
    if category not in skip_tables:
        log.message(db, bindings['block_index'], command, category, bindings)

        # don't include memo as part of the messages hash
        #   until enhanced_sends are enabled
        if category == 'sends' and not util.enabled('enhanced_sends'):
            if isinstance(bindings, dict) and 'memo' in bindings:
                del bindings['memo']

        sorted_bindings = sorted(bindings.items()) if isinstance(
            bindings, dict) else [bindings]
        BLOCK_MESSAGES.append('{}{}{}'.format(command, category,
                                              sorted_bindings))

    return True
Example no. 5
0
def compose(db, source, destination, asset, quantity, memo=None, memo_is_hex=False, use_enhanced_send=None):
    """Compose a send, dispatching to the enhanced or the classic variant.

    Enhanced send replaces the classic send by default when enabled, but the
    caller may explicitly opt out with ``use_enhanced_send=False``.
    """
    enhanced_enabled = util.enabled('enhanced_sends')

    if enhanced_enabled and (use_enhanced_send is None or use_enhanced_send is True):
        return enhanced_send.compose(db, source, destination, asset, quantity, memo, memo_is_hex)

    if not enhanced_enabled and (memo is not None or use_enhanced_send is True):
        raise exceptions.ComposeError('enhanced sends are not enabled')

    return send1.compose(db, source, destination, asset, quantity)
Example no. 6
0
def validate(db, source, destination, asset, quantity, block_index):
    """Validate a classic send, raising on the first problem found.

    Raises ValidateError for malformed parameters and BalanceError when the
    source balance cannot cover the quantity.
    """

    try:
        util.get_asset_id(db, asset, block_index)
    except AssetError:
        raise ValidateError('asset invalid')

    try:
        script.validate(source)
    except AddressError:
        raise ValidateError('source address invalid')

    try:
        script.validate(destination)
    except AddressError:
        raise ValidateError('destination address invalid')

    if asset == config.BTC:
        raise ValidateError('cannot send {}'.format(config.BTC))

    # Exact type() comparison (not isinstance) — also rejects bool.
    if type(quantity) != int:
        raise ValidateError('quantity not integer')

    if quantity > config.MAX_INT:
        raise ValidateError('quantity too large')

    if quantity <= 0:
        raise ValidateError('quantity non‐positive')

    if util.get_balance(db, source, asset) < quantity:
        raise BalanceError('balance insufficient')

    if util.enabled('options_require_memo'):
        # The destination may have opted in to requiring a memo; classic
        # sends carry none, so such destinations must be refused.
        cursor = db.cursor()
        try:
            results = cursor.execute(
                'SELECT options FROM addresses WHERE address=?',
                (destination,))
            row = results.fetchone() if results else None
            if row and row['options'] & config.ADDRESS_OPTION_REQUIRE_MEMO:
                raise ValidateError('destination requires memo')
        finally:
            cursor.close()
Example no. 7
0
def unpack(packed_data, block_index=None):
    """Split packed data into ``(message_type_id, message_remainder)``.

    Tries the 1-byte short type ID first (when enabled); a leading zero byte
    falls back to the 4-byte format. Returns (None, None) when the data is
    too short for either form.
    """
    message_type_id = None
    message_remainder = None

    # Try to read the 1-byte short type ID first.
    if len(packed_data) > 1 and util.enabled('short_tx_type_id', block_index):
        message_type_id = struct.unpack(config.SHORT_TXTYPE_FORMAT,
                                        packed_data[:1])[0]
        if message_type_id > 0:
            return (message_type_id, packed_data[1:])

    # First message byte was 0 (or short IDs disabled): read 4 bytes.
    if len(packed_data) > 4:
        message_type_id = struct.unpack(config.TXTYPE_FORMAT,
                                        packed_data[:4])[0]
        message_remainder = packed_data[4:]

    return (message_type_id, message_remainder)
Example no. 8
0
def validate(db, source, destination, asset, quantity, memo_bytes, block_index):
    """Validate an enhanced send; return a list of problem strings (empty = valid)."""
    problems = []

    if asset == config.BTC:
        problems.append('cannot send {}'.format(config.BTC))

    # Without an integer quantity no further numeric checks make sense.
    if not isinstance(quantity, int):
        problems.append('quantity must be in satoshis')
        return problems

    if quantity < 0:
        problems.append('negative quantity')

    if quantity == 0:
        problems.append('zero quantity')

    # For SQLite3: signed 64-bit column limit.
    if quantity > config.MAX_INT:
        problems.append('integer overflow')

    # A destination is always required.
    if not destination:
        problems.append('destination is required')

    # Enforce the memo length limit.
    if memo_bytes is not None and len(memo_bytes) > MAX_MEMO_LENGTH:
        problems.append('memo is too long')

    if util.enabled('options_require_memo'):
        # Destinations may opt in to requiring a memo on incoming sends.
        cursor = db.cursor()
        try:
            results = cursor.execute(
                'SELECT options FROM addresses WHERE address=?',
                (destination,))
            row = results.fetchone() if results else None
            if row and row['options'] & config.ADDRESS_OPTION_REQUIRE_MEMO:
                if memo_bytes is None or (len(memo_bytes) == 0):
                    problems.append('destination requires memo')
        finally:
            cursor.close()

    return problems
Example no. 9
0
def validate(db, source, destination, asset, quantity, divisible, callable_,
             call_date, call_price, description, subasset_parent,
             subasset_longname, block_index):
    """Validate an issuance (first issuance, re-issuance, transfer or lock).

    Returns a tuple ``(call_date, call_price, problems, fee, description,
    divisible, reissuance, reissued_asset_longname)`` where ``problems`` is
    a list of strings, empty when the issuance is valid.
    """
    problems = []
    fee = 0

    if asset in (config.BTC, config.XCP):
        problems.append('cannot issue {} or {}'.format(config.BTC, config.XCP))

    # Normalize optional parameters to concrete defaults.
    if call_date is None:
        call_date = 0

    if call_price is None:
        call_price = 0.0

    if description is None:
        description = ""

    if divisible is None:
        divisible = True

    if isinstance(call_price, int):
        call_price = float(call_price)
    # ^ helps especially with calls from JS‐based clients, where parseFloat(15) returns 15 (not 15.0), which json takes as an int

    # Type checks: bail out early, since later checks assume these types.
    if not isinstance(quantity, int):
        problems.append('quantity must be in satoshis')
        return call_date, call_price, problems, fee, description, divisible, None, None

    if call_date and not isinstance(call_date, int):
        problems.append('call_date must be epoch integer')
        return call_date, call_price, problems, fee, description, divisible, None, None

    if call_price and not isinstance(call_price, float):
        problems.append('call_price must be a float')
        return call_date, call_price, problems, fee, description, divisible, None, None

    if quantity < 0:
        problems.append('negative quantity')

    if call_price < 0:
        problems.append('negative call price')

    if call_date < 0:
        problems.append('negative call date')

    # Callable, or not.
    if not callable_:
        call_date = 0
        call_price = 0.0

    # Valid re-issuance?
    cursor = db.cursor()
    cursor.execute(
        '''SELECT * FROM issuances \
                      WHERE (status = ? AND asset = ?)
                      ORDER BY tx_index ASC''', ('valid', asset))
    issuances = cursor.fetchall()
    cursor.close()
    reissued_asset_longname = None
    if issuances:
        reissuance = True
        last_issuance = issuances[-1]
        reissued_asset_longname = last_issuance['asset_longname']
        issuance_locked = False
        if util.enabled('issuance_lock_fix'):
            for issuance in issuances:
                if issuance['locked']:
                    issuance_locked = True
                    break
        elif last_issuance['locked']:
            # before the issuance_lock_fix, only the last issuance was checked
            issuance_locked = True

        if last_issuance['issuer'] != source:
            problems.append('issued by another address')
        if bool(last_issuance['divisible']) != bool(divisible):
            problems.append('cannot change divisibility')
        if bool(last_issuance['callable']) != bool(callable_):
            problems.append('cannot change callability')
        if last_issuance['call_date'] > call_date and call_date != 0:
            problems.append('cannot advance call date')
        if last_issuance['call_price'] > call_price:
            problems.append('cannot reduce call price')
        if issuance_locked and quantity:
            problems.append('locked asset and non‐zero quantity')
    else:
        reissuance = False
        if description.lower() == 'lock':
            problems.append('cannot lock a non‐existent asset')
        if destination:
            problems.append('cannot transfer a non‐existent asset')

    # validate parent ownership for subasset
    if subasset_longname is not None:
        cursor = db.cursor()
        cursor.execute(
            '''SELECT * FROM issuances \
                          WHERE (status = ? AND asset = ?)
                          ORDER BY tx_index ASC''', ('valid', subasset_parent))
        parent_issuances = cursor.fetchall()
        cursor.close()
        if parent_issuances:
            last_parent_issuance = parent_issuances[-1]
            if last_parent_issuance['issuer'] != source:
                problems.append('parent asset owned by another address')
        else:
            problems.append('parent asset not found')

    # validate subasset issuance is not a duplicate
    if subasset_longname is not None and not reissuance:
        cursor = db.cursor()
        cursor.execute(
            '''SELECT * FROM assets \
                          WHERE (asset_longname = ?)''', (subasset_longname, ))
        assets = cursor.fetchall()
        cursor.close()  # BUGFIX: this cursor was previously never closed.
        if len(assets) > 0:
            problems.append('subasset already exists')

        # validate that the actual asset is numeric
        if asset[:3] != 'ASP':
            problems.append('a subasset must be a numeric asset')

    # Check for existence of fee funds.
    if not reissuance:  # Pay fee only upon first issuance
        cursor = db.cursor()
        cursor.execute(
            '''SELECT * FROM balances WHERE (address = ? AND asset = ?)''',
            (source, config.XCP))
        balances = cursor.fetchall()
        cursor.close()

        if subasset_longname is not None:
            # subasset issuance is 10 ASP
            fee = int(10.0 * config.UNIT)
        elif len(asset) >= 13:
            fee = 0
        else:
            # custom names are 10 ASP
            fee = int(10.0 * config.UNIT)

        if fee and (not balances or balances[0]['quantity'] < fee):
            problems.append('insufficient funds')

    # For SQLite3
    call_date = min(call_date, config.MAX_INT)
    total = sum([issuance['quantity'] for issuance in issuances])
    assert isinstance(quantity, int)
    if total + quantity > config.MAX_INT:
        problems.append('total quantity overflow')

    if destination and quantity:
        problems.append('cannot issue and transfer simultaneously')

    # For SQLite3
    if util.enabled('integer_overflow_fix', block_index=block_index) and (
            fee > config.MAX_INT or quantity > config.MAX_INT):
        problems.append('integer overflow')

    return call_date, call_price, problems, fee, description, divisible, reissuance, reissued_asset_longname
Example no. 10
0
def parse(db, tx, message, message_type_id):
    """Parse an issuance transaction and write its effects to the database.

    Unpacks `message` (subasset, long, or short format), validates it,
    debits the issuance fee, records the asset/issuance rows, and credits
    the issued quantity to the source.
    """
    issuance_parse_cursor = db.cursor()

    # Unpack message.
    try:
        subasset_longname = None
        if message_type_id == SUBASSET_ID:
            if not util.enabled('subassets', block_index=tx['block_index']):
                # NOTE(review): logger.warn is a deprecated alias of
                # logger.warning.
                logger.warn("subassets are not enabled at block %s" %
                            tx['block_index'])
                raise exceptions.UnpackError

            # parse a subasset original issuance message
            asset_id, quantity, divisible, compacted_subasset_length = struct.unpack(
                SUBASSET_FORMAT, message[0:SUBASSET_FORMAT_LENGTH])
            # Whatever follows the compacted subasset name is the description.
            description_length = len(
                message) - SUBASSET_FORMAT_LENGTH - compacted_subasset_length
            if description_length < 0:
                logger.warn("invalid subasset length: [issuance] tx [%s]: %s" %
                            (tx['tx_hash'], compacted_subasset_length))
                raise exceptions.UnpackError
            messages_format = '>{}s{}s'.format(compacted_subasset_length,
                                               description_length)
            compacted_subasset_longname, description = struct.unpack(
                messages_format, message[SUBASSET_FORMAT_LENGTH:])
            subasset_longname = util.expand_subasset_longname(
                compacted_subasset_longname)
            # Subasset issuances are never callable.
            callable_, call_date, call_price = False, 0, 0.0
            try:
                description = description.decode('utf-8')
            except UnicodeDecodeError:
                # An undecodable description is treated as empty, not invalid.
                description = ''
        elif len(message) >= LENGTH_2:  # Protocol change.
            # Short trailing text is a Pascal string ('p'), longer is raw ('s').
            if len(message) - LENGTH_2 <= 42:
                curr_format = FORMAT_2 + '{}p'.format(len(message) - LENGTH_2)
            else:
                curr_format = FORMAT_2 + '{}s'.format(len(message) - LENGTH_2)
            asset_id, quantity, divisible, callable_, call_date, call_price, description = struct.unpack(
                curr_format, message)

            call_price = round(call_price, 6)  # TODO: arbitrary
            try:
                description = description.decode('utf-8')
            except UnicodeDecodeError:
                description = ''
        else:
            # Original (short) format: no callability fields, no description.
            if len(message) != LENGTH_1:
                raise exceptions.UnpackError
            asset_id, quantity, divisible = struct.unpack(FORMAT_1, message)
            callable_, call_date, call_price, description = False, 0, 0.0, ''
        try:
            asset = util.generate_asset_name(asset_id, tx['block_index'])
            status = 'valid'
        except exceptions.AssetIDError:
            asset = None
            status = 'invalid: bad asset name'
    except exceptions.UnpackError:
        asset, quantity, divisible, callable_, call_date, call_price, description = None, None, None, None, None, None, None
        status = 'invalid: could not unpack'

    # parse and validate the subasset from the message
    subasset_parent = None
    if status == 'valid' and subasset_longname is not None:  # Protocol change.
        try:
            # ensure the subasset_longname is valid
            util.validate_subasset_longname(subasset_longname)
            subasset_parent, subasset_longname = util.parse_subasset_from_asset_name(
                subasset_longname)
        except exceptions.AssetNameError:
            asset = None
            status = 'invalid: bad subasset name'

    reissuance = None
    fee = 0
    if status == 'valid':
        call_date, call_price, problems, fee, description, divisible, reissuance, reissued_asset_longname = validate(
            db,
            tx['source'],
            tx['destination'],
            asset,
            quantity,
            divisible,
            callable_,
            call_date,
            call_price,
            description,
            subasset_parent,
            subasset_longname,
            block_index=tx['block_index'])

        if problems:
            status = 'invalid: ' + '; '.join(problems)
        # Legacy behaviour (pre integer_overflow_fix): an overflowing total
        # zeroes the quantity instead of invalidating the transaction.
        if not util.enabled('integer_overflow_fix',
                            block_index=tx['block_index']
                            ) and 'total quantity overflow' in problems:
            quantity = 0

    # A transfer issuance hands ownership to the destination and issues
    # nothing (quantity forced to zero).
    if tx['destination']:
        issuer = tx['destination']
        transfer = True
        quantity = 0
    else:
        issuer = tx['source']
        transfer = False

    # Debit fee.
    if status == 'valid':
        foundation_addy = config.FOUNDATION_ADDRESS_MAINNET
        if config.TESTNET:
            foundation_addy = config.FOUNDATION_ADDRESS_TESTNET
        util.transfer(db,
                      tx['source'],
                      foundation_addy,
                      config.XCP,
                      fee,
                      action="issuance fee",
                      event=tx['tx_hash'])

    # Lock?
    lock = False
    if status == 'valid':
        if description and description.lower() == 'lock':
            lock = True
            cursor = db.cursor()
            issuances = list(
                cursor.execute(
                    '''SELECT * FROM issuances \
                                               WHERE (status = ? AND asset = ?)
                                               ORDER BY tx_index ASC''',
                    ('valid', asset)))
            cursor.close()
            description = issuances[-1][
                'description']  # Use last description. (Assume previous issuance exists because tx is valid.)
        if not reissuance:
            # Add to table of assets.
            bindings = {
                'asset_id': str(asset_id),
                'asset_name': str(asset),
                'block_index': tx['block_index'],
                'asset_longname': subasset_longname,
            }
            sql = 'insert into assets values(:asset_id, :asset_name, :block_index, :asset_longname)'
            issuance_parse_cursor.execute(sql, bindings)

    if status == 'valid' and reissuance:
        # when reissuing, add the asset_longname to the issuances table for API lookups
        asset_longname = reissued_asset_longname
    else:
        asset_longname = subasset_longname

    # Add parsed transaction to message-type–specific table.
    bindings = {
        'tx_index': tx['tx_index'],
        'tx_hash': tx['tx_hash'],
        'block_index': tx['block_index'],
        'asset': asset,
        'quantity': quantity,
        'divisible': divisible,
        'source': tx['source'],
        'issuer': issuer,
        'transfer': transfer,
        'callable': callable_,
        'call_date': call_date,
        'call_price': call_price,
        'description': description,
        'fee_paid': fee,
        'locked': lock,
        'status': status,
        'asset_longname': asset_longname,
    }
    # Overflowing issuances are logged but never stored.
    if "integer overflow" not in status:
        sql = 'insert into issuances values(:tx_index, :tx_hash, :block_index, :asset, :quantity, :divisible, :source, :issuer, :transfer, :callable, :call_date, :call_price, :description, :fee_paid, :locked, :status, :asset_longname)'
        issuance_parse_cursor.execute(sql, bindings)
    else:
        logger.warn("Not storing [issuance] tx [%s]: %s" %
                    (tx['tx_hash'], status))
        logger.debug("Bindings: %s" % (json.dumps(bindings), ))

    # Credit.
    if status == 'valid' and quantity:
        util.credit(db,
                    tx['source'],
                    asset,
                    quantity,
                    action="issuance",
                    event=tx['tx_hash'])

    issuance_parse_cursor.close()
Example no. 11
0
def construct(db,
              tx_info,
              encoding='auto',
              fee_per_kb=config.DEFAULT_FEE_PER_KB,
              estimate_fee_per_kb=None,
              estimate_fee_per_kb_nblocks=config.ESTIMATE_FEE_NBLOCKS,
              regular_dust_size=config.DEFAULT_REGULAR_DUST_SIZE,
              multisig_dust_size=config.DEFAULT_MULTISIG_DUST_SIZE,
              op_return_value=config.DEFAULT_OP_RETURN_VALUE,
              exact_fee=None,
              fee_provided=0,
              provided_pubkeys=None,
              dust_return_pubkey=None,
              allow_unconfirmed_inputs=False,
              unspent_tx_hash=None,
              custom_inputs=None,
              disable_utxo_locks=False,
              extended_tx_info=False):

    if estimate_fee_per_kb is None:
        estimate_fee_per_kb = config.ESTIMATE_FEE_PER_KB

    global UTXO_LOCKS

    desired_encoding = encoding
    (source, destination_outputs, data) = tx_info

    if dust_return_pubkey:
        dust_return_pubkey = binascii.unhexlify(dust_return_pubkey)

    # Source.
    # If public key is necessary for construction of (unsigned)
    # transaction, use the public key provided, or find it from the
    # blockchain.
    if source:
        script.validate(source)

    source_is_p2sh = script.is_p2sh(source)

    # Sanity checks.
    if exact_fee and not isinstance(exact_fee, int):
        raise exceptions.TransactionError('Exact fees must be in satoshis.')
    if not isinstance(fee_provided, int):
        raise exceptions.TransactionError('Fee provided must be in satoshis.')

    if UTXO_LOCKS is None and config.UTXO_LOCKS_MAX_ADDRESSES > 0:  # initialize if configured
        UTXO_LOCKS = util.DictCache(size=config.UTXO_LOCKS_MAX_ADDRESSES)
    '''Destinations'''

    # Destination outputs.
    # Replace multi‐sig addresses with multi‐sig pubkeys. Check that the
    # destination output isn’t a dust output. Set null values to dust size.
    destination_outputs_new = []
    for (address, value) in destination_outputs:

        # Value.
        if script.is_multisig(address):
            dust_size = multisig_dust_size
        else:
            dust_size = regular_dust_size
        if value == None:
            value = dust_size
        elif value < dust_size:
            raise exceptions.TransactionError('Destination output is dust.')

        # Address.
        script.validate(address)
        if script.is_multisig(address):
            destination_outputs_new.append(
                (backend.multisig_pubkeyhashes_to_pubkeys(
                    address, provided_pubkeys), value))
        else:
            destination_outputs_new.append((address, value))

    destination_outputs = destination_outputs_new
    destination_btc_out = sum(
        [value for address, value in destination_outputs])
    '''Data'''

    if data:
        # Data encoding methods (choose and validate).
        if encoding == 'auto':
            if len(data) + len(config.PREFIX) <= config.OP_RETURN_MAX_SIZE:
                encoding = 'opreturn'
            else:
                encoding = 'multisig'

        elif encoding not in ('pubkeyhash', 'multisig', 'opreturn'):
            raise exceptions.TransactionError('Unknown encoding‐scheme.')

        if encoding == 'multisig':
            # dust_return_pubkey should be set or explicitly set to False to use the default configured for the node
            #  the default for the node is optional so could fail
            if (source_is_p2sh and dust_return_pubkey is None) or (
                    dust_return_pubkey is False
                    and config.P2SH_DUST_RETURN_PUBKEY is None):
                raise exceptions.TransactionError(
                    "Can't use multisig encoding when source is P2SH and no dust_return_pubkey is provided."
                )
            elif dust_return_pubkey is False:
                dust_return_pubkey = binascii.unhexlify(
                    config.P2SH_DUST_RETURN_PUBKEY)

        # Divide data into chunks.
        if encoding == 'pubkeyhash':
            # Prefix is also a suffix here.
            chunk_size = 20 - 1 - 8
        elif encoding == 'multisig':
            # Two pubkeys, minus length byte, minus prefix, minus two nonces,
            # minus two sign bytes.
            chunk_size = (33 * 2) - 1 - 8 - 2 - 2
        elif encoding == 'opreturn':
            chunk_size = config.OP_RETURN_MAX_SIZE
            if len(data) + len(config.PREFIX) > chunk_size:
                raise exceptions.TransactionError(
                    'One `OP_RETURN` output per transaction.')
        data_array = list(chunks(data, chunk_size))

        # Data outputs.
        if encoding == 'multisig':
            data_value = multisig_dust_size
        elif encoding == 'opreturn':
            data_value = op_return_value
        else:
            # Pay‐to‐PubKeyHash, e.g.
            data_value = regular_dust_size
        data_output = (data_array, data_value)

        if not dust_return_pubkey:
            if encoding == 'multisig':
                dust_return_pubkey = get_dust_return_pubkey(
                    source, provided_pubkeys, encoding)
            else:
                dust_return_pubkey = None
    else:
        data_array = []
        data_output = None
        dust_return_pubkey = None

    data_btc_out = sum([data_value for data_chunk in data_array])
    '''Inputs'''

    # Calculate collective size of outputs, for fee calculation.
    p2pkhsize = 25 + 9
    if encoding == 'multisig':
        data_output_size = 81  # 71 for the data
    elif encoding == 'opreturn':
        data_output_size = 90  # 80 for the data
    else:
        data_output_size = p2pkhsize  # Pay‐to‐PubKeyHash (25 for the data?)
    outputs_size = (p2pkhsize * len(destination_outputs)) + (len(data_array) *
                                                             data_output_size)

    # Get inputs.
    multisig_inputs = not data

    # Array of UTXOs, as retrieved by listunspent function from gaspd
    if custom_inputs:
        use_inputs = unspent = custom_inputs
    else:
        if unspent_tx_hash is not None:
            unspent = backend.get_unspent_txouts(
                source,
                unconfirmed=allow_unconfirmed_inputs,
                unspent_tx_hash=unspent_tx_hash,
                multisig_inputs=multisig_inputs)
        else:
            unspent = backend.get_unspent_txouts(
                source,
                unconfirmed=allow_unconfirmed_inputs,
                multisig_inputs=multisig_inputs)

        # filter out any locked UTXOs to prevent creating transactions that spend the same UTXO when they're created at the same time
        if UTXO_LOCKS is not None and source in UTXO_LOCKS:
            unspentkeys = set(make_outkey(output) for output in unspent)
            filtered_unspentkeys = unspentkeys - set(UTXO_LOCKS[source].keys())
            unspent = [
                output for output in unspent
                if make_outkey(output) in filtered_unspentkeys
            ]

        unspent = backend.sort_unspent_txouts(unspent)
        logger.debug('Sorted candidate UTXOs: {}'.format(
            [print_coin(coin) for coin in unspent]))
        use_inputs = unspent

    # use backend estimated fee_per_kb
    if estimate_fee_per_kb:
        estimated_fee_per_kb = backend.fee_per_kb(estimate_fee_per_kb_nblocks)
        if estimated_fee_per_kb is not None:
            fee_per_kb = max(
                estimated_fee_per_kb,
                fee_per_kb)  # never drop below the default fee_per_kb

    logger.debug('Fee/KB {:.8f}'.format(fee_per_kb / config.UNIT))

    inputs = []
    btc_in = 0
    change_quantity = 0
    sufficient_funds = False
    final_fee = fee_per_kb
    desired_input_count = 1

    if encoding == 'multisig' and data_array and util.enabled('bytespersigop'):
        desired_input_count = len(data_array) * 2

    for coin in use_inputs:
        logger.debug('New input: {}'.format(print_coin(coin)))
        inputs.append(coin)
        btc_in += round(coin['amount'] * config.UNIT)

        size = 181 * len(inputs) + outputs_size + 10
        necessary_fee = int(size / 1000 * fee_per_kb)

        # If exact fee is specified, use that. Otherwise, calculate size of tx
        # and base fee on that (plus provide a minimum fee for selling GASP).
        if exact_fee:
            final_fee = exact_fee
        else:
            final_fee = max(fee_provided, necessary_fee)

        # Check if good.
        btc_out = destination_btc_out + data_btc_out
        change_quantity = btc_in - (btc_out + final_fee)
        logger.debug('Size: {} Fee: {:.8f} Change quantity: {:.8f} {}'.format(
            size, final_fee / config.UNIT, change_quantity / config.UNIT,
            config.BTC))
        # If change is necessary, must not be a dust output.
        if change_quantity == 0 or change_quantity >= regular_dust_size:
            sufficient_funds = True
            if len(inputs) >= desired_input_count:
                break

    if not sufficient_funds:
        # Approximate needed change, fee by with most recently calculated
        # quantities.
        btc_out = destination_btc_out + data_btc_out
        total_btc_out = btc_out + max(change_quantity, 0) + final_fee
        raise exceptions.BalanceError(
            'Insufficient {} at address {}. (Need approximately {} {}.) To spend unconfirmed coins, use the flag `--unconfirmed`. (Unconfirmed coins cannot be spent from multi‐sig addresses.)'
            .format(config.BTC, source, total_btc_out / config.UNIT,
                    config.BTC))

    # Lock the source's inputs (UTXOs) chosen for this transaction
    if UTXO_LOCKS is not None and not disable_utxo_locks:
        if source not in UTXO_LOCKS:
            UTXO_LOCKS[source] = cachetools.TTLCache(
                UTXO_LOCKS_PER_ADDRESS_MAXSIZE, config.UTXO_LOCKS_MAX_AGE)

        for input in inputs:
            UTXO_LOCKS[source][make_outkey(input)] = input

        logger.debug(
            "UTXO locks: Potentials ({}): {}, Used: {}, locked UTXOs: {}".
            format(len(unspent), [make_outkey(coin) for coin in unspent],
                   [make_outkey(input) for input in inputs],
                   list(UTXO_LOCKS[source].keys())))
    '''Finish'''

    # Change output.
    if change_quantity:
        if script.is_multisig(source):
            change_address = backend.multisig_pubkeyhashes_to_pubkeys(
                source, provided_pubkeys)
        else:
            change_address = source
        change_output = (change_address, change_quantity)
    else:
        change_output = None

    # in bitcoin core v0.12.1 a -bytespersigop was added that messes with bare multisig transactions,
    #  as a safeguard we fall back to pubkeyhash encoding when unsure
    # when len(inputs) > len(data_outputs) there's more bytes:sigops ratio and we can safely continue
    if encoding == 'multisig' and inputs and data_output and len(
            inputs) < len(data_array) * 2 and util.enabled('bytespersigop'):
        # if auto encoding we can do pubkeyhash encoding instead
        if desired_encoding == 'auto':
            return construct(db,
                             tx_info,
                             encoding='pubkeyhash',
                             fee_per_kb=fee_per_kb,
                             regular_dust_size=regular_dust_size,
                             multisig_dust_size=multisig_dust_size,
                             op_return_value=op_return_value,
                             exact_fee=exact_fee,
                             fee_provided=fee_provided,
                             provided_pubkeys=provided_pubkeys,
                             allow_unconfirmed_inputs=allow_unconfirmed_inputs,
                             unspent_tx_hash=unspent_tx_hash,
                             custom_inputs=custom_inputs)
        # otherwise raise exception
        else:
            raise exceptions.EncodingError(
                "multisig will be rejected by Bitcoin Core >= v0.12.1, you should use `encoding=auto` or `encoding=pubkeyhash`"
            )

    # Serialise inputs and outputs.
    unsigned_tx = serialise(encoding,
                            inputs,
                            destination_outputs,
                            data_output,
                            change_output,
                            dust_return_pubkey=dust_return_pubkey)
    unsigned_tx_hex = binascii.hexlify(unsigned_tx).decode('utf-8')
    '''Sanity Check'''

    from aspirelib.lib import blocks

    # Desired transaction info.
    (desired_source, desired_destination_outputs, desired_data) = tx_info
    desired_source = script.make_canonical(desired_source)
    desired_destination = script.make_canonical(
        desired_destination_outputs[0]
        [0]) if desired_destination_outputs else ''
    # NOTE: Include change in destinations for GASP transactions.
    # if change_output and not desired_data and desired_destination != config.UNSPENDABLE:
    #    if desired_destination == '':
    #        desired_destination = desired_source
    #    else:
    #        desired_destination += '-{}'.format(desired_source)
    # NOTE
    if desired_data == None:
        desired_data = b''

    # Parsed transaction info.
    try:
        parsed_source, parsed_destination, x, y, parsed_data = blocks._get_tx_info(
            unsigned_tx_hex)
    except exceptions.BTCOnlyError:
        # Skip GASP‐only transactions.
        if extended_tx_info:
            return {
                'btc_in': btc_in,
                'btc_out': destination_btc_out + data_btc_out,
                'btc_change': change_quantity,
                'btc_fee': final_fee,
                'tx_hex': unsigned_tx_hex,
            }
        return unsigned_tx_hex
    desired_source = script.make_canonical(desired_source)

    # Check desired info against parsed info.
    desired = (desired_source, desired_destination, desired_data)
    parsed = (parsed_source, parsed_destination, parsed_data)
    if desired != parsed:
        # Unlock (revert) UTXO locks
        if UTXO_LOCKS is not None:
            for input in inputs:
                UTXO_LOCKS[source].pop(make_outkey(input), None)

        raise exceptions.TransactionError(
            'Constructed transaction does not parse correctly: {} ≠ {}'.format(
                desired, parsed))

    if extended_tx_info:
        return {
            'btc_in': btc_in,
            'btc_out': destination_btc_out + data_btc_out,
            'btc_change': change_quantity,
            'btc_fee': final_fee,
            'tx_hex': unsigned_tx_hex,
        }
    return unsigned_tx_hex
# Example no. 12
def parse(db, tx, message):
    """Parse a broadcast message and record it in the `broadcasts` table.

    Unpacks the packed broadcast payload, validates it against the feed's
    prior broadcasts, stores the (possibly invalid) result, and applies any
    side effects: locking the feed ('lock' text) and setting address options
    ('options N' text).

    :param db: database connection providing ``cursor()``.
    :param tx: transaction dict; reads 'tx_index', 'tx_hash', 'block_index',
        'source'.
    :param message: raw packed message bytes (without the message-type ID).
    :returns: None.
    """
    cursor = db.cursor()

    # Unpack message.
    try:
        if util.enabled('broadcast_pack_text'):
            # Text length is a varint prefix inside the trailing blob.
            timestamp, value, fee_fraction_int, rawtext = struct.unpack(
                FORMAT + '{}s'.format(len(message) - LENGTH), message)
            textlen = VarIntSerializer.deserialize(rawtext)
            text = rawtext[-textlen:]

            assert len(text) == textlen
        else:
            # Legacy encoding: short text uses Pascal-style ('p'), long uses raw ('s').
            if len(message) - LENGTH <= 52:
                curr_format = FORMAT + '{}p'.format(len(message) - LENGTH)
            else:
                curr_format = FORMAT + '{}s'.format(len(message) - LENGTH)

            timestamp, value, fee_fraction_int, text = struct.unpack(
                curr_format, message)

        try:
            text = text.decode('utf-8')
        except UnicodeDecodeError:
            text = ''
        status = 'valid'
    except Exception:
        # Any unpack failure (struct.error, bad varint, length mismatch)
        # marks the broadcast invalid rather than aborting the parse.
        timestamp, value, fee_fraction_int, text = 0, None, 0, None
        status = 'invalid: could not unpack'

    if status == 'valid':
        # For SQLite3: clamp to the maximum storable integer.
        timestamp = min(timestamp, config.MAX_INT)
        value = min(value, config.MAX_INT)

        problems = validate(db, tx['source'], timestamp, value,
                            fee_fraction_int, text, tx['block_index'])
        if problems:
            status = 'invalid: ' + '; '.join(problems)

    # Lock? A 'lock' broadcast permanently locks the feed; its other fields
    # are nulled out before storage.
    lock = False
    if text and text.lower() == 'lock':
        lock = True
        timestamp, value, fee_fraction_int, text = 0, None, None, None

    # Add parsed transaction to message-type–specific table.
    bindings = {
        'tx_index': tx['tx_index'],
        'tx_hash': tx['tx_hash'],
        'block_index': tx['block_index'],
        'source': tx['source'],
        'timestamp': timestamp,
        'value': value,
        'fee_fraction_int': fee_fraction_int,
        'text': text,
        'locked': lock,
        'status': status,
    }
    if "integer overflow" not in status:
        sql = 'insert into broadcasts values(:tx_index, :tx_hash, :block_index, :source, :timestamp, :value, :fee_fraction_int, :text, :locked, :status)'
        cursor.execute(sql, bindings)
    else:
        logger.warning("Not storing [broadcast] tx [%s]: %s" %
                       (tx['tx_hash'], status))
        logger.debug("Bindings: %s" % (json.dumps(bindings), ))

    # stop processing if broadcast is invalid for any reason
    if util.enabled('broadcast_invalid_check') and status != 'valid':
        cursor.close()
        return

    # Options? if the status is invalid the previous if should have caught it
    if util.enabled('options_require_memo'):
        if text and text.lower().startswith('options'):
            ops_spl = text.split(" ")
            if len(ops_spl) == 2:
                change_ops = False
                options_int = 0
                try:
                    options_int = int(ops_spl.pop())
                    change_ops = True
                except ValueError:
                    # Non-numeric option value: silently ignore (best-effort).
                    pass

                if change_ops:
                    op_bindings = {
                        'block_index': tx['block_index'],
                        'address': tx['source'],
                        'options': options_int
                    }
                    sql = 'insert or replace into addresses(address, options, block_index) values(:address, :options, :block_index)'
                    cursor.execute(sql, op_bindings)

    # stop processing if broadcast is invalid for any reason
    # @TODO: remove this check once broadcast_invalid_check has been activated
    if util.enabled('max_fee_fraction') and status != 'valid':
        cursor.close()
        return

    cursor.close()