Example #1
 def deserialize_params(self, serialized_params, remote=False):
     try:
         len_ser_host, r = serialized_params[0], serialized_params[1:]
         ser_host, r = r[0:len_ser_host], r[len_ser_host:]
         len_ser_port, r = r[0], r[1:]
         ser_port, r = r[0:len_ser_port], r[len_ser_port:]
         len_ser_key, r = r[0], r[1:]
         ser_key, r = r[0:len_ser_key], r[len_ser_key:]
         ser_version = None
         if len(r):  #Version is optional
             len_ser_version, r = r[0], r[1:]
             ser_version, r = r[0:len_ser_version], r[len_ser_version:]
     except Exception:
         raise Exception("Incorrect serialized params")  #TODO handle exceptions more precisely
     if remote:
         self.advertised_host = ser_host.decode('utf-8')
         self.advertised_port = int(
             ser_port.decode('utf-8'))  # TODO try-except here
         self.advertised_static_key = PublicKey(bytes(ser_key), raw=True)
         try:
             self.version = ser_version.decode('utf-8')
         except (AttributeError, UnicodeDecodeError):
             self.version = 'unknown'
     else:
         return {
             'network': {
                 'host': ser_host.decode('utf-8'),
                 'port': int(ser_port.decode('utf-8'))
             },
             'static_key': PublicKey(bytes(ser_key), raw=True)
         }
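
The length-prefixed layout that deserialize_params expects (a one-byte length before each of host, port, static key and the optional version) implies a straightforward serializer, which is not shown on this page. A minimal sketch under that assumption, with serialize_params as a hypothetical name and utf-8 encoded host/port:

    def serialize_params(host, port, serialized_static_key, version=None):
        # Hypothetical counterpart of deserialize_params above:
        # each field is written as <1-byte length><field bytes>.
        ser_host = host.encode('utf-8')
        ser_port = str(port).encode('utf-8')
        res = (len(ser_host).to_bytes(1, "big") + ser_host +
               len(ser_port).to_bytes(1, "big") + ser_port +
               len(serialized_static_key).to_bytes(1, "big") + serialized_static_key)
        if version is not None:  # version is optional, mirroring the parser
            ser_version = version.encode('utf-8')
            res += len(ser_version).to_bytes(1, "big") + ser_version
        return res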
Example #2
 def deserialize_raw(self, serialized_data):
     self.drop_cached()
     consumed = b""
     if len(serialized_data) < 65:
         raise Exception("Not enough bytes to encode recovery signature")
     rec_sig, serialized_data = serialized_data[:65], serialized_data[65:]
     consumed += rec_sig
     unrelated = PublicKey(flags=ALL_FLAGS)
     if rec_sig[0] & 128 == 0:
         self.version = 0
         self.message = b""
         self.recoverable_signature = unrelated.ecdsa_recoverable_deserialize_raw(
             rec_sig)
     if rec_sig[0] & 128 == 128:
         self.version = 1
         rec_sig = (rec_sig[0] - 128).to_bytes(1, "big") + rec_sig[1:]
         self.recoverable_signature = unrelated.ecdsa_recoverable_deserialize_raw(
             rec_sig)
         if len(serialized_data) < 2:
             raise Exception("Not enough bytes to encode message len")
         mlen_ser, serialized_data = serialized_data[:2], serialized_data[2:]
         mlen = int.from_bytes(mlen_ser, 'big')
         if len(serialized_data) < mlen:
             raise Exception("Not enough bytes to encode message")
         self.message, serialized_data = serialized_data[:mlen], serialized_data[mlen:]
         consumed += mlen_ser + self.message
     self.serialized = consumed
     return serialized_data
Example #3
File: utils.py Project: WTRMQDev/leer
def decrypt(privkey, nonce, ciphertext):
    nonce = top_up_nonce(nonce)
    pubkey = PublicKey(pubkey=ciphertext[:33], raw=True)
    ciphertext = ciphertext[33:]
    shared_key = pubkey.ecdh(privkey.private_key)
    aead = ChaCha20Poly1305(shared_key, 'python')
    res = aead.open(nonce, ciphertext, b'')
    if res is None:
        raise Exception("Cant decrypt")
    return res
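
decrypt above is the receiving half of an ECIES-style scheme: the first 33 bytes of the ciphertext are an ephemeral public key, the shared key comes from ECDH with the receiver's private key, and the remainder is opened with ChaCha20-Poly1305. A rough sketch of the sending half, assuming the same top_up_nonce and ChaCha20Poly1305 helpers and that the wrapper exposes the usual seal() counterpart to open() (none of which appear on this page):

    from secp256k1_zkp import PrivateKey  # module name assumed from the surrounding project

    def encrypt(receiver_pubkey, nonce, plaintext):
        nonce = top_up_nonce(nonce)                   # same nonce helper as in decrypt
        ephemeral = PrivateKey()                      # fresh ephemeral key per message
        shared_key = receiver_pubkey.ecdh(ephemeral.private_key)
        aead = ChaCha20Poly1305(shared_key, 'python')
        # prepend the 33-byte ephemeral pubkey so decrypt() can re-derive shared_key
        return ephemeral.pubkey.serialize() + aead.seal(nonce, plaintext, b'')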
Example #4
File: utils.py Project: WTRMQDev/leer
def compare_supply_and_merkle_roots(total_supply, commitment_root,
                                    excesses_root, full_offset):
    '''
    Each txout (authorized Pedersen) commitment is v*H + r*G, where v is the value, r is the blinding key, and G and H are generators.
    Each blinding key is the previous blinding key plus the private key of the address, plus/minus blinding keys which should be compensated by additional excesses.
    Thus, if we sum all commitments, the result should be V*H + R*G, where
    V is the supply: the sum of all unspent values on the blockchain. Note that the supply is not equal to all minted coins,
        since new_outputs_fee retains some coins.
    R is the sum of private keys, private keys of additional excesses and full_offset (the sum of the offsets of all transactions). Thus R*G is the sum of all addresses' pubkeys, all additional excesses and full_offset*G. Note that while R is not known, R*G can be calculated from public data.
    This function checks that the calculated
        V*H (calculated from supply) plus R*G (calculated from excesses and addresses) plus full_offset*G equals the sum of commitments V*H + R*G.
    '''
    commitment_summ = PedersenCommitment(commitment=commitment_root[:33],
                                         raw=True)
    excesses_summ = PublicKey(pubkey=excesses_root[:33],
                              raw=True).to_pedersen_commitment()
    # Instead of generating separately supply_pc (which is actually the public key v*H) and
    # full_offset_pc (which is actually the public key fo*G), let's generate their sum.
    minus_fo = subtract_offset(0, full_offset)
    supply_and_offset_pc = PedersenCommitment(
        value_generator=default_generator,
        blinding_generator=default_blinding_generator)
    supply_and_offset_pc.create(total_supply, minus_fo.to_bytes(32, "big"))
    checker = PedersenCommitment()
    return checker.verify_sum([excesses_summ, supply_and_offset_pc],
                              [commitment_summ])
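
verify_sum succeeds when the commitments on the left add up to the commitments on the right, i.e. both the committed values and the blinding factors balance. A small self-contained illustration of that semantics, assuming the bare PedersenCommitment constructor falls back to the default generators (as the bare checker above suggests):

    from secp256k1_zkp import PedersenCommitment  # module name assumed

    # 5 + 7 committed on the left balances 12 on the right, and the
    # blinding factors balance as well (1 + 2 on the left vs 3 on the right).
    a = PedersenCommitment()
    a.create(5, (1).to_bytes(32, "big"))
    b = PedersenCommitment()
    b.create(7, (2).to_bytes(32, "big"))
    c = PedersenCommitment()
    c.create(12, (3).to_bytes(32, "big"))

    checker = PedersenCommitment()
    assert checker.verify_sum([a, b], [c])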
Example #5
File: address.py Project: TensorVirus/leer
    def serialize(self):
        if self.version == 0 and len(self.message):
            raise Exception("Version 0 serialization cannot carry a message")

        unrelated = PublicKey(flags=ALL_FLAGS)
        if self.version == 0:
            return unrelated.ecdsa_recoverable_serialize_raw(
                self.recoverable_signature)
        if self.version == 1:
            rec_sig_serialized = unrelated.ecdsa_recoverable_serialize_raw(
                self.recoverable_signature)
            rec_sig_serialized = (rec_sig_serialized[0] | 128).to_bytes(
                1, "big") + rec_sig_serialized[1:]
            mes_serialized = len(self.message).to_bytes(2,
                                                        "big") + self.message
            return rec_sig_serialized + mes_serialized
Example #6
 def sum(self, x1, x2):
     # each index is 33 bytes for commitments and 32 for hash
     comm1, hash1 = x1[:33], x1[33:65]
     comm2, hash2 = x2[:33], x2[33:65]
     comm1 = PedersenCommitment(commitment=comm1, raw=True)
     comm2 = PedersenCommitment(commitment=comm2, raw=True)
     #XXX we definitely need sum of pedersen commitments at the libsecp256k1 level.
     pk = PublicKey()
     pk.combine([
         comm1.to_public_key().public_key,
         comm2.to_public_key().public_key
     ])
     sm = pk.to_pedersen_commitment()
     first_part = sm.serialize()
     second_part = _hash(hash1 + hash2)
     return first_part + second_part
Example #7
 def sum(self, x1, x2):
     # each index is 33 bytes for commitments and 32 for hash
     pubkey1, hash1 = x1[:33], x1[33:65]
     pubkey2, hash2 = x2[:33], x2[33:65]
     pubkey1 = PublicKey(pubkey=pubkey1, raw=True)
     pubkey2 = PublicKey(pubkey=pubkey2, raw=True)
     # consider pubkey1+pubkey2
     pk = PublicKey()
     pk.combine([pubkey1.public_key, pubkey2.public_key])
     first_part = pk.serialize()
     second_part = _hash(hash1 + hash2)
     return first_part + second_part
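
Both sum() variants (Examples #6 and #7) fold two 65-byte child entries, each a 33-byte point plus a 32-byte hash, into a parent entry of the same shape: the points are added with PublicKey.combine and the two hashes are hashed together. A rough standalone illustration of that core step, assuming the same secp256k1 binding and standing in sha256 for the project's _hash helper:

    from hashlib import sha256
    from secp256k1_zkp import PrivateKey, PublicKey  # module name assumed

    def _hash(data):                  # stand-in for the project's _hash
        return sha256(data).digest()

    k1, k2 = PrivateKey(), PrivateKey()
    x1 = k1.pubkey.serialize() + _hash(b"leaf one")   # 33 + 32 bytes
    x2 = k2.pubkey.serialize() + _hash(b"leaf two")

    pk = PublicKey()
    pk.combine([k1.pubkey.public_key, k2.pubkey.public_key])
    parent = pk.serialize() + _hash(x1[33:65] + x2[33:65])
    assert len(parent) == 65          # again a 33-byte point plus a 32-byte hash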
Example #8
 def deserialize_params(self, serialized_params, remote=False):
     try:
         len_ser_host, r = serialized_params[0], serialized_params[1:]
         ser_host, r = r[0:len_ser_host], r[len_ser_host:]
         len_ser_port, r = r[0], r[1:]
         ser_port, r = r[0:len_ser_port], r[len_ser_port:]
         len_ser_key, r = r[0], r[1:]
         ser_key, r = r[0:len_ser_key], r[len_ser_key:]
     except Exception:
         raise Exception("Incorrect serialized params")  #TODO handle exceptions more precisely
     if remote:
         self.advertised_host = ser_host.decode('utf-8')
         self.advertised_port = int(
             ser_port.decode('utf-8'))  # TODO try-except here
         self.advertised_static_key = PublicKey(bytes(ser_key), raw=True)
     else:
         return {
             'network': {
                 'host': ser_host.decode('utf-8'),
                 'port': int(ser_port.decode('utf-8'))
             },
             'static_key': PublicKey(bytes(ser_key), raw=True)
         }
Example #9
    def non_context_verify(self, block_height):
        #Actually we partially use context via block_height. Consider renaming.
        try:
            if verification_cache[(self.serialize(), block_height)]:
                #We set coinbase during verification, thus if we skip verification
                #we need to set it manually. TODO (verification should be free from initialisation stuff)
                for output in self.outputs:
                    if output.is_coinbase:
                        self.coinbase = output
                    elif output.is_dev_reward:
                        self.dev_reward = output
                return verification_cache[(self.serialize(), block_height)]
        except KeyError:
            pass

        assert is_sorted(
            self.inputs,
            key=lambda _input: _input.authorized_pedersen_commitment.serialize()
        ), "Inputs are not sorted"
        assert is_sorted(
            self.outputs,
            key=lambda _output: _output.authorized_pedersen_commitment.serialize()
        ), "Outputs are not sorted"
        assert is_sorted(
            self.additional_excesses,
            key=lambda e: e.index), "Additional excesses are not sorted"

        assert len(self.inputs) == len(self.updated_excesses)
        for _input in self.inputs:
            assert _input.lock_height < block_height, "Timelocked input"
            s_i = _input.serialized_index
            assert s_i in self.updated_excesses, \
                "Updated excesses do not contain update for address %s" % _input.address.to_text()
            assert _input.address.serialized_pubkey == self.updated_excesses[s_i].serialized_pubkey

        #Check that there are no duplicated outputs
        #TODO probably authorized????
        assert len(
            set([
                _output.unauthorized_pedersen_commitment.serialize()
                for _output in self.outputs
            ])) == len(self.outputs), "Duplicated output"

        coinbase_num = 0
        dev_reward_num = 0
        output_apcs = []

        for output in self.outputs:
            assert output.verify(), "Nonvalid output"
            _o_index = output.serialized_index
            output_apcs.append(_o_index[:33])
            if output.is_coinbase:
                coinbase_num += 1
                self.coinbase = output
            elif output.is_dev_reward:
                dev_reward_num += 1
                self.dev_reward = output
        assert coinbase_num < 2, "More than one coinbase"
        assert dev_reward_num < 2, "More than one dev reward output"

        for excess in self.additional_excesses:
            assert excess.verify(), "Nonvalid excess"
            #if not excess.message in output_apcs:
            #  return False
            #else:
            #  output_apcs.remove(excess.message)

        left_side, right_side = [], []

        _t = PublicKey()

        # Transaction should contain outputs (while it may not contain inputs)
        assert len(self.outputs), "Empty outputs"

        if len(self.inputs):
            _t.combine([
                _input.authorized_pedersen_commitment.to_public_key().
                public_key for _input in self.inputs
            ])
            inputs_pedersen_commitment_sum = _t.to_pedersen_commitment()
            left_side.append(inputs_pedersen_commitment_sum)

        if len(self.outputs):
            _t.combine([
                _output.authorized_pedersen_commitment.to_public_key().
                public_key for _output in self.outputs
            ])
            outputs_pedersen_commitment_sum = _t.to_pedersen_commitment()
            right_side.append(outputs_pedersen_commitment_sum)

            _t.combine([
                _output.address.pubkey.public_key for _output in self.outputs
            ])
            outputs_excesses_sum = _t.to_pedersen_commitment()
            left_side.append(outputs_excesses_sum)

        if len(self.additional_excesses):
            _t.combine([
                excess.pubkey.public_key for excess in self.additional_excesses
            ])
            additional_excesses_sum = _t.to_pedersen_commitment()
            left_side.append(additional_excesses_sum)

        if coinbase_num or dev_reward_num:
            minted_value = 0
            if coinbase_num:
                minted_value += self.coinbase.value
            if dev_reward_num:
                minted_value += self.dev_reward.value
            minted_pc = PedersenCommitment(value_generator=default_generator)
            minted_pc.create(minted_value, b'\x00' * 32)
            left_side.append(minted_pc)

        relay_fee = 0
        for _output in self.outputs:
            if not _output.version == 0:
                if _output.generator == default_generator_ser:
                    relay_fee += _output.relay_fee

        new_outputs_fee = self.calc_new_outputs_fee()
        fee = relay_fee + new_outputs_fee
        negative_fee = False
        if fee < 0:
            # It's okay, transaction has consumed so many inputs that it is profitable by itself
            # however we need to handle it manually: libsecp256k1 cannot handle negative value
            negative_fee = True
            fee = -fee

        if not fee == 0:
            fee_pc = PedersenCommitment(value_generator=default_generator
                                        )  #TODO think about fees for assets
            fee_pc.create(fee, b'\x00' * 32)
            if negative_fee:
                left_side.append(fee_pc)
            else:
                right_side.append(fee_pc)

        mixer_pc = (Point(default_blinding_generator) *
                    self.mixer_offset).to_pedersen_commitment(
                    )  #TODO we should optimise here and generate fee_mixer pc
        right_side.append(mixer_pc)

        checker = PedersenCommitment()
        # For transaction which contains coinbase only, both sides will be empty
        if len(left_side) or len(right_side):
            sum_to_zero = checker.verify_sum(left_side, right_side)
            assert sum_to_zero, "Non-zero Pedersen commitments sum"

        if self.coinbase:
            info = self.coinbase.info()
            assert info['exp'] == -1, "Non-transparent coinbase"
            assert self.coinbase.lock_height >= block_height + coinbase_maturity,\
                   "Wrong coinbase maturity timelock: %d should be %d"%(\
                    self.coinbase.lock_height, block_height + coinbase_maturity)
        if self.dev_reward:
            assert self.dev_reward.lock_height >= block_height + dev_reward_maturity, \
                   "Wrong dev reward maturity: %d should be %d"%(\
                    self.dev_reward.lock_height, block_height + dev_reward_maturity)

        tx_skel = TransactionSkeleton(tx=self)
        assert len(tx_skel.serialize(rich_format=False)) < 50000, "Too big tx_skeleton"
        verification_cache[(self.serialize(), block_height)] = True
        return True
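
In the notation of the Example #4 docstring, the verify_sum call above checks, roughly, that

    sum(input commitments) + sum(output address pubkeys) + sum(additional excess pubkeys) + minted_value*H
        == sum(output commitments) + fee*H + mixer_offset*G

with the fee*H term moved to the left-hand side when the computed fee is negative.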
Example #10
    def check_global_message_queue(self):
        while not self.global_message_queue.empty():
            message = self.global_message_queue.get()
            action = message['action']
            if action == "open connection":
                host, port, static_key = message['host'], message[
                    'port'], PublicKey(message['static_key'], raw=True)
                coro = self.connect_to((host, port), static_key=static_key)
                asyncio.ensure_future(coro, loop=self.loop)
            if action == "get connections num":
                _id = message['id']
                request_source = message['sender']
                self.syncer.queues[request_source].put({
                    'id': _id,
                    'result': len(self.nodes)
                })

            if action == "give intrinsic nodes list":  #Not list anymore, considr renaming TODO
                _id = message['id']
                request_source = message['sender']
                nodes_info = list(self.nodes.keys())
                self.syncer.queues[request_source].put({
                    'id': _id,
                    'result': nodes_info
                })

            if action == "give nodes list":  #Not list anymore, considr renaming TODO
                _id = message['id']
                request_source = message['sender']
                nodes_info = {}
                for node_params in self.nodes:
                    node = self.nodes[node_params]
                    nodes_info[(node.advertised_host,
                                node.advertised_port)] = {
                                    'pubkey': node.static_key.serialize(),
                                    'version': node.version
                                }
                self.syncer.queues[request_source].put({
                    'id': _id,
                    'result': nodes_info
                })

            if action == "give my node":
                _id = message['id']
                request_source = message['sender']
                nodes_info = {
                    (self.our_node.advertised_host, self.our_node.advertised_port):
                    self.our_node.static_key.serialize()
                }
                self.syncer.queues[request_source].put({
                    'id': _id,
                    'result': nodes_info
                })

            if action == "take the headers":
                num, headers, node_params = message["num"], message[
                    "headers"], message["node"]
                if not node_params in self.nodes:
                    continue
                message_to_send = inv_message_id["take the headers"]
                message_to_send += num.to_bytes(2, "big")
                message_to_send += headers
                coro = self.nodes[node_params].send(message_to_send)
                asyncio.ensure_future(coro, loop=self.loop)
                self.syncer.queues[message['sender']].put({
                    'id': message['id'],
                    'result': 'processed'
                })
            if action == "take the blocks":
                num, blocks, node_params = message["num"], message[
                    "blocks"], message["node"]
                if not node_params in self.nodes:
                    continue
                message_to_send = inv_message_id["take the blocks"]
                message_to_send += num.to_bytes(2, "big")
                message_to_send += blocks
                coro = self.nodes[node_params].send(message_to_send)
                asyncio.ensure_future(coro, loop=self.loop)
                self.syncer.queues[message['sender']].put({
                    'id': message['id'],
                    'result': 'processed'
                })
            if action == "take the txos":
                num, txos, txos_hashes, txos_lengths, node_params = (
                    message["num"], message["txos"], message["txos_hashes"],
                    message["txos_lengths"], message["node"])
                if not node_params in self.nodes:
                    continue
                message_to_send = inv_message_id["take the txos"]
                message_to_send += num.to_bytes(2, "big")
                message_to_send += txos_hashes
                message_to_send += txos_lengths
                message_to_send += txos
                coro = self.nodes[node_params].send(message_to_send)
                asyncio.ensure_future(coro, loop=self.loop)
                self.syncer.queues[message['sender']].put({
                    'id': message['id'],
                    'result': 'processed'
                })
            if action == "give blocks":
                num, blocks_hashes, node_params = message["num"], message[
                    "block_hashes"], message["node"]
                if not node_params in self.nodes:
                    continue
                message_to_send = inv_message_id["give blocks"]
                message_to_send += num.to_bytes(2, "big")
                message_to_send += blocks_hashes
                coro = self.nodes[node_params].send(message_to_send)
                asyncio.ensure_future(coro, loop=self.loop)
                self.syncer.queues[message['sender']].put({
                    'id': message['id'],
                    'result': 'processed'
                })
            if action == "give next headers":
                num, from_hash, node_params = message["num"], message[
                    "from"], message["node"]
                if not node_params in self.nodes:
                    continue
                message_to_send = inv_message_id["give next headers"]
                message_to_send += num.to_bytes(2, "big")
                message_to_send += from_hash
                coro = self.nodes[node_params].send(message_to_send)
                asyncio.ensure_future(coro, loop=self.loop)
                self.syncer.queues[message['sender']].put({
                    'id': message['id'],
                    'result': 'processed'
                })
            if action == "give txos":
                num, txos_hashes, node_params = message["num"], message[
                    "txos_hashes"], message["node"]
                if not node_params in self.nodes:
                    continue
                message_to_send = inv_message_id["give the txos"]
                message_to_send += num.to_bytes(2, "big")
                message_to_send += txos_hashes
                coro = self.nodes[node_params].send(message_to_send)
                asyncio.ensure_future(coro, loop=self.loop)
                self.syncer.queues[message['sender']].put({
                    'id': message['id'],
                    'result': 'processed'
                })
            if action == "take tip info":
                logger.info("Take tip info")
                height, tip, prev_hash, total_difficulty, node_params = (
                    message["height"], message["tip"], message["prev_hash"],
                    message["total_difficulty"], message["node"])
                if not node_params in self.nodes:
                    continue
                message_to_send = inv_message_id["take tip info"]
                message_to_send += height.to_bytes(4, "big")
                message_to_send += tip
                message_to_send += prev_hash
                message_to_send += total_difficulty.to_bytes(32, "big")
                coro = self.nodes[node_params].send(message_to_send)
                asyncio.ensure_future(coro, loop=self.loop)
                self.syncer.queues[message['sender']].put({
                    'id': message['id'],
                    'result': 'processed'
                })
            if action == "find common root":
                serialized_header, node_params = message[
                    "serialized_header"], message["node"]
                if not node_params in self.nodes:
                    continue
                message_to_send = inv_message_id["find common root"]
                message_to_send += serialized_header
                coro = self.nodes[node_params].send(message_to_send)
                asyncio.ensure_future(coro, loop=self.loop)
                self.syncer.queues[message['sender']].put({
                    'id': message['id'],
                    'result': 'processed'
                })
            if action == "find common root response":
                header_hash, known_headers, _len, node_params = message[
                    "header_hash"], message["known_headers"], message[
                        'flags_num'], message["node"]
                if not node_params in self.nodes:
                    continue
                message_to_send = inv_message_id["find common root response"]
                message_to_send += header_hash
                message_to_send += _len.to_bytes(1, "big")
                message_to_send += known_headers
                coro = self.nodes[node_params].send(message_to_send)
                asyncio.ensure_future(coro, loop=self.loop)
                self.syncer.queues[message['sender']].put({
                    'id': message['id'],
                    'result': 'processed'
                })

            if action == "take TBM transaction":
                mode, serialized_tx_skel, node_params = message[
                    "mode"], message["tx_skel"], message["node"]
                if not node_params in self.nodes:
                    continue
                message_to_send = inv_message_id["take TBM transaction"]
                message_to_send += mode.to_bytes(2, "big")
                message_to_send += serialized_tx_skel
                coro = self.nodes[node_params].send(message_to_send)
                asyncio.ensure_future(coro, loop=self.loop)
                self.syncer.queues[message['sender']].put({
                    'id': message['id'],
                    'result': 'processed'
                })

            if action == "give TBM transaction":
                node_params = message["node"]
                if not node_params in self.nodes:
                    continue
                message_to_send = inv_message_id["give TBM transaction"]
                coro = self.nodes[node_params].send(message_to_send)
                asyncio.ensure_future(coro, loop=self.loop)
                self.syncer.queues[message['sender']].put({
                    'id': message['id'],
                    'result': 'processed'
                })
            '''if action == "send ping":
              for node in self.nodes:
                coro = node.send( "ping 0")
                asyncio.ensure_future(coro, loop=self.loop)'''
            if action == "stop":
                logger.info("NetworkManager stops")
                self.loop.stop()
                return
        if self.up:
            self.loop.call_later(0.5, self.check_global_message_queue)
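
The dispatch loop above fixes the shape of the messages it expects on global_message_queue. As a small illustration, a caller-side helper that enqueues an "open connection" request; the keys are taken directly from that branch, while the queue and the serialized static key here are placeholders:

    from queue import Queue

    def request_open_connection(global_message_queue, host, port, static_key_ser):
        # keys match the "open connection" branch above; static_key is the
        # serialized remote public key, re-parsed there with PublicKey(..., raw=True)
        global_message_queue.put({
            'action': 'open connection',
            'host': host,
            'port': port,
            'static_key': static_key_ser,
        })

    q = Queue()  # the real queue is whatever NetworkManager hands out
    request_open_connection(q, '203.0.113.7', 8888, b'\x02' + b'\x11' * 32)  # placeholder key bytes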
Example #11
 def calc_nonrec(self):
     unrelated = PublicKey(flags=ALL_FLAGS)
     self.cached_nonrec = unrelated.ecdsa_recoverable_convert(
         self.recoverable_signature)
Example #12
 def calc_pubkey(self):
     unrelated = PublicKey(flags=ALL_FLAGS)
     self.cached_pubkey = PublicKey(pubkey=unrelated.ecdsa_recover(
         self.message, self.recoverable_signature))
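
calc_pubkey recovers the signer's public key from a recoverable signature over self.message. A minimal round-trip sketch, assuming the same secp256k1 binding and its usual ecdsa_sign_recoverable signing counterpart (not shown on this page):

    from secp256k1_zkp import PrivateKey, PublicKey, ALL_FLAGS  # module name assumed

    signer = PrivateKey()
    message = b"some message bytes"
    rec_sig = signer.ecdsa_sign_recoverable(message)

    unrelated = PublicKey(flags=ALL_FLAGS)
    recovered = PublicKey(pubkey=unrelated.ecdsa_recover(message, rec_sig))
    assert recovered.serialize() == signer.pubkey.serialize()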
Example #13
    def non_context_verify(self, block_height):
        #Actually we partially use context via block_height. Consider renaming.

        assert is_sorted(
            self.inputs,
            key=lambda _input: _input.authorized_pedersen_commitment.serialize()
        ), "Inputs are not sorted"
        assert is_sorted(
            self.outputs,
            key=lambda _output: _output.authorized_pedersen_commitment.serialize()
        ), "Outputs are not sorted"

        for _input in self.inputs:
            assert _input.lock_height < block_height, "Timelocked input"

        #Check that there are no duplicated outputs
        #TODO probably authorized????
        assert len(
            set([
                _output.unauthorized_pedersen_commitment.serialize()
                for _output in self.outputs
            ])) == len(self.outputs), "Duplicated output"

        coinbase_num = 0

        output_apcs = []

        for output in self.outputs:
            assert output.verify(), "Nonvalid output"
            _o_index = output.serialized_index
            output_apcs.append(_o_index[:33])
            if output.version == 0:
                coinbase_num += 1
                self.coinbase = output
        assert coinbase_num < 2, "More than one coinbase"

        for excess in self.additional_excesses:
            assert excess.verify(), "Nonvalid excess"
            if not excess.message in output_apcs:
                return False
            else:
                output_apcs.remove(excess.message)

        left_side, right_side = [], []

        _t = PublicKey()

        # Transaction should contain either outputs (while it may not contain inputs)
        # or combined excesses (for transactions which only delete excesses)
        assert len(self.outputs) or len(self.combined_excesses), "Empty outputs"

        if len(self.inputs):
            _t.combine([
                _input.authorized_pedersen_commitment.to_public_key().
                public_key for _input in self.inputs
            ])
            inputs_pedersen_commitment_sum = _t.to_pedersen_commitment()
            left_side.append(inputs_pedersen_commitment_sum)

        if len(self.outputs):
            _t.combine([
                _output.authorized_pedersen_commitment.to_public_key().
                public_key for _output in self.outputs
            ])
            outputs_pedersen_commitment_sum = _t.to_pedersen_commitment()
            right_side.append(outputs_pedersen_commitment_sum)

            _t.combine([
                _output.address.pubkey.public_key for _output in self.outputs
            ])
            outputs_excesses_sum = _t.to_pedersen_commitment()
            left_side.append(outputs_excesses_sum)

        if len(self.additional_excesses):
            _t.combine([
                excess.pubkey.public_key for excess in self.additional_excesses
            ])
            additional_excesses_sum = _t.to_pedersen_commitment()
            left_side.append(additional_excesses_sum)

        if coinbase_num:
            minted_pc = PedersenCommitment(blinded_generator=default_generator)
            minted_pc.create(self.coinbase.value, b'\x00' * 32)
            left_side.append(minted_pc)

        relay_fee = 0
        for _output in self.outputs:
            if not _output.version == 0:
                if _output.generator == default_generator_ser:
                    relay_fee += _output.relay_fee

        new_outputs_fee = self.calc_new_outputs_fee()
        fee = relay_fee + new_outputs_fee

        negative_fee = False
        if fee < 0:
            # It's okay, transaction has consumed so many inputs that it is profitable by itself
            # however we need to handle it manually: libsecp256k1 cannot handle negative value
            negative_fee = True
            fee = -fee

        if not fee == 0:
            fee_pc = PedersenCommitment(blinded_generator=default_generator
                                        )  #TODO think about fees for assets
            fee_pc.create(fee, b'\x00' * 32)
            if negative_fee:
                left_side.append(fee_pc)
            else:
                right_side.append(fee_pc)

        checker = PedersenCommitment()
        # For transaction which contains coinbase only, both sides will be empty
        if len(left_side) or len(right_side):
            sum_to_zero = checker.verify_sum(left_side, right_side)
            assert sum_to_zero, "Non-zero Pedersen commitments sum"

        if not GLOBAL_TEST['skip combined excesses']:
            raise NotImplementedError
        if self.coinbase:
            info = self.coinbase.info()
            assert info['exp'] == -1, "Non-transparent coinbase"
            # TODO Ugly ->`self.txos_storage.storage_space.blockchain.current_height`
            assert self.coinbase.lock_height >= block_height + coinbase_maturity,\
                   "Wrong coinbase maturity timelock: %d should be %d"%(\
                    self.coinbase.lock_height, block_height + coinbase_maturity)

        tx_skel = TransactionSkeleton(tx=self)
        assert len(tx_skel.serialize(rich_format=False)) < 50000, "Too big tx_skeleton"
        return True
Example #14
File: address.py Project: TensorVirus/leer
 def nonrec_signature(self):
     unrelated = PublicKey(flags=ALL_FLAGS)
     return unrelated.ecdsa_recoverable_convert(self.recoverable_signature)
Example #15
File: address.py Project: TensorVirus/leer
 def pubkey(self):
     unrelated = PublicKey(flags=ALL_FLAGS)
     return PublicKey(pubkey=unrelated.ecdsa_recover(
         self.message, self.recoverable_signature))