Code Example #1
File: mempool_tx.py  Project: MasterFrumma/leer
 def add_tx(self, tx):
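     # Accept either a full Transaction (also cached in built_tx, keyed by its
     # skeleton's serialization) or a bare TransactionSkeleton; in both cases the
     # skeleton is appended and a mempool update is triggered.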
     if isinstance(tx, Transaction):
         tx_skel = TransactionSkeleton(tx=tx)
         self.built_tx[tx_skel.serialize()] = tx
         self.transactions.append(tx_skel)
     elif isinstance(tx, TransactionSkeleton):
         self.transactions.append(tx)
     else:
         raise TypeError("tx should be a Transaction or a TransactionSkeleton")
     self.update(reason="Tx addition")
Code Example #2
    def non_context_verify(self, block_height):
        #Actually we partially use context via block_height. Consider renaming.
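        # Fast path: if this (serialized tx, block_height) pair was already verified,
        # reuse the cached result instead of re-checking every commitment and signature.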
        try:
            if verification_cache[(self.serialize(), block_height)]:
                #We set coinbase during verification, thus if we skip verification
                #we need to set it manually. TODO (verification should be free from initialisation stuff)
                for output in self.outputs:
                    if output.is_coinbase:
                        self.coinbase = output
                    elif output.is_dev_reward:
                        self.dev_reward = output
                return verification_cache[(self.serialize(), block_height)]
        except KeyError:
            pass

        assert is_sorted(
            self.inputs,
            key=lambda _input: _input.authorized_pedersen_commitment.serialize()
        ), "Inputs are not sorted"
        assert is_sorted(
            self.outputs,
            key=lambda _output: _output.authorized_pedersen_commitment.serialize()
        ), "Outputs are not sorted"
        assert is_sorted(
            self.additional_excesses,
            key=lambda e: e.index), "Additional excesses are not sorted"

        assert len(self.inputs) == len(self.updated_excesses)
        for _input in self.inputs:
            assert _input.lock_height < block_height, "Timelocked input"
            s_i = _input.serialized_index
            assert s_i in self.updated_excesses, \
                "Updated excesses do not contain update for address %s" % _input.address.to_text()
            assert _input.address.serialized_pubkey == self.updated_excesses[s_i].serialized_pubkey

        #Check that there are no duplicated outputs
        #TODO probably authorized????
        assert len(
            set([
                _output.unauthorized_pedersen_commitment.serialize()
                for _output in self.outputs
            ])) == len(self.outputs), "Duplicated output"

        coinbase_num = 0
        dev_reward_num = 0
        output_apcs = []

        for output in self.outputs:
            assert output.verify(), "Invalid output"
            _o_index = output.serialized_index
            output_apcs.append(_o_index[:33])
            if output.is_coinbase:
                coinbase_num += 1
                self.coinbase = output
            elif output.is_dev_reward:
                dev_reward_num += 1
                self.dev_reward = output
        assert coinbase_num < 2, "More than one coinbase"
        assert dev_reward_num < 2, "More than one dev reward output"

        for excess in self.additional_excesses:
            assert excess.verify(), "Invalid excess"
            #if not excess.message in output_apcs:
            #  return False
            #else:
            #  output_apcs.remove(excess.message)

        left_side, right_side = [], []
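        # left_side and right_side collect Pedersen commitments whose sums must match:
        # inputs + per-output excesses + additional excesses (+ minted coins, + a negative
        # fee) on the left versus outputs (+ fee, + mixer offset) on the right.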

        _t = PublicKey()

        # Transaction should contain outputs (while it may not contain inputs)
        assert len(self.outputs), "Empty outputs"

        if len(self.inputs):
            _t.combine([
                _input.authorized_pedersen_commitment.to_public_key().
                public_key for _input in self.inputs
            ])
            inputs_pedersen_commitment_sum = _t.to_pedersen_commitment()
            left_side.append(inputs_pedersen_commitment_sum)

        if len(self.outputs):
            _t.combine([
                _output.authorized_pedersen_commitment.to_public_key().
                public_key for _output in self.outputs
            ])
            outputs_pedersen_commitment_sum = _t.to_pedersen_commitment()
            right_side.append(outputs_pedersen_commitment_sum)

            _t.combine([
                _output.address.pubkey.public_key for _output in self.outputs
            ])
            outputs_excesses_sum = _t.to_pedersen_commitment()
            left_side.append(outputs_excesses_sum)

        if len(self.additional_excesses):
            _t.combine([
                excess.pubkey.public_key for excess in self.additional_excesses
            ])
            additional_excesses_sum = _t.to_pedersen_commitment()
            left_side.append(additional_excesses_sum)

        if coinbase_num or dev_reward_num:
            minted_value = 0
            if coinbase_num:
                minted_value += self.coinbase.value
            if dev_reward_num:
                minted_value += self.dev_reward.value
            minted_pc = PedersenCommitment(value_generator=default_generator)
            minted_pc.create(minted_value, b'\x00' * 32)
            left_side.append(minted_pc)

        relay_fee = 0
        for _output in self.outputs:
            if not _output.version == 0:
                if _output.generator == default_generator_ser:
                    relay_fee += _output.relay_fee

        new_outputs_fee = self.calc_new_outputs_fee()
        fee = relay_fee + new_outputs_fee
        negative_fee = False
        if fee < 0:
            # It's okay: the transaction consumed so many inputs that it is profitable by itself;
            # however, we need to handle this manually since libsecp256k1 cannot handle negative values
            negative_fee = True
            fee = -fee

        if not fee == 0:
            fee_pc = PedersenCommitment(value_generator=default_generator)  #TODO think about fees for assets
            fee_pc.create(fee, b'\x00' * 32)
            if negative_fee:
                left_side.append(fee_pc)
            else:
                right_side.append(fee_pc)

        mixer_pc = (Point(default_blinding_generator) *
                    self.mixer_offset).to_pedersen_commitment()  #TODO we should optimise here and generate fee_mixer pc
        right_side.append(mixer_pc)

        checker = PedersenCommitment()
        # For transaction which contains coinbase only, both sides will be empty
        if len(left_side) or len(right_side):
            sum_to_zero = checker.verify_sum(left_side, right_side)
            assert sum_to_zero, "Non-zero Pedersen commitments sum"

        if self.coinbase:
            info = self.coinbase.info()
            assert info['exp'] == -1, "Non-transparent coinbase"
            assert self.coinbase.lock_height >= block_height + coinbase_maturity,\
                   "Wrong coinbase maturity timelock: %d should be at least %d"%(\
                    self.coinbase.lock_height, block_height + coinbase_maturity)
        if self.dev_reward:
            assert self.dev_reward.lock_height >= block_height + dev_reward_maturity, \
                   "Wrong dev reward maturity: %d should be at least %d"%(\
                    self.dev_reward.lock_height, block_height + dev_reward_maturity)

        tx_skel = TransactionSkeleton(tx=self)
        assert len(tx_skel.serialize(rich_format=False)) < 50000, "Too big tx_skeleton"
        verification_cache[(self.serialize(), block_height)] = True
        return True
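
The assertions above rely on an is_sorted helper that is not part of this listing. A minimal sketch of such a helper, assuming it only needs an iterable and a key function as the calls above imply (hypothetical, not the project's own implementation):

def is_sorted(items, key=lambda x: x):
    # Hypothetical stand-in: True when the keyed items are in non-decreasing order.
    keyed = [key(item) for item in items]
    return all(a <= b for a, b in zip(keyed, keyed[1:]))
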
Code Example #3
def core_loop(syncer, config):
    init_storage_space(config)

    nodes = {}
    requests = {}  # requests to other node's subprocesses
    requests_cache = {
        "blocks": [],
        "txouts": []
    }  # requests of assets to other nodes

    set_ask_for_blocks_hook(storage_space.blockchain, requests_cache)
    set_ask_for_txouts_hook(storage_space.blocks_storage, requests_cache)
    if config['wallet']:
        set_notify_wallet_hook(storage_space.blockchain,
                               syncer.queues['Wallet'])

    message_queue = syncer.queues['Blockchain']
    message_queue.put({"action": "give nodes list reminder"})
    message_queue.put({"action": "check requests cache"})

    #set logging
    default_log_level = logging.INFO
    if "logging" in config:  #debug, info, warning, error, critical
        loglevels = {
            "debug": logging.DEBUG,
            "info": logging.INFO,
            "warning": logging.WARNING,
            "error": logging.ERROR,
            "critical": logging.CRITICAL
        }
        if "base" in config["logging"] and config["logging"][
                "base"] in loglevels:
            logger.setLevel(loglevels[config["logging"]["base"]])
        if "core" in config["logging"] and config["logging"][
                "core"] in loglevels:
            #it's ok to overwrite the base level here
            logger.setLevel(loglevels[config["logging"]["core"]])

    is_benchmark = config.get('testnet_options', {}).get('benchmark', False)
    no_pow = config.get('testnet_options', {}).get('do_not_check_pow', False)

    def get_new_address(timeout=2.5):  #blocking
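        # Request/response over queues: post a request tagged with a unique id to the
        # Wallet queue, then poll our own queue until a reply with that id arrives,
        # putting any unrelated messages back for normal processing.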
        _id = str(uuid4())
        syncer.queues['Wallet'].put({
            'action': 'give new address',
            'id': _id,
            'sender': "Blockchain"
        })
        result = None
        start_time = time()
        while True:
            #We wait for a specific message; all others are put back to be processed later
            put_back = []
            while not message_queue.empty():
                message = message_queue.get()
                if (not 'id' in message) or (not message['id'] == _id):
                    put_back.append(message)
                    continue
                result = message['result']
                break
            for message in put_back:
                message_queue.put(message)
            if result:
                break
            sleep(0.01)
            if time() - start_time > timeout:
                raise Exception(
                    "get_new_address timeout: the wallet has probably crashed or is not running"
                )
        if result == 'error':
            raise Exception("Can not get_new_address: error on wallet side")
        address = Address()
        logger.info("Receiving address %s (len %d)" % (result, len(result)))
        address.deserialize_raw(result)
        return address

    def send_message(destination, message):
        logger.debug("Sending message to %s:\t\t %s" %
                     (str(destination), str(message)))
        if not 'id' in message:
            message['id'] = uuid4()
        if not 'sender' in message:
            message['sender'] = "Blockchain"
        syncer.queues[destination].put(message)

    def send_to_network(message):
        send_message("NetworkManager", message)

    notify = partial(set_value_to_queue, syncer.queues["Notifications"],
                     "Blockchain")

    core_context = CoreContext(storage_space, logger, nodes, notify,
                               send_message, get_new_address, config)
    logger.debug("Start of core loop")
    #Set basic chain info, so the wallet and other services can start working
    with storage_space.env.begin(write=True) as rtx:
        notify("blockchain height",
               storage_space.blockchain.current_height(rtx=rtx))
        notify("best header", storage_space.headers_manager.best_header_height)
    while True:
        sleep(0.05)
        put_back_messages = []
        notify("core workload", "idle")
        while not message_queue.empty():
            message = message_queue.get()
            if 'time' in message and message['time'] > time():  # delay this message
                put_back_messages.append(message)
                continue
            if (('result' in message) and message['result']=="processed") or \
               (('result' in message) and message['result']=="set") or \
               (('action' in message) and message['action']=="give nodes list reminder") or \
               (('action' in message) and message['action']=="check requests cache") or \
               (('action' in message) and message['action']=="take nodes list") or \
               (('result' in message) and is_ip_port_array(message['result'])):
                logger.debug("Processing message %s" % message)
            else:
                if 'action' in message:
                    logger.info("Processing message `%s`" % message['action'])
                else:
                    logger.info("Processing message %s" % message)
            if not 'action' in message:  #it is response
                if message['id'] in requests:  # response is awaited
                    if requests[message['id']] == "give nodes list":
                        requests.pop(message['id'])
                        message_queue.put({
                            "action": "take nodes list",
                            "nodes": message["result"]
                        })
                else:
                    pass  #Drop
                continue
            try:
                if ("node" in message) and (not message["node"] in nodes):
                    nodes[message["node"]] = {'node': message["node"]}
                if message["action"] == "take the headers":
                    notify("core workload", "processing new headers")
                    with storage_space.env.begin(write=True) as wtx:
                        process_new_headers(message, nodes[message["node"]],
                                            wtx, core_context)
                    notify("best header",
                           storage_space.headers_manager.best_header_height)
                if message["action"] == "take the blocks":
                    notify("core workload", "processing new blocks")
                    with storage_space.env.begin(write=True) as wtx:
                        initial_tip = storage_space.blockchain.current_tip(
                            rtx=wtx)
                        process_new_blocks(message, wtx, core_context)
                        after_tip = storage_space.blockchain.current_tip(
                            rtx=wtx)
                        notify(
                            "blockchain height",
                            storage_space.blockchain.current_height(rtx=wtx))
                        if not after_tip == initial_tip:
                            notify_all_nodes_about_new_tip(nodes,
                                                           rtx=wtx,
                                                           core=core_context,
                                                           _except=[],
                                                           _payload_except=[])
                        look_forward(nodes, send_to_network, rtx=wtx)
                if message["action"] == "take the txos":
                    notify("core workload", "processing new txos")
                    with storage_space.env.begin(write=True) as wtx:
                        process_new_txos(message, wtx=wtx, core=core_context)
                        #After downloading new txos, some blocks may become fully downloaded
                        notify(
                            "blockchain height",
                            storage_space.blockchain.current_height(rtx=wtx))
                        look_forward(nodes, send_to_network, rtx=wtx)
                if message["action"] in request_handlers:  #blocks, headers, txos and tbm
                    notify("core workload", "processing " + message["action"])
                    with storage_space.env.begin(write=False) as rtx:
                        request_handlers[message["action"]](message,
                                                            rtx=rtx,
                                                            core=core_context)
                if message["action"] in metadata_handlers:  # take tip, find common root [response]
                    with storage_space.env.begin(write=False) as rtx:
                        metadata_handlers[message["action"]](
                            message,
                            nodes[message["node"]],
                            rtx=rtx,
                            core=core_context)
                if message["action"] == "take TBM transaction":
                    notify("core workload", "processing mempool tx")
                    with storage_space.env.begin(write=False) as rtx:
                        process_tbm_tx(message, rtx=rtx, core=core_context)
                if message["action"] == "give tip height":
                    with storage_space.env.begin(write=False) as rtx:
                        _ch = storage_space.blockchain.current_height(rtx=rtx)
                        send_message(message["sender"], {
                            "id": message["id"],
                            "result": _ch
                        })
                    notify("blockchain height", _ch)
            except DOSException as e:
                logger.info("DOS Exception %s" % str(e))
                #raise e #TODO send to NM
            except Exception as e:
                raise e

            if message["action"] == "give block info":
                notify("core workload", "reading block info")
                try:
                    with storage_space.env.begin(write=False) as rtx:
                        block_info = compose_block_info(message["block_num"],
                                                        rtx=rtx)
                    send_message(message["sender"], {
                        "id": message["id"],
                        "result": block_info
                    })
                except Exception as e:
                    send_message(message["sender"], {
                        "id": message["id"],
                        "result": "error",
                        "error": str(e)
                    })
            if message["action"] == "put arbitrary mining work" and is_benchmark:
                if not no_pow:
                    raise Exception(
                        "`put arbitrary mining work` is only allowed for disabled pow checks"
                    )
                notify("core workload", "putting arbitrary mining work")
                message["nonce"] = b"\x00" * 8
                message['partial_hash'] = list(
                    storage_space.mempool_tx.work_block_assoc.inner_dict.keys())[-1]
                message['action'] = "take mining work"
            if message["action"] in mining_operations:  #getwork, gbt, submitblock, submitwork
                notify("core workload", "processing " + message["action"])
                with storage_space.env.begin(write=True) as wtx:
                    mining_operations[message["action"]](message, wtx,
                                                         core_context)
            if message["action"] == "set mining address" and is_benchmark:
                address = Address()
                address.deserialize_raw(message["address"])
                core_context.mining_address = address
            if message["action"] == "give synchronization status":
                with storage_space.env.begin(write=False) as rtx:
                    our_height = storage_space.blockchain.current_height(
                        rtx=rtx)
                best_known_header = storage_space.headers_manager.best_header_height
                try:
                    best_advertised_height = max([
                        nodes[node]["height"] for node in nodes
                        if "height" in nodes[node]
                    ])
                except:
                    best_advertised_height = None
                send_message(
                    message["sender"], {
                        "id": message["id"],
                        "result": {
                            'height': our_height,
                            'best_known_header': best_known_header,
                            'best_advertised_height': best_advertised_height
                        }
                    })
                notify("best header", best_known_header)
                notify("blockchain height", our_height)
                notify("best advertised height", best_advertised_height)

            if message["action"] == "add tx to mempool":
                notify("core workload", "processing local transaction")
                response = {"id": message["id"]}
                #deserialization
                try:
                    ser_tx = message["tx"]
                    tx = Transaction(
                        txos_storage=storage_space.txos_storage,
                        excesses_storage=storage_space.excesses_storage)
                    with storage_space.env.begin(write=False) as rtx:
                        tx.deserialize(ser_tx, rtx)
                        storage_space.mempool_tx.add_tx(tx, rtx=rtx)
                        tx_skel = TransactionSkeleton(tx=tx)
                        notify_all_nodes_about_tx(tx_skel.serialize(
                            rich_format=True, max_size=40000),
                                                  core_context,
                                                  _except=[],
                                                  mode=1)
                    response['result'] = "generated"
                except Exception as e:
                    response['result'] = 'error'
                    response['error'] = str(e)
                    logger.error("Problem in tx: %s" % str(e))
                send_message(message["sender"], response)

            #message from core_loop
            if message["action"] in download_status_checks:  # txouts and blocks download status checks
                with storage_space.env.begin(write=True) as rtx:
                    ret_mes = download_status_checks[message["action"]](
                        message, rtx, core_context)
                    if ret_mes:
                        put_back_messages.append(ret_mes)
            if message["action"] == "take nodes list":
                for node in message["nodes"]:
                    if not node in nodes:  #Do not overwrite
                        nodes[node] = {"node": node}
                disconnected_nodes = []
                for existing_node in nodes:
                    if not existing_node in message["nodes"]:
                        disconnected_nodes.append(existing_node)
                for dn in disconnected_nodes:
                    nodes.pop(dn)

            if message["action"] == "give nodes list reminder":
                _id = str(uuid4())
                send_to_network({
                    "action": "give intrinsic nodes list",
                    "sender": "Blockchain",
                    "id": _id
                })
                requests[_id] = "give nodes list"
                put_back_messages.append({
                    "action": "give nodes list reminder",
                    "time": int(time()) + 3
                })

            if message["action"] == "stop":
                logger.info("Core loop stops")
                return

            if message["action"] == "shutdown":
                initiator = message["sender"]
                logger.info("Shutdown initiated by %s" % initiator)
                for receiver in [
                        'NetworkManager', 'Blockchain', 'RPCManager',
                        'Notifications', 'Wallet'
                ]:
                    send_message(receiver, {
                        "action": "stop",
                        "sender": initiator
                    })

            if message["action"] == "check requests cache":
                put_back_messages.append({
                    "action": "check requests cache",
                    "time": int(time()) + 5
                })
                for k in requests_cache:
                    if not len(requests_cache[k]):
                        continue
                    copy = list(set(requests_cache[k]))
                    copy = sorted(copy,
                                  key=lambda x: requests_cache[k].index(x)
                                  )  #preserve order of downloaded objects
                    if k == "blocks":
                        chunk_size = 20
                        while len(copy):
                            request, copy = copy[:chunk_size], copy[chunk_size:]
                            new_message = {
                                "action": "check blocks download status",
                                "block_hashes": request,
                                "already_asked_nodes": [],
                                "id": str(uuid4()),
                                "time": -1
                            }
                            message_queue.put(new_message)
                        requests_cache[k] = []
                    if k == "txouts":
                        chunk_size = 30
                        while len(copy):
                            request, copy = copy[:chunk_size], copy[chunk_size:]
                            new_message = {
                                "action": "check txouts download status",
                                "txos_hashes": request,
                                "already_asked_nodes": [],
                                "id": str(uuid4()),
                                "time": -1
                            }
                            message_queue.put(new_message)
                        requests_cache[k] = []

        for _message in put_back_messages:
            message_queue.put(_message)

        try:
            with storage_space.env.begin(write=True) as rtx:
                check_sync_status(nodes, rtx=rtx, core_context=core_context)
            try:
                best_advertised_height = max([
                    nodes[node]["height"] for node in nodes
                    if "height" in nodes[node]
                ])
            except:
                best_advertised_height = None
            notify("best advertised height", best_advertised_height)
        except Exception as e:
            logger.error(e)
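
For reference, a minimal sketch of how another subprocess could talk to this loop over the shared queues, mirroring the "give tip height" handler above. The function name, the "RPCManager" queue name and the timeout are illustrative assumptions, not part of the project:

from time import time, sleep
from uuid import uuid4

def ask_tip_height(syncer, own_name="RPCManager", timeout=5.0):
    # Illustrative only: post a request to the Blockchain queue and poll our own
    # queue for a reply carrying the same id, as get_new_address does above.
    _id = str(uuid4())
    syncer.queues['Blockchain'].put({
        "action": "give tip height",
        "id": _id,
        "sender": own_name
    })
    start = time()
    while time() - start < timeout:
        put_back, result = [], None
        while not syncer.queues[own_name].empty():
            reply = syncer.queues[own_name].get()
            if reply.get("id") == _id:
                result = reply["result"]
                break
            put_back.append(reply)
        for reply in put_back:
            syncer.queues[own_name].put(reply)
        if result is not None:
            return result
        sleep(0.05)
    raise TimeoutError("no reply from the core loop")
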
Code Example #4
File: transaction.py  Project: MasterFrumma/leer
    def non_context_verify(self, block_height):
        #Actually we partially use context via block_height. Consider renaming.

        assert is_sorted(
            self.inputs,
            key=lambda _input: _input.authorized_pedersen_commitment.serialize()
        ), "Inputs are not sorted"
        assert is_sorted(
            self.outputs,
            key=lambda _output: _output.authorized_pedersen_commitment.serialize()
        ), "Outputs are not sorted"

        for _input in self.inputs:
            assert _input.lock_height < block_height, "Timelocked input"

        #Check that there are no duplicated outputs
        #TODO probably authorized????
        assert len(
            set([
                _output.unauthorized_pedersen_commitment.serialize()
                for _output in self.outputs
            ])) == len(self.outputs), "Duplicated output"

        coinbase_num = 0

        output_apcs = []

        for output in self.outputs:
            assert output.verify(), "Invalid output"
            _o_index = output.serialized_index
            output_apcs.append(_o_index[:33])
            if output.version == 0:
                coinbase_num += 1
                self.coinbase = output
        assert coinbase_num < 2, "More than one coinbase"

        for excess in self.additional_excesses:
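            # Each additional excess must carry, as its message, the first 33 bytes of some
            # output's serialized index; matched entries are removed so the same output
            # cannot be claimed twice.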
            assert excess.verify(), "Invalid excess"
            if not excess.message in output_apcs:
                return False
            else:
                output_apcs.remove(excess.message)

        left_side, right_side = [], []

        _t = PublicKey()

        # Transaction should contain either outputs (while it may not contain inputs)
        # or combined excesses (for transactions which only delete excesses)
        assert len(self.outputs) or len(self.combined_excesses), "Empty outputs"

        if len(self.inputs):
            _t.combine([
                _input.authorized_pedersen_commitment.to_public_key().
                public_key for _input in self.inputs
            ])
            inputs_pedersen_commitment_sum = _t.to_pedersen_commitment()
            left_side.append(inputs_pedersen_commitment_sum)

        if len(self.outputs):
            _t.combine([
                _output.authorized_pedersen_commitment.to_public_key().
                public_key for _output in self.outputs
            ])
            outputs_pedersen_commitment_sum = _t.to_pedersen_commitment()
            right_side.append(outputs_pedersen_commitment_sum)

            _t.combine([
                _output.address.pubkey.public_key for _output in self.outputs
            ])
            outputs_excesses_sum = _t.to_pedersen_commitment()
            left_side.append(outputs_excesses_sum)

        if len(self.additional_excesses):
            _t.combine([
                excess.pubkey.public_key for excess in self.additional_excesses
            ])
            additional_excesses_sum = _t.to_pedersen_commitment()
            left_side.append(additional_excesses_sum)

        if coinbase_num:
            minted_pc = PedersenCommitment(blinded_generator=default_generator)
            minted_pc.create(self.coinbase.value, b'\x00' * 32)
            left_side.append(minted_pc)

        relay_fee = 0
        for _output in self.outputs:
            if not _output.version == 0:
                if _output.generator == default_generator_ser:
                    relay_fee += _output.relay_fee

        new_outputs_fee = self.calc_new_outputs_fee()
        fee = relay_fee + new_outputs_fee

        negative_fee = False
        if fee < 0:
            # It's okay: the transaction consumed so many inputs that it is profitable by itself;
            # however, we need to handle this manually since libsecp256k1 cannot handle negative values
            negative_fee = True
            fee = -fee

        if not fee == 0:
            fee_pc = PedersenCommitment(blinded_generator=default_generator)  #TODO think about fees for assets
            fee_pc.create(fee, b'\x00' * 32)
            if negative_fee:
                left_side.append(fee_pc)
            else:
                right_side.append(fee_pc)

        checker = PedersenCommitment()
        # For transaction which contains coinbase only, both sides will be empty
        if len(left_side) or len(right_side):
            sum_to_zero = checker.verify_sum(left_side, right_side)
            assert sum_to_zero, "Non-zero Pedersen commitments sum"

        if not GLOBAL_TEST['skip combined excesses']:
            raise NotImplementedError
        if self.coinbase:
            info = self.coinbase.info()
            assert info['exp'] == -1, "Non-transparent coinbase"
            # TODO Ugly ->`self.txos_storage.storage_space.blockchain.current_height`
            assert self.coinbase.lock_height >= block_height + coinbase_maturity,\
                   "Wrong coinbase maturity timelock: %d should be at least %d"%(\
                    self.coinbase.lock_height, block_height + coinbase_maturity)

        tx_skel = TransactionSkeleton(tx=self)
        assert len(tx_skel.serialize(rich_format=False)) < 50000, "Too big tx_skeleton"
        return True
Code Example #5
def core_loop(syncer, config):
    message_queue = syncer.queues['Blockchain']
    init_storage_space(config)

    nodes = {}
    set_ask_for_blocks_hook(storage_space.blockchain, message_queue)
    set_ask_for_txouts_hook(storage_space.blocks_storage, message_queue)
    requests = {}
    message_queue.put({"action": "give nodes list reminder"})

    def send_message(destination, message):
        if not 'id' in message:
            message['id'] = uuid4()
        if not 'sender' in message:
            message['sender'] = "Blockchain"
        syncer.queues[destination].put(message)

    def send_to_nm(message):
        send_message("NetworkManager", message)

    logger.debug("Start of core loop")
    while True:
        sleep(0.05)
        put_back_messages = []
        while not message_queue.empty():
            message = message_queue.get()
            if 'time' in message and message['time'] > time():  # delay this message
                put_back_messages.append(message)
                continue
            logger.info("Processing message %s" % message)
            if not 'action' in message:  #it is response
                if message['id'] in requests:  # response is awaited
                    if requests[message['id']] == "give nodes list":
                        requests.pop(message['id'])
                        message_queue.put({
                            "action": "take nodes list",
                            "nodes": message["result"]
                        })
                else:
                    pass  #Drop
                continue
            try:
                if message["action"] == "take the headers":
                    process_new_headers(message)
                if message["action"] == "take the blocks":
                    initial_tip = storage_space.blockchain.current_tip
                    process_new_blocks(message)
                    after_tip = storage_space.blockchain.current_tip
                    if not after_tip == initial_tip:
                        notify_all_nodes_about_new_tip(nodes, send_to_nm)
                if message["action"] == "take the txos":
                    process_new_txos(message)
                if message["action"] == "give blocks":
                    process_blocks_request(message, send_message)
                if message["action"] == "give next headers":
                    process_next_headers_request(message, send_message)
                if message["action"] == "give txos":
                    process_txos_request(message, send_message)
                if message["action"] == "find common root":
                    process_find_common_root(message, send_message)
                if message["action"] == "find common root response":
                    process_find_common_root_reponse(message,
                                                     nodes[message["node"]],
                                                     send_message)
                if message["action"] == "give TBM transaction":
                    process_tbm_tx_request(message, send_message)
                if message["action"] == "take TBM transaction":
                    process_tbm_tx(message, send_to_nm, nodes)
                if message["action"] == "give tip height":
                    send_message(
                        message["sender"], {
                            "id": message["id"],
                            "result": storage_space.blockchain.current_height
                        })

                if message["action"] == "take tip info":
                    if not message["node"] in nodes:
                        nodes[message["node"]] = {'node': message["node"]}
                    process_tip_info(message,
                                     nodes[message["node"]],
                                     send=send_to_nm)
            except DOSException as e:
                logger.info("DOS Exception %s" % str(e))
                #raise e #TODO send to NM
            except Exception as e:
                raise e

            if message["action"] == "give block template":
                block = storage_space.mempool_tx.give_block_template()
                ser_head = block.header.serialize()
                send_message(message["sender"], {
                    "id": message["id"],
                    "result": ser_head
                })
            if message["action"] == "take solved block template":
                try:
                    initial_tip = storage_space.blockchain.current_tip
                    header = Header()
                    header.deserialize(message["solved template"])
                    solved_block = storage_space.mempool_tx.get_block_by_header_solution(
                        header)
                    storage_space.headers_manager.add_header(
                        solved_block.header)
                    storage_space.headers_manager.context_validation(
                        solved_block.header.hash)
                    solved_block.non_context_verify()
                    storage_space.blockchain.add_block(solved_block)
                    send_message(message["sender"], {
                        "id": message["id"],
                        "result": "Accepted"
                    })
                    after_tip = storage_space.blockchain.current_tip
                    if not after_tip == initial_tip:
                        notify_all_nodes_about_new_tip(nodes, send_to_nm)
                except Exception as e:
                    send_message(message["sender"], {
                        "id": message["id"],
                        "error": str(e)
                    })

            if message["action"] == "get confirmed balance stats":
                if storage_space.mempool_tx.key_manager:
                    stats = storage_space.mempool_tx.key_manager.get_confirmed_balance_stats(
                        storage_space.utxo_index, storage_space.txos_storage,
                        storage_space.blockchain.current_height)
                    send_message(message["sender"], {
                        "id": message["id"],
                        "result": stats
                    })
                else:
                    send_message(message["sender"], {
                        "id": message["id"],
                        "error": "No registered key manager"
                    })

            if message["action"] == "get confirmed balance list":
                if storage_space.mempool_tx.key_manager:
                    _list = storage_space.mempool_tx.key_manager.get_confirmed_balance_list(
                        storage_space.utxo_index, storage_space.txos_storage,
                        storage_space.blockchain.current_height)
                    send_message(message["sender"], {
                        "id": message["id"],
                        "result": _list
                    })
                else:
                    send_message(message["sender"], {
                        "id": message["id"],
                        "error": "No registered key manager"
                    })

            if message["action"] == "give new address":
                if storage_space.mempool_tx.key_manager:
                    texted_address = storage_space.mempool_tx.key_manager.new_address().to_text()
                    send_message(message["sender"], {
                        "id": message["id"],
                        "result": texted_address
                    })
                else:
                    send_message(message["sender"], {
                        "id": message["id"],
                        "error": "No registered key manager"
                    })

            if message["action"] == "give private key":
                if storage_space.mempool_tx.key_manager:
                    km = storage_space.mempool_tx.key_manager
                    a = Address()
                    a.from_text(message["address"])
                    serialized_pk = km.priv_by_address(a).serialize()
                    send_message(message["sender"], {
                        "id": message["id"],
                        "result": serialized_pk
                    })
                else:
                    send_message(message["sender"], {
                        "id": message["id"],
                        "error": "No registered key manager"
                    })

            if message["action"] == "take private key":
                if storage_space.mempool_tx.key_manager:
                    km = storage_space.mempool_tx.key_manager
                    pk = PrivateKey()
                    pk.deserialize(message['privkey'])
                    km.add_privkey(pk)
                    send_message(message["sender"], {
                        "id": message["id"],
                        "result": "imported"
                    })
                else:
                    send_message(message["sender"], {
                        "id": message["id"],
                        "error": "No registered key manager"
                    })

            if message["action"] == "give synchronization status":
                our_height = storage_space.blockchain.current_height
                best_known_header = storage_space.headers_manager.best_header_height
                try:
                    best_advertised_height = max([
                        nodes[node]["height"] for node in nodes
                        if "height" in nodes[node]
                    ])
                except:
                    best_advertised_height = None
                send_message(
                    message["sender"], {
                        "id": message["id"],
                        "result": {
                            'height': our_height,
                            'best_known_header': best_known_header,
                            'best_advertised_height': best_advertised_height
                        }
                    })

            if message["action"] == "send to address":
                value = int(message["value"])
                taddress = message["address"]
                a = Address()
                a.from_text(taddress)
                if storage_space.mempool_tx.key_manager:
                    _list = storage_space.mempool_tx.key_manager.get_confirmed_balance_list(
                        storage_space.utxo_index, storage_space.txos_storage,
                        storage_space.blockchain.current_height)
                    list_to_spend = []
                    summ = 0
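                    # Naive coin selection: greedily collect matured UTXOs from the
                    # confirmed balance list until their sum covers the requested value.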
                    for address in _list:
                        for texted_index in _list[address]:
                            if summ > value:
                                continue
                            if isinstance(_list[address][texted_index], int):
                                _index = base64.b64decode(texted_index.encode())
                                utxo = storage_space.txos_storage.confirmed[_index]
                                if not utxo.lock_height <= storage_space.blockchain.current_height:
                                    continue
                                list_to_spend.append(utxo)
                                summ += _list[address][texted_index]
                    if summ < value:
                        send_message(
                            message["sender"], {
                                "id": message["id"],
                                "error": "Not enough matured coins"
                            })
                        continue  # do not try to build a transaction we cannot fund
                    tx = Transaction(
                        txos_storage=storage_space.txos_storage,
                        key_manager=storage_space.mempool_tx.key_manager)
                    for utxo in list_to_spend:
                        tx.push_input(utxo)
                    tx.add_destination((a, value))
                    tx.generate()
                    tx.verify()
                    storage_space.mempool_tx.add_tx(tx)
                    tx_skel = TransactionSkeleton(tx=tx)
                    notify_all_nodes_about_tx(tx_skel.serialize(
                        rich_format=True, max_size=40000),
                                              nodes,
                                              send_to_nm,
                                              _except=[],
                                              mode=1)
                    send_message(message["sender"], {
                        "id": message["id"],
                        "result": "generated"
                    })
                else:
                    send_message(message["sender"], {
                        "id": message["id"],
                        "error": "No registered key manager"
                    })

            #message from core_loop
            if message["action"] == "check txouts download status":
                txos = message["txos_hashes"]
                to_be_downloaded = []
                for txo in txos:
                    if not storage_space.txos_storage.known(txo):
                        to_be_downloaded.append(txo)
                if not to_be_downloaded:
                    continue  #We are good, txouts are already downloaded
                already_asked_nodes = message["already_asked_nodes"]
                asked = False
                for node_params in nodes:
                    node = nodes[node_params]
                    if node in already_asked_nodes:
                        continue
                    already_asked_nodes += [node]
                    send_to_nm({
                        "action": "give txos",
                        "txos_hashes": b"".join(to_be_downloaded),
                        "num": len(to_be_downloaded),
                        "id": str(uuid4()),
                        "node": node_params
                    })
                    new_message = {
                        "action": "check txouts download status",
                        "txos_hashes": to_be_downloaded,
                        "already_asked_nodes": already_asked_nodes,
                        "id": str(uuid4()),
                        "time": int(time() + 300)
                    }
                    asked = True
                    put_back_messages.append(new_message)
                    break
                if not asked:  #We already asked all applicable nodes
                    message["time"] = int(time()) + 3600
                    message["already_asked_nodes"] = []
                    put_back_messages.append(
                        message)  # we will try to ask again in an hour

            #message from core_loop
            if message["action"] == "check blocks download status":
                #TODO download many blocks at once
                block_hashes = message["block_hashes"]
                to_be_downloaded = []
                lowest_height = 1e10
                for block_hash in block_hashes:
                    if block_hash in storage_space.blocks_storage:
                        continue  #We are good, block already downloaded
                    if not block_hash in storage_space.blockchain.awaited_blocks:
                        continue  #For some reason we don't need this block anymore
                    to_be_downloaded.append(block_hash)
                    if storage_space.headers_storage[block_hash].height < lowest_height:
                        lowest_height = storage_space.headers_storage[block_hash].height
                already_asked_nodes = message["already_asked_nodes"]
                asked = False
                for node_params in nodes:
                    node = nodes[node_params]
                    if node in already_asked_nodes:
                        continue
                    if node["height"] < lowest_height:
                        continue
                    already_asked_nodes += [node]
                    send_to_nm({
                        "action": "give blocks",
                        "block_hashes": bytes(b"".join(block_hashes)),
                        'num': len(block_hashes),
                        "id": str(uuid4()),
                        "node": node_params
                    })
                    new_message = {
                        "action": "check blocks download status",
                        "block_hashes": to_be_downloaded,
                        "already_asked_nodes": already_asked_nodes,
                        "id": str(uuid4()),
                        "time": int(time() + 300)
                    }
                    asked = True
                    put_back_messages.append(new_message)
                    break
                if not asked:  #We already asked all applicable nodes
                    message["time"] = int(time()) + 3600
                    message["already_asked_nodes"] = []
                    put_back_messages.append(
                        message)  # we will try to ask again in an hour

            if message["action"] == "take nodes list":
                for node in message["nodes"]:
                    if not node in nodes:  #Do not overwrite
                        nodes[node] = {"node": node}
                disconnected_nodes = []
                for existing_node in nodes:
                    if not existing_node in message["nodes"]:
                        disconnected_nodes.append(existing_node)
                for dn in disconnected_nodes:
                    nodes.pop(dn)

            if message["action"] == "give nodes list reminder":
                _id = str(uuid4())
                send_to_nm({
                    "action": "give nodes list",
                    "sender": "Blockchain",
                    "id": _id
                })
                requests[_id] = "give nodes list"
                put_back_messages.append({
                    "action": "give nodes list reminder",
                    "time": int(time()) + 3
                })

        for _message in put_back_messages:
            message_queue.put(_message)

        try:
            check_sync_status(nodes, send_to_nm)
        except Exception as e:
            logger.error(e)