Example 1
    async def receive_file(self, receiver):
        self.progress_update.start()
        parent, child = AioPipe(False)
        start_time = time()
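        # run the blocking download in a separate process; coro_join() below
        # waits for it without blocking the event loop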
        receive_process = AioProcess(target=receiver.fetch_data,
                                     args=(self.pipe[1], child))
        receive_process.start()
        await receive_process.coro_join()
        end_time = time() - start_time

        self.progress_update.wait()

        QMessageBox.information(
            self, "Information",
            f"Download complete, time taken {round(end_time / 60, 2)} minutes")

        receiver.save_location = parent.recv()

        self.ui.label_4.setText("Writing File")
        self.ui.progressBar.setValue(0)

        path = await receiver.write_data(receiver.save_location, self)
        rmtree(path)

        self.ui.receiveButton.setEnabled(True)
        self.ui.label_4.setVisible(False)
        self.ui.progressBar.setVisible(False)
        self.ui.label_4.setText("Download in Progress")
        self.ui.progressBar.setValue(0)
Example 2
        async def mine_new_block():
            """Get a new block and start mining.
            If a mining process has already been started, update the process to mine the new block.
            """
            block = await self.create_block_async_func()
            if not block:
                self.input_q.put((None, {}))
                return
            mining_params = self.get_mining_param_func()
            mining_params["consensus_type"] = self.consensus_type
            # handle mining simulation's timing
            if "target_block_time" in mining_params:
                target_block_time = mining_params["target_block_time"]
                mining_params["target_time"] = (
                    block.header.create_time +
                    self._get_block_time(block, target_block_time))
            work = MiningWork(
                block.header.get_hash_for_mining(),
                block.header.height,
                block.header.difficulty,
            )
            self.work_map[work.hash] = block
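            # a live mining process just gets the new work over its input queue;
            # otherwise spawn one below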
            if self.process:
                self.input_q.put((work, mining_params))
                return

            self.process = AioProcess(
                target=self.mine_loop,
                args=(work, mining_params, self.input_q, self.output_q),
            )
            self.process.start()
            await handle_mined_block()
Example 3
 def get_work():
     # work_map: header hash -> (work, shard)
     nonlocal work_map, self
     # existing_work: shard -> latest work fetched
     existing_work = {}  # type: Dict[int, MiningWork]
     while True:
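         # poll every configured shard for fresh work over RPC, forever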
         for config in self.configs:
             shard_id = config["shard_id"]
             try:
                 work = get_work_rpc(shard_id)
             except Exception:
                 # ignore network errors and try next one
                 logger.error("Failed to get work")
                 continue
             # skip duplicate work
             if (
                 shard_id in existing_work
                 and existing_work[shard_id].hash == work.hash
             ):
                 continue
             mining_params = {
                 "consensus_type": config["consensus_type"],
                 "shard": shard_id,
             }
             if self.process:
                 self.input_q.put((work, mining_params))
                 logger.info(
                     "Pushed work to %s height %d"
                     % (repr_shard(shard_id), work.height)
                 )
             else:
                 # start the process to mine
                 self.process = AioProcess(
                     target=Miner.mine_loop,
                     args=(work, mining_params, self.input_q, self.output_q),
                 )
                 self.process.start()
                 logger.info(
                     "Started mining process for %s" % repr_shard(shard_id)
                 )
             # bookkeeping
             existing_work[shard_id] = work
             work_map[work.hash] = (work, shard_id)
         # random sleep 1~2 secs
         time.sleep(random.uniform(1.0, 2.0))
Example 4
    async def send_file(self, sender):
        self.progress_update.start()

        start_time = time()
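        # same pattern as receive_file: offload the transfer to a subprocess
        # and await coro_join() so the event loop keeps running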
        send_process = AioProcess(target=sender.send_data,
                                  args=(self.pipe[1], ))
        send_process.start()
        await send_process.coro_join()
        end_time = time() - start_time

        self.progress_update.wait()

        QMessageBox.information(
            self, "Information",
            f"Transfer complete, time taken {round(end_time / 60, 2)} minutes")

        self.ui.progressBar.setValue(0)
        self.ui.sendButton.setEnabled(True)
        self.ui.labelProgress.setVisible(False)
        self.ui.progressBar.setVisible(False)
Example 5
        async def mine_new_block(instance: Miner):
            """Get a new block and start mining.
            If a mining process has already been started, update the process to mine the new block.
            """
            block = await instance.create_block_async_func()
            mining_params = instance.get_mining_param_func()
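            # push the new block to the existing mining process, if any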
            if instance.process:
                instance.input_q.put((block, mining_params))
                return

            instance.process = AioProcess(
                target=instance.mine_func,
                args=(block, instance.input_q, instance.output_q,
                      mining_params),
            )
            instance.process.start()
            await handle_mined_block(instance)
Example 6
class Miner:
    def __init__(
        self,
        consensus_type: ConsensusType,
        create_block_async_func: Callable[[], Awaitable[Optional[Block]]],
        add_block_async_func: Callable[[Block], Awaitable[None]],
        get_mining_param_func: Callable[[], Dict[str, Any]],
        remote: bool = False,
    ):
        """Mining will happen on a subprocess managed by this class

        create_block_async_func: takes no argument, returns a block (either RootBlock or MinorBlock)
        add_block_async_func: takes a block, add it to chain
        get_mining_param_func: takes no argument, returns the mining-specific params
        """
        self.consensus_type = consensus_type

        self.create_block_async_func = create_block_async_func
        self.add_block_async_func = add_block_async_func
        self.get_mining_param_func = get_mining_param_func
        self.enabled = False
        self.process = None

        self.input_q = AioQueue()  # [(MiningWork, param dict)]
        self.output_q = AioQueue()  # [MiningResult]

        # header hash -> work
        self.work_map = {}  # type: Dict[bytes, Block]

        if not remote and consensus_type != ConsensusType.POW_SIMULATE:
            Logger.warning("Mining locally, could be slow and error-prone")
        # remote miner specific attributes
        self.remote = remote
        self.current_work = None  # type: Optional[Block]

    def start(self):
        self.enabled = True
        self._mine_new_block_async()

    def is_enabled(self):
        return self.enabled

    def disable(self):
        """Stop the mining process if there is one"""
        if self.enabled and self.process:
            # end the mining process
            self.input_q.put((None, {}))
        self.enabled = False

    def _mine_new_block_async(self):
        async def handle_mined_block():
            while True:
                res = await self.output_q.coro_get()  # type: MiningResult
                if not res:
                    return  # empty result means ending
                # start mining before processing and propagating mined block
                self._mine_new_block_async()
                block = self.work_map[res.header_hash]
                block.header.nonce = res.nonce
                block.header.mixhash = res.mixhash
                del self.work_map[res.header_hash]
                self._track(block)
                try:
                    # FIXME: Root block should include latest minor block headers while it's being mined
                    # This is a hack to get the latest minor block included since testnet does not check difficulty
                    if self.consensus_type == ConsensusType.POW_SIMULATE:
                        block = await self.create_block_async_func()
                        block.header.nonce = random.randint(0, 2**32 - 1)
                        self._track(block)
                        self._log_status(block)
                    await self.add_block_async_func(block)
                except Exception as ex:
                    Logger.error(ex)

        async def mine_new_block():
            """Get a new block and start mining.
            If a mining process has already been started, update the process to mine the new block.
            """
            block = await self.create_block_async_func()
            if not block:
                self.input_q.put((None, {}))
                return
            mining_params = self.get_mining_param_func()
            # handle mining simulation's timing
            if "target_block_time" in mining_params:
                target_block_time = mining_params["target_block_time"]
                mining_params["target_time"] = (
                    block.header.create_time +
                    self._get_block_time(block, target_block_time))
            work = MiningWork(
                block.header.get_hash_for_mining(),
                block.header.height,
                block.header.difficulty,
            )
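            # remember the block so the mined result (keyed by header hash) can be matched back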
            self.work_map[work.hash] = block
            if self.process:
                self.input_q.put((work, mining_params))
                return

            self.process = AioProcess(
                target=self._mine_loop,
                args=(
                    self.consensus_type,
                    work,
                    mining_params,
                    self.input_q,
                    self.output_q,
                ),
            )
            self.process.start()
            await handle_mined_block()

        # no-op if not enabled or mining remotely
        if not self.enabled or self.remote:
            return None
        return asyncio.ensure_future(mine_new_block())

    async def get_work(self, now=None) -> MiningWork:
        if not self.remote:
            raise ValueError("Should only be used for remote miner")

        if now is None:  # allow tests to inject a mock clock
            now = time.time()
        # 5 sec interval magic number
        if not self.current_work or now - self.current_work.header.create_time > 5:
            block = await self.create_block_async_func()
            if not block:
                raise RuntimeError("Failed to create block")
            self.current_work = block

        header = self.current_work.header
        header_hash = header.get_hash_for_mining()
        # store in memory for future retrieval during work submission
        self.work_map[header_hash] = self.current_work

        # clean up work map
        # TODO: for now, same param as go-ethereum
        self.work_map = {
            h: b
            for h, b in self.work_map.items()
            if now - b.header.create_time < 7 * 12
        }

        return MiningWork(header_hash, header.height, header.difficulty)

    async def submit_work(self, header_hash: bytes, nonce: int,
                          mixhash: bytes) -> bool:
        if not self.remote:
            raise ValueError("Should only be used for remote miner")

        if header_hash not in self.work_map:
            return False
        block = self.work_map[header_hash]
        header = copy.copy(block.header)
        header.nonce, header.mixhash = nonce, mixhash
        try:
            validate_seal(header, self.consensus_type)
        except ValueError:
            return False

        block.header = header  # actual update
        try:
            await self.add_block_async_func(block)
            del self.work_map[header_hash]
            self.current_work = None
            return True
        except Exception as ex:
            Logger.error(ex)
            return False

    @staticmethod
    def _mine_loop(
        consensus_type: ConsensusType,
        work: MiningWork,
        mining_params: Dict,
        input_q: MultiProcessingQueue,
        output_q: MultiProcessingQueue,
    ):
        consensus_to_mining_algo = {
            ConsensusType.POW_SIMULATE: Simulate,
            ConsensusType.POW_ETHASH: Ethash,
            ConsensusType.POW_SHA3SHA3: DoubleSHA256,
        }
        mining_algo_gen = consensus_to_mining_algo[consensus_type]
        # TODO: maybe add rounds to config json
        rounds = mining_params.get("rounds", 100)
        progress = {}
        # outer loop for mining forever
        while True:
            # empty work means termination
            if not work:
                output_q.put(None)
                return

            mining_algo = mining_algo_gen(work, **mining_params)
            # progress tracking if mining param contains shard info
            if "shard" in mining_params:
                shard = mining_params["shard"]
                # stale work (height already mined): block for newer work instead of spinning
                if shard in progress and progress[shard] >= work.height:
                    work, mining_params = input_q.get(block=True)
                    continue
            # inner loop for iterating nonce
            start_nonce = 0
            while True:
                res = mining_algo.mine(start_nonce + 1,
                                       start_nonce + 1 + rounds)
                if res:
                    output_q.put(res)
                    if "shard" in mining_params:
                        progress[mining_params["shard"]] = work.height
                    work, mining_params = input_q.get(block=True)
                    break  # break inner loop to refresh mining params
                # no result for mining, check if new work arrives
                # if yes, discard current work and restart
                try:
                    work, mining_params = input_q.get_nowait()
                    break  # break inner loop to refresh mining params
                except Exception:  # queue empty
                    pass
                # update param and keep mining
                start_nonce += rounds

    @staticmethod
    def _track(block: Block):
        """Post-process block to track block propagation latency"""
        tracking_data = json.loads(block.tracking_data.decode("utf-8"))
        tracking_data["mined"] = time_ms()
        block.tracking_data = json.dumps(tracking_data).encode("utf-8")

    @staticmethod
    def _log_status(block: Block):
        is_root = isinstance(block, RootBlock)
        shard = "R" if is_root else block.header.branch.get_shard_id()
        count = len(block.minor_block_header_list) if is_root else len(
            block.tx_list)
        elapsed = time.time() - block.header.create_time
        Logger.info_every_sec(
            "[{}] {} [{}] ({:.2f}) {}".format(
                shard,
                block.header.height,
                count,
                elapsed,
                block.header.get_hash().hex(),
            ),
            60,
        )

    @staticmethod
    def _get_block_time(block: Block, target_block_time) -> float:
        if isinstance(block, MinorBlock):
            # Adjust the target block time to compensate computation time
            gas_used_ratio = block.meta.evm_gas_used / block.header.evm_gas_limit
            target_block_time = target_block_time * (1 - gas_used_ratio * 0.4)
            Logger.debug("[{}] target block time {:.2f}".format(
                block.header.branch.get_shard_id(), target_block_time))
        return numpy.random.exponential(target_block_time)
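A minimal wiring sketch (not one of the collected examples): it shows how the three callables from the constructor docstring could be hooked up to a Miner. The `chain` object, its `create_block_to_mine`/`add_block` methods, and the event-loop setup are hypothetical stand-ins.

# Hypothetical usage sketch for the Miner class above; "chain" is a stand-in
# for the real chain-state object and is not part of the collected examples.
async def start_local_mining(chain):
    async def create_block():
        # build the next block to mine (RootBlock or MinorBlock)
        return await chain.create_block_to_mine()

    async def add_block(block):
        # append a successfully mined block to the chain
        await chain.add_block(block)

    def mining_params():
        # target_block_time drives the POW_SIMULATE timing handled above
        return {"target_block_time": 10}

    miner = Miner(ConsensusType.POW_SIMULATE, create_block, add_block, mining_params)
    miner.start()  # schedules mine_new_block() on the running event loop
    return miner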
Example 7
class Miner:
    def __init__(
        self,
        consensus_type: ConsensusType,
        create_block_async_func: Callable[..., Awaitable[Optional[Block]]],
        add_block_async_func: Callable[[Block], Awaitable[None]],
        get_mining_param_func: Callable[[], Dict[str, Any]],
        get_header_tip_func: Callable[[], Header],
        remote: bool = False,
        root_signer_private_key: Optional[KeyAPI.PrivateKey] = None,
    ):
        """Mining will happen on a subprocess managed by this class

        create_block_async_func: takes no argument, returns a block (either RootBlock or MinorBlock)
        add_block_async_func: takes a block, add it to chain
        get_mining_param_func: takes no argument, returns the mining-specific params
        """
        self.consensus_type = consensus_type

        self.create_block_async_func = create_block_async_func
        self.add_block_async_func = add_block_async_func
        self.get_mining_param_func = get_mining_param_func
        self.get_header_tip_func = get_header_tip_func
        self.enabled = False
        self.process = None

        self.input_q = AioQueue()  # [(MiningWork, param dict)]
        self.output_q = AioQueue()  # [MiningResult]

        # header hash -> block under work
        # max size (tx max 258 bytes, gas limit 12m) ~= ((12m / 21000) * 258) * 128 = 18mb
        self.work_map = LRUCache(maxsize=128)

        if not remote and consensus_type != ConsensusType.POW_SIMULATE:
            Logger.warning("Mining locally, could be slow and error-prone")
        # remote miner specific attributes
        self.remote = remote
        # coinbase address -> header hash
        # key can be None, meaning default coinbase address from local config
        self.current_works = LRUCache(128)
        self.root_signer_private_key = root_signer_private_key

    def start(self):
        self.enabled = True
        self._mine_new_block_async()

    def is_enabled(self):
        return self.enabled

    def disable(self):
        """Stop the mining process if there is one"""
        if self.enabled and self.process:
            # end the mining process
            self.input_q.put((None, {}))
        self.enabled = False

    def _mine_new_block_async(self):
        async def handle_mined_block():
            while True:
                res = await self.output_q.coro_get()  # type: MiningResult
                if not res:
                    return  # empty result means ending
                # start mining before processing and propagating mined block
                self._mine_new_block_async()
                block = self.work_map[res.header_hash]
                block.header.nonce = res.nonce
                block.header.mixhash = res.mixhash
                del self.work_map[res.header_hash]
                self._track(block)
                try:
                    # FIXME: Root block should include latest minor block headers while it's being mined
                    # This is a hack to get the latest minor block included since testnet does not check difficulty
                    if self.consensus_type == ConsensusType.POW_SIMULATE:
                        block = await self.create_block_async_func(
                            Address.create_empty_account())
                        block.header.nonce = random.randint(0, 2**32 - 1)
                        self._track(block)
                        self._log_status(block)
                    await self.add_block_async_func(block)
                except Exception:
                    Logger.error_exception()

        async def mine_new_block():
            """Get a new block and start mining.
            If a mining process has already been started, update the process to mine the new block.
            """
            block = await self.create_block_async_func(
                Address.create_empty_account())
            if not block:
                self.input_q.put((None, {}))
                return
            mining_params = self.get_mining_param_func()
            mining_params["consensus_type"] = self.consensus_type
            # handle mining simulation's timing
            if "target_block_time" in mining_params:
                target_block_time = mining_params["target_block_time"]
                mining_params["target_time"] = (
                    block.header.create_time +
                    self._get_block_time(block, target_block_time))
            work = MiningWork(
                block.header.get_hash_for_mining(),
                block.header.height,
                block.header.difficulty,
            )
            self.work_map[work.hash] = block
            if self.process:
                self.input_q.put((work, mining_params))
                return

            self.process = AioProcess(
                target=self.mine_loop,
                args=(work, mining_params, self.input_q, self.output_q),
            )
            self.process.start()
            await handle_mined_block()

        # no-op if not enabled or mining remotely
        if not self.enabled or self.remote:
            return None
        return asyncio.ensure_future(mine_new_block())

    async def get_work(self,
                       coinbase_addr: Address,
                       now=None) -> Tuple[MiningWork, Block]:
        if not self.remote:
            raise ValueError("Should only be used for remote miner")

        if now is None:  # allow tests to inject a mock clock
            now = time.time()

        block = None
        header_hash = self.current_works.get(coinbase_addr)
        if header_hash:
            block = self.work_map.get(header_hash)
        tip_hash = self.get_header_tip_func().get_hash()
        if (not block  # no work cache
                or block.header.hash_prev_block != tip_hash  # cache outdated
                or now - block.header.create_time > 10  # stale
            ):
            block = await self.create_block_async_func(coinbase_addr,
                                                       retry=False)
            if not block:
                raise RuntimeError("Failed to create block")
            header_hash = block.header.get_hash_for_mining()
            self.current_works[coinbase_addr] = header_hash
            self.work_map[header_hash] = block

        header = block.header
        return (
            MiningWork(header_hash, header.height, header.difficulty),
            copy.deepcopy(block),
        )

    async def submit_work(
        self,
        header_hash: bytes,
        nonce: int,
        mixhash: bytes,
        signature: Optional[bytes] = None,
    ) -> bool:
        if not self.remote:
            raise ValueError("Should only be used for remote miner")

        if header_hash not in self.work_map:
            return False
        # this copy is necessary since there might be multiple submissions concurrently
        block = copy.deepcopy(self.work_map[header_hash])
        header = block.header

        # reject if tip updated
        tip_hash = self.get_header_tip_func().get_hash()
        if header.hash_prev_block != tip_hash:
            del self.work_map[header_hash]
            return False

        header.nonce, header.mixhash = nonce, mixhash
        # sign using the root_signer_private_key
        if self.root_signer_private_key and isinstance(block, RootBlock):
            header.sign_with_private_key(self.root_signer_private_key)

        # remote sign as a guardian
        if isinstance(block, RootBlock) and signature is not None:
            header.signature = signature

        try:
            await self.add_block_async_func(block)
            # a previous submission of the same work could have removed the key
            if header_hash in self.work_map:
                del self.work_map[header_hash]
            return True
        except Exception:
            Logger.error_exception()
            return False

    @staticmethod
    def mine_loop(
        work: Optional[MiningWork],
        mining_params: Dict,
        input_q: Queue,
        output_q: Queue,
        debug=False,
    ):
        consensus_to_mining_algo = {
            ConsensusType.POW_SIMULATE: Simulate,
            ConsensusType.POW_ETHASH: Ethash,
            ConsensusType.POW_QKCHASH: Qkchash,
            ConsensusType.POW_DOUBLESHA256: DoubleSHA256,
        }
        progress = {}

        def debug_log(msg: str, prob: float):
            # print roughly `prob` fraction of the time, and only when debugging
            if debug and random.random() < prob:
                print(msg)

        try:
            # outer loop for mining forever
            while True:
                # empty work means termination
                if not work:
                    output_q.put(None)
                    return

                debug_log("outer mining loop", 0.1)
                consensus_type = mining_params["consensus_type"]
                mining_algo_gen = consensus_to_mining_algo[consensus_type]
                mining_algo = mining_algo_gen(work, **mining_params)
                # progress tracking if mining param contains shard info
                if "full_shard_id" in mining_params:
                    full_shard_id = mining_params["full_shard_id"]
                    # skip blocks with height lower or equal
                    if (full_shard_id in progress
                            and progress[full_shard_id] >= work.height):
                        # get newer work and restart mining
                        debug_log("stale work, try to get new one", 1.0)
                        work, mining_params = input_q.get(block=True)
                        continue

                rounds = mining_params.get("rounds", 100)
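                # start from a random nonce; the inner loop scans `rounds` nonces
                # per iteration and wraps around at MAX_NONCE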
                start_nonce = random.randint(0, MAX_NONCE)
                # inner loop for iterating nonce
                while True:
                    if start_nonce > MAX_NONCE:
                        start_nonce = 0
                    end_nonce = min(start_nonce + rounds, MAX_NONCE + 1)
                    res = mining_algo.mine(start_nonce,
                                           end_nonce)  # [start, end)
                    debug_log("one round of mining", 0.01)
                    if res:
                        debug_log("mining success", 1.0)
                        output_q.put(res)
                        if "full_shard_id" in mining_params:
                            progress[
                                mining_params["full_shard_id"]] = work.height
                        work, mining_params = input_q.get(block=True)
                        break  # break inner loop to refresh mining params
                    # no result for mining, check if new work arrives
                    # if yes, discard current work and restart
                    try:
                        work, mining_params = input_q.get_nowait()
                        break  # break inner loop to refresh mining params
                    except QueueEmpty:
                        debug_log("empty queue", 0.1)
                        pass
                    # update param and keep mining
                    start_nonce += rounds
        except Exception:
            # report failures from the mining subprocess with a full traceback
            import traceback

            traceback.print_exc()

    @staticmethod
    def _track(block: Block):
        """Post-process block to track block propagation latency"""
        tracking_data = json.loads(block.tracking_data.decode("utf-8"))
        tracking_data["mined"] = time_ms()
        block.tracking_data = json.dumps(tracking_data).encode("utf-8")

    @staticmethod
    def _log_status(block: Block):
        is_root = isinstance(block, RootBlock)
        full_shard_id = ("R" if is_root else
                         block.header.branch.get_full_shard_id())
        count = len(block.minor_block_header_list) if is_root else len(
            block.tx_list)
        elapsed = time.time() - block.header.create_time
        Logger.info_every_sec(
            "[{}] {} [{}] ({:.2f}) {}".format(
                full_shard_id,
                block.header.height,
                count,
                elapsed,
                block.header.get_hash().hex(),
            ),
            60,
        )

    @staticmethod
    def _get_block_time(block: Block, target_block_time) -> float:
        if isinstance(block, MinorBlock):
            # Adjust the target block time to compensate computation time
            gas_used_ratio = block.meta.evm_gas_used / block.header.evm_gas_limit
            target_block_time = target_block_time * (1 - gas_used_ratio * 0.4)
            Logger.debug("[{}] target block time {:.2f}".format(
                block.header.branch.get_full_shard_id(), target_block_time))
        return numpy.random.exponential(target_block_time)
Example 8
class ExternalMiner(threading.Thread):
    """One external miner could handles multiple shards."""

    def __init__(self, configs):
        super().__init__()
        self.configs = configs
        self.input_q = AioQueue()
        self.output_q = AioQueue()
        self.process = None

    def run(self):
        work_map = {}  # type: Dict[bytes, Tuple[MiningWork, int]]

        # start the thread to get work
        def get_work():
            # work_map: header hash -> (work, shard)
            nonlocal work_map, self
            # existing_work: shard -> latest work fetched
            existing_work = {}  # type: Dict[int, MiningWork]
            while True:
                for config in self.configs:
                    shard_id = config["shard_id"]
                    try:
                        work = get_work_rpc(shard_id)
                    except Exception:
                        # ignore network errors and try next one
                        logger.error("Failed to get work")
                        continue
                    # skip duplicate work
                    if (
                        shard_id in existing_work
                        and existing_work[shard_id].hash == work.hash
                    ):
                        continue
                    mining_params = {
                        "consensus_type": config["consensus_type"],
                        "shard": shard_id,
                    }
                    if self.process:
                        self.input_q.put((work, mining_params))
                        logger.info(
                            "Pushed work to %s height %d"
                            % (repr_shard(shard_id), work.height)
                        )
                    else:
                        # start the process to mine
                        self.process = AioProcess(
                            target=Miner.mine_loop,
                            args=(work, mining_params, self.input_q, self.output_q),
                        )
                        self.process.start()
                        logger.info(
                            "Started mining process for %s" % repr_shard(shard_id)
                        )
                    # bookkeeping
                    existing_work[shard_id] = work
                    work_map[work.hash] = (work, shard_id)
                # random sleep 1~2 secs
                time.sleep(random.uniform(1.0, 2.0))

        get_work_thread = threading.Thread(target=get_work)
        get_work_thread.start()

        # the current thread handles the work submission
        while True:
            res = self.output_q.get(block=True)  # type: MiningResult
            work, shard_id = work_map[res.header_hash]
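            # retry the submission RPC until it succeeds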
            while True:
                try:
                    success = submit_work_rpc(shard_id, res)
                    break
                except Exception:
                    logger.error("Failed to submit work, backing off...")
                    time.sleep(0.5)

            logger.info(
                "Mining result submission result: %s for %s height %d"
                % (
                    "success" if success else "failure",
                    repr_shard(shard_id),
                    work.height,
                )
            )
            del work_map[res.header_hash]  # clear bookkeeping