def new_block(data):
    """Validate and insert a block received from the network.

    :param data: broadcast payload consumed by fill_newblock_info()
    :return: True if the block was accepted and inserted, False otherwise
    """
    try:
        new_block = fill_newblock_info(data)
    except BlockChainError as e:
        # expected rejection (bad PoW, already known, etc.) — warn, don't mark failure
        warning = 'Do not accept block "{}"'.format(e)
        logging.warning(warning)
        return False
    except Exception:
        # FIX: was `except BaseException`, which also swallowed SystemExit and
        # KeyboardInterrupt; the sibling implementation of this function uses
        # `except Exception`, so match it.
        error = "error on accept new block"
        logging.error(error, exc_info=True)
        add_failed_mark(error)
        return False
    try:
        if new_insert_block(new_block, time_check=True):
            update_mining_staking_all_info()
            logging.info("Accept new block {}".format(new_block))
            return True
        else:
            return False
    except BlockChainError as e:
        error = 'Failed accept new block "{}"'.format(e)
        logging.error(error, exc_info=True)
        return False
    except Exception:
        # FIX: same BaseException -> Exception narrowing as above
        error = "error on accept new block"
        logging.error(error, exc_info=True)
        add_failed_mark(error)
        return False
def new_block(data):
    """Validate a broadcast block and try to insert it into the chain.

    Returns True only when the block is both filled/verified and inserted;
    every failure path logs and returns False.
    """
    # step 1: build and verify the Block object from the broadcast payload
    try:
        block = fill_newblock_info(data)
    except BlockChainError as e:
        log.warning('Do not accept block "{}"'.format(e))
        return False
    except Exception:
        log.error("error on accept new block", exc_info=True)
        return False
    # step 2: insert it; refresh generator info only on success
    try:
        if not new_insert_block(block):
            return False
        update_info_for_generate()
        log.info("Accept new block {}".format(block))
        return True
    except BlockChainError as e:
        log.error('Failed accept new block "{}"'.format(e), exc_info=True)
        return False
    except Exception:
        log.error("error on accept new block", exc_info=True)
        return False
def load_bootstrap_file():
    """Import blocks line-by-line from a base64/pickle encoded bootstrap.dat.

    Each line of the file is one base64-encoded pickled Block.
    """
    boot_path = os.path.join(V.DB_HOME_DIR, 'bootstrap.dat')
    with open(boot_path, mode='br') as fp:
        b_data = fp.readline()
        block = None  # keeps the last loaded block for the final log line
        while b_data:
            # SECURITY NOTE(review): pickle.loads executes arbitrary code on
            # malicious input — only load a bootstrap.dat from a trusted source.
            block = pickle.loads(b64decode(b_data.rstrip()))
            # first pass: stage non-reward txs as unconfirmed (height cleared
            # so put_unconfirmed treats them as pending)
            for tx in block.txs:
                tx.height = None
                if tx.type in (C.TX_POW_REWARD, C.TX_POS_REWARD):
                    continue
                tx_builder.put_unconfirmed(tx)
            # second pass: pin every tx to the block's height before insert
            for tx in block.txs:
                tx.height = block.height
            # time_check disabled: historical blocks are far older than now
            new_insert_block(block=block, time_check=False)
            b_data = fp.readline()
    logging.debug("load bootstrap.dat! last={}".format(block))
def load_bootstrap_file(boot_path=None):
    """Import blocks from a gzip/msgpack bootstrap archive.

    :param boot_path: archive location; defaults to
        bootstrap-ver{chain_version}.dat.gz under V.DB_HOME_DIR.
        Missing file is not an error — the import is simply skipped.
    """
    if boot_path is None:
        boot_path = os.path.join(
            V.DB_HOME_DIR, 'bootstrap-ver{}.dat.gz'.format(__chain_version__))
    if not os.path.exists(boot_path):
        log.warning("Not found, skip import bootstrap.dat.gz")
        return
    log.info("Start to load blocks from bootstrap.dat.gz")
    started = time()
    last_block = None  # remembered for the summary log line
    with gzip.open(boot_path, mode='rb') as stream:
        # each record is a (block, work_hash, bias) triple
        for last_block, work_hash, bias in msgpack.stream_unpacker(stream):
            last_block.work_hash = work_hash
            last_block._bias = bias
            for tx in last_block.txs:
                tx.height = last_block.height
            # f_time disabled (historical blocks), signatures still verified
            new_insert_block(block=last_block, f_time=False, f_sign=True)
            # console progress every 1000 blocks
            if last_block.height % 1000 == 0:
                print("Load block now {} height {}Sec".format(
                    last_block.height, round(time() - started)))
    log.info("load bootstrap.dat.gz finished, last={} {}Minutes".format(
        last_block, (time() - started) // 60))
def mined_newblock(que):
    """new thread, broadcast mined block to network

    Consumes mined blocks from `que` until P.F_STOP is set; each block is
    sanity-checked against the current best chain, inserted locally, then
    broadcast to peers.
    """
    assert V.P2P_OBJ, "PeerClient is None"
    while not P.F_STOP:
        try:
            # 1s timeout so the loop can re-check stop flags regularly
            new_block = que.get(timeout=1)
            new_block.create_time = int(time())
            if P.F_NOW_BOOTING:
                log.debug("self reject, mined but now booting")
                continue
            elif new_block.height != chain_builder.best_block.height + 1:
                # stale: chain advanced while we were mining
                log.debug("self reject, mined but its old block")
                continue
            else:
                log.debug("Mined block check success")
                if new_insert_block(new_block):
                    log.info("Mined new block {}".format(new_block.getinfo()))
                else:
                    log.debug("self reject, cannot new insert")
                    update_info_for_generate()
                    continue
            # build broadcast payload; txs[0] is the proof (reward) tx
            proof_tx = new_block.txs[0]
            txs_hash_list = [tx.hash for tx in new_block.txs]
            data = {
                'cmd': BroadcastCmd.NEW_BLOCK,
                'data': {
                    'binary': new_block.b,
                    'height': new_block.height,
                    'txs': txs_hash_list,
                    'proof': proof_tx,
                    'block_flag': new_block.flag,
                }
            }
            try:
                V.P2P_OBJ.send_command(cmd=Peer2PeerCmd.BROADCAST, data=data)
                log.info("Success broadcast new block {}".format(new_block))
                update_info_for_generate()
            except TimeoutError:
                log.warning(
                    "Failed broadcast new block, other nodes don\'t accept {}".
                    format(new_block.getinfo()))
        except queue.Empty:
            # no mined block this second; exit only when the P2P layer stopped
            if V.P2P_OBJ.f_stop:
                log.debug("Mined new block closed")
                break
        except BlockChainError as e:
            log.error('Failed mined new block "{}"'.format(e))
        except Exception as e:
            log.error("mined_newblock()", exc_info=True)
def mined_newblock(que, pc):
    # Broadcast newly mined blocks to the P2P network (loop runs forever;
    # exits only via queue.Empty + pc.f_stop below)
    while True:
        try:
            # 1s timeout so pc.f_stop gets checked at least once a second
            new_block = que.get(timeout=1)
            new_block.create_time = int(time.time())
            if P.F_NOW_BOOTING:
                logging.debug("Mined but now booting..")
                continue
            elif new_block.height != builder.best_block.height + 1:
                # stale: chain advanced while mining
                logging.debug("Mined but its old block...")
                continue
            elif new_insert_block(new_block, time_check=True):
                logging.info("Mined new block {}".format(new_block.getinfo()))
            else:
                # insert failed — refresh mining/staking info and drop the block
                update_mining_staking_all_info()
                continue
            # build broadcast payload; txs[0] is the proof (reward) tx
            proof = new_block.txs[0]
            others = [tx.hash for tx in new_block.txs]
            data = {
                'cmd': BroadcastCmd.NEW_BLOCK,
                'data': {
                    'block': new_block.b,
                    'txs': others,
                    'proof': proof.b,
                    'block_flag': new_block.flag,
                    'sign': proof.signature
                }
            }
            try:
                pc.send_command(cmd=ClientCmd.BROADCAST, data=data)
                logging.info(
                    "Success broadcast new block {}".format(new_block))
                update_mining_staking_all_info()
            except TimeoutError:
                logging.warning(
                    "Failed broadcast new block, other nodes don\'t accept {}".
                    format(new_block.getinfo()))
                # peers rejected our block — assume we are out of sync
                P.F_NOW_BOOTING = True
        except queue.Empty:
            if pc.f_stop:
                logging.debug("Mined new block closed.")
                break
        except BlockChainError as e:
            logging.error('Failed mined new block "{}"'.format(e))
        except Exception as e:
            logging.error("mined_newblock()", exc_info=True)
def fill_newblock_info(data):
    """Build and verify a Block from a NEW_BLOCK broadcast payload.

    Reconstructs the block and its proof tx, verifies PoW, resolves the
    previous block (fetching it from peers if unknown), then fills in all
    general txs — downloading any the local pool doesn't have.

    :raises BlockChainError: on bad PoW, duplicate block, failed
        before-block insert, or failed tx download.
    """
    new_block = Block(binary=data['block'])
    logging.debug("Fill newblock={}".format(hexlify(new_block.hash).decode()))
    proof = TX(binary=data['proof'])
    new_block.txs.append(proof)
    new_block.flag = data['block_flag']
    proof.signature = data['sign']
    # Check the block is correct info
    if not new_block.pow_check():
        raise BlockChainError('Proof of work is not satisfied.')
    my_block = builder.get_block(new_block.hash)
    if my_block:
        raise BlockChainError('Already inserted block {}'.format(my_block))
    before_block = builder.get_block(new_block.previous_hash)
    if before_block is None:
        logging.debug("Cannot find beforeBlock {}, try to ask outside node."
                      .format(hexlify(new_block.previous_hash).decode()))
        # not found beforeBlock, need to check other node have the block
        new_block.inner_score *= 0.70  # unknown previousBlock, score down
        before_block = make_block_by_node(blockhash=new_block.previous_hash)
        if not new_insert_block(before_block, time_check=True):
            # require time_check, it was generated only a few seconds ago
            # print([block for block in builder.chain.values()])
            raise BlockChainError('Failed insert beforeBlock {}'.format(before_block))
    new_height = before_block.height + 1
    proof.height = new_height
    new_block.height = new_height
    # Append general txs (skip index 0 — that's the proof tx already appended)
    for txhash in data['txs'][1:]:
        tx = tx_builder.get_tx(txhash)
        if tx is None:
            new_block.inner_score *= 0.75  # unknown tx, score down
            logging.debug("Unknown tx, try to download.")
            r = ask_node(cmd=DirectCmd.TX_BY_HASH, data={'txhash': txhash}, f_continue_asking=True)
            # a str reply means the peer returned an error message, not a tx
            if isinstance(r, str):
                raise BlockChainError('Failed unknown tx download "{}"'.format(r))
            tx = TX(binary=r['tx'])
            tx.signature = r['sign']
            check_tx(tx, include_block=None)
            tx_builder.put_unconfirmed(tx)
            logging.debug("Success unknown tx download {}".format(tx))
        tx.height = new_height
        new_block.txs.append(tx)
    return new_block
def make_block_by_node(blockhash, depth=0):
    """create Block by outside node

    Fetches the block for `blockhash` from peers; if its parent is unknown
    locally, recurses (up to C.MAX_RECURSIVE_BLOCK_DEPTH) to fetch ancestors
    first, then inserts the fetched block with a score penalty.

    :param blockhash: hash of the block to fetch
    :param depth: current recursion depth (default 0 so top-level callers
        may omit it; recursive calls pass depth + 1)
    :raises BlockChainError: when recursion depth is exceeded or insert fails
    """
    log.debug("make block by node depth={} hash={}".format(
        depth, blockhash.hex()))
    block: Block = seek_nodes(cmd=DirectCmd.BLOCK_BY_HASH,
                              data={'blockhash': blockhash})
    before_block = chain_builder.get_block(blockhash=block.previous_hash)
    if before_block is None:
        if depth < C.MAX_RECURSIVE_BLOCK_DEPTH:
            before_block = make_block_by_node(blockhash=block.previous_hash,
                                              depth=depth + 1)
        else:
            raise BlockChainError(
                'Cannot recursive get block depth={} hash={}'.format(
                    depth, block.previous_hash.hex()))
    height = before_block.height + 1
    block.height = height
    block.inner_score *= 0.70  # externally fetched block, score down
    for tx in block.txs:
        tx.height = height
    if not new_insert_block(block=block, f_time=False, f_sign=True):
        # FIX: the failed insert is `block`, not `before_block` — the old
        # message 'Failed insert beforeBlock {before_block}' reported the
        # wrong block
        raise BlockChainError(
            'Failed insert block {}'.format(block))
    return block
def _main_loop():
    """Fast-sync loop: while F_NOW_BOOTING, pull blocks from the back-sync
    stack, insert them (rolling back on discontinuity), then import peers'
    unconfirmed txs and clear the booting flag."""
    while not P.F_STOP:
        sleep(1)
        # only work while the node is flagged as booting and has a chain tip
        if P.F_NOW_BOOTING is False:
            continue
        if chain_builder.best_block is None:
            continue
        if not back_sync_thread.is_alive():
            raise Exception('BackSync is dead!')
        # start fast sync
        my_best_block: Block = chain_builder.best_block
        start_height = my_best_block.height
        start_time = time()
        # first of all: ask the back-sync thread for the next height
        back_que.put(my_best_block.height + 1)
        while True:
            new_block: Block = get_block_from_stack(my_best_block.height + 1)
            # check blockchain continuity
            if new_block is None:
                log.debug(
                    "request height is higher than network height! sync will not need?"
                )
                stack_dict.clear()
                break
            # cannot roll back past the pruned root block — fatal, stop node
            if chain_builder.root_block is not None\
                    and chain_builder.root_block.height is not None\
                    and new_block.height <= chain_builder.root_block.height:
                log.error("cannot rollback block depth height={}".format(
                    new_block.height))
                P.F_STOP = True
                return
            if new_block.hash in chain_builder.chain:
                log.debug("new block is already known {}".format(new_block))
                my_best_block = chain_builder.get_block(
                    blockhash=new_block.hash)
                continue
            # discontinuity: step one block back and re-request from there
            if my_best_block.hash != new_block.previous_hash:
                log.debug(
                    "not chained my_best_block with new_block, rollback to {}".
                    format(my_best_block.height - 1))
                my_best_block = chain_builder.get_block(
                    blockhash=my_best_block.previous_hash)
                back_que.put(my_best_block.height + 1)
                continue
            # a valid block always carries at least the proof tx
            if len(new_block.txs) <= 0:
                log.debug("something wrong?, rollback to {}".format(
                    my_best_block.height - 1))
                my_best_block = chain_builder.get_block(
                    blockhash=my_best_block.previous_hash)
                back_que.put(my_best_block.height + 1)
                continue
            # insert
            if not new_insert_block(
                    block=new_block, f_time=False, f_sign=False):
                log.debug("failed to insert new block, rollback to {}".format(
                    my_best_block.height - 1))
                my_best_block = chain_builder.get_block(
                    blockhash=my_best_block.previous_hash)
                back_que.put(my_best_block.height + 1)
                continue
            # request next chunk when the stack is running low
            if len(stack_dict) < STACK_CHUNK_SIZE:
                if 0 < len(stack_dict):
                    back_que.put(max(stack_dict) + 1)
                else:
                    back_que.put(new_block.height + 1)
            # check reached top height
            # NOTE(review): best_hash_on_network is unpacked but unused here
            best_height_on_network, best_hash_on_network = get_best_conn_info()
            if new_block.height < best_height_on_network:
                my_best_block = new_block
                continue
            else:
                log.info("reached max height of network height={}".format(
                    best_height_on_network))
                stack_dict.clear()
                break
        # get unconfirmed txs
        log.info("next get unconfirmed txs")
        unconfirmed_txhash_set = set()
        for data in ask_all_nodes(cmd=DirectCmd.UNCONFIRMED_TX):
            unconfirmed_txhash_set.update(data['txs'])
        unconfirmed_txs = list()
        for txhash in unconfirmed_txhash_set:
            if txhash in tx_builder.unconfirmed:
                continue
            try:
                tx: TX = seek_nodes(cmd=DirectCmd.TX_BY_HASH,
                                    data={'txhash': txhash})
                tx.height = None
                fill_verified_addr_tx(tx)
                unconfirmed_txs.append(tx)
            except BlockChainError as e:
                log.debug("1: Failed get unconfirmed {} '{}'".format(
                    txhash.hex(), e))
        # re-check and register the downloaded txs, oldest first
        with create_db(V.DB_ACCOUNT_PATH) as db:
            cur = db.cursor()
            for tx in sorted(unconfirmed_txs, key=lambda x: x.time):
                try:
                    check_tx_time(tx)
                    check_tx(tx, include_block=None)
                    tx_builder.put_unconfirmed(tx=tx, outer_cur=cur)
                except BlockChainError as e:
                    log.debug("2: Failed get unconfirmed '{}'".format(e))
        # fast sync finish
        log.info("fast sync finished start={} finish={} {}m".format(
            start_height, chain_builder.best_block.height,
            int((time() - start_time) / 60)))
        P.F_NOW_BOOTING = False
        update_info_for_generate()
    log.info("close by F_STOP flag")