def send_newtx(new_tx, outer_cur=None, exc_info=True):
    assert V.PC_OBJ, "PeerClient is None."
    try:
        check_tx_time(new_tx)
        check_tx(new_tx, include_block=None)
        data = {
            'cmd': BroadcastCmd.NEW_TX,
            'data': {'tx': new_tx.b, 'sign': new_tx.signature}}
        V.PC_OBJ.send_command(cmd=ClientCmd.BROADCAST, data=data)
        if new_tx.type in (C.TX_VALIDATOR_EDIT, C.TX_CONCLUDE_CONTRACT) \
                and new_tx.hash in tx_builder.unconfirmed:
            # merge contract signatures into the already-known unconfirmed tx
            original_tx = tx_builder.unconfirmed[new_tx.hash]
            new_signature = list(set(new_tx.signature) | set(original_tx.signature))
            original_tx.signature = new_signature
            logging.info("Merge contract tx {}".format(new_tx))
        else:
            # normal tx
            tx_builder.put_unconfirmed(new_tx, outer_cur)
        logging.info("Success broadcast new tx {}".format(new_tx))
        return True
    except Exception as e:
        logging.warning("Failed broadcast new tx, other nodes don't accept {}"
                        .format(new_tx.getinfo()))
        logging.warning("Reason is \"{}\"".format(e))
        logging.debug("traceback,", exc_info=exc_info)
        return False
def new_tx(data):
    try:
        new_tx = TX(binary=data['tx'])
        new_tx.signature = data['sign']
        check_tx_time(new_tx)
        check_tx(tx=new_tx, include_block=None)
        if new_tx.type in (C.TX_VALIDATOR_EDIT, C.TX_CONCLUDE_CONTRACT) \
                and new_tx.hash in tx_builder.unconfirmed:
            # merge contract signatures into the already-known unconfirmed tx
            original_tx = tx_builder.unconfirmed[new_tx.hash]
            new_signature = list(set(new_tx.signature) | set(original_tx.signature))
            original_tx.signature = new_signature
            logging.info("Merge contract tx {}".format(new_tx))
        else:
            # normal tx
            tx_builder.put_unconfirmed(new_tx)
        update_mining_staking_all_info()
        logging.info("Accept new tx {}".format(new_tx))
        return True
    except BlockChainError as e:
        error = 'Failed accept new tx "{}"'.format(e)
        logging.error(error)
        return False
    except Exception:
        error = "Failed accept new tx"
        logging.error(error, exc_info=True)
        return False
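The signature-merge branch above simply unions the incoming signatures with those already attached to the unconfirmed copy of the same transaction. A minimal standalone sketch of that idea, assuming signatures are hashable (pubkey, signature) pairs; the exact element shape is an assumption not shown in these functions:

def merge_signatures(original_sign, incoming_sign):
    # hypothetical helper: union two signature lists without duplicates,
    # mirroring `list(set(new_tx.signature) | set(original_tx.signature))` above
    return list(set(original_sign) | set(incoming_sign))

# usage: two partially signed copies of the same contract tx
a = [('pubkey1', 'sig1')]
b = [('pubkey1', 'sig1'), ('pubkey2', 'sig2')]
merged = merge_signatures(a, b)
assert set(merged) == {('pubkey1', 'sig1'), ('pubkey2', 'sig2')}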
def new_tx(data):
    try:
        new_tx = TX(binary=data['tx'])
        new_tx.signature = data['sign']
        check_tx(tx=new_tx, include_block=None)
        check_tx_time(new_tx)
        tx_builder.put_unconfirmed(new_tx)
        update_mining_staking_all_info()
        logging.info("Accept new tx {}".format(new_tx))
        return True
    except BlockChainError as e:
        error = 'Failed accept new tx "{}"'.format(e)
        logging.error(error)
        add_failed_mark(error)
        return False
    except BaseException:
        error = "Failed accept new tx"
        logging.error(error, exc_info=True)
        add_failed_mark(error)
        return False
def send_newtx(new_tx, outer_cur=None):
    assert V.PC_OBJ, "PeerClient is None."
    try:
        check_tx(new_tx, include_block=None)
        check_tx_time(new_tx)
        data = {
            'cmd': BroadcastCmd.NEW_TX,
            'data': {'tx': new_tx.b, 'sign': new_tx.signature}}
        V.PC_OBJ.send_command(cmd=ClientCmd.BROADCAST, data=data)
        tx_builder.put_unconfirmed(new_tx, outer_cur)
        logging.info("Success broadcast new tx {}".format(new_tx))
        return True
    except BaseException as e:
        logging.warning("Failed broadcast new tx, other nodes don't accept {}"
                        .format(new_tx.getinfo()))
        logging.warning("Reason is \"{}\"".format(e))
        return False
def new_tx(data):
    try:
        new_tx: TX = data['tx']
        check_tx_time(new_tx)
        fill_verified_addr_tx(new_tx)
        check_tx(tx=new_tx, include_block=None)
        if new_tx.type in (C.TX_VALIDATOR_EDIT, C.TX_CONCLUDE_CONTRACT):
            tx_builder.marge_signature(tx=new_tx)
        else:
            tx_builder.put_unconfirmed(tx=new_tx)
        log.info("Accept new tx {}".format(new_tx))
        update_info_for_generate(u_block=False, u_unspent=False, u_unconfirmed=True)
        return True
    except BlockChainError as e:
        error = 'Failed accept new tx "{}"'.format(e)
        log.error(error, exc_info=True)
        return False
    except Exception:
        error = "Failed accept new tx"
        log.error(error, exc_info=True)
        return False
def send_newtx(new_tx, outer_cur=None, exc_info=True):
    assert V.P2P_OBJ, "PeerClient is None"
    try:
        check_tx_time(new_tx)
        check_tx(new_tx, include_block=None)
        data = {'cmd': BroadcastCmd.NEW_TX, 'data': {'tx': new_tx}}
        V.P2P_OBJ.send_command(cmd=Peer2PeerCmd.BROADCAST, data=data)
        if new_tx.type in (C.TX_VALIDATOR_EDIT, C.TX_CONCLUDE_CONTRACT):
            tx_builder.marge_signature(tx=new_tx)
        else:
            tx_builder.put_unconfirmed(tx=new_tx)
        log.info("Success broadcast new tx {}".format(new_tx))
        update_info_for_generate(u_block=False, u_unspent=True, u_unconfirmed=True)
        return True
    except Exception as e:
        log.warning("Failed broadcast new tx, other nodes don't accept {}"
                    .format(new_tx.getinfo()))
        log.warning("Reason is \"{}\"".format(e))
        log.debug("traceback,", exc_info=exc_info)
        return False
def fast_sync_chain():
    assert V.PC_OBJ is not None, "Need PeerClient start before."
    global f_changed_status
    back_thread = Thread(target=background_sync_chain, name='BackSync', daemon=True)
    back_thread.start()
    start = time()
    # ask external nodes for the next blocks one by one
    failed_num = 0
    before_block = builder.best_block
    index_height = before_block.height + 1
    logging.debug("Start sync by {}".format(before_block))
    while failed_num < 5:
        if index_height in block_stack:
            new_block = block_stack[index_height]
            with write_protect_lock:
                del block_stack[index_height]
        elif backend_processing_lock.locked():
            sleep(0.1)
            continue
        else:
            with backend_processing_lock:
                logging.debug("Stack blocks on front from {}".format(index_height))
                r = ask_node(cmd=DirectCmd.BIG_BLOCKS, data={'height': index_height})
                if isinstance(r, str):
                    logging.debug("NewBlockGetError:{}".format(r))
                    before_block = builder.get_block(before_block.previous_hash)
                    index_height = before_block.height + 1
                    failed_num += 1
                    continue
                elif isinstance(r, list):
                    waiter = Waiter(0)
                    waiter.set()
                    waiter = put_to_block_stack(r, waiter)
                    if waiter is None or len(block_stack) == 0:
                        break
                    else:
                        waiter.wait()
                        continue
                else:
                    failed_num += 1
                    logging.debug("Not correct format BIG_BLOCKS.")
                    continue
        # Base check
        base_check_failed_msg = None
        if before_block.hash != new_block.previous_hash:
            base_check_failed_msg = "Not correct previous hash {}".format(new_block)
        # proof of work check
        if not new_block.pow_check():
            base_check_failed_msg = "Not correct work hash {}".format(new_block)
        # rollback
        if base_check_failed_msg is not None:
            before_block = builder.get_block(before_block.previous_hash)
            index_height = before_block.height + 1
            failed_num += 1
            for height in tuple(block_stack.keys()):
                if height >= index_height:
                    del block_stack[height]
            logging.debug(base_check_failed_msg)
            continue
        # TX check
        if len(new_block.txs) > 1:
            with closing(create_db(V.DB_ACCOUNT_PATH)) as db:
                cur = db.cursor()
                for tx in new_block.txs:
                    if tx.type in (C.TX_POS_REWARD, C.TX_POW_REWARD):
                        continue
                    check_tx(tx=tx, include_block=None)
                    tx_builder.put_unconfirmed(tx=tx, outer_cur=cur)
                db.commit()
        # Block check
        check_block(new_block)
        for tx in new_block.txs:
            tx.height = new_block.height
            check_tx(tx=tx, include_block=new_block)
        # insert into the chain
        builder.new_block(new_block)
        for tx in new_block.txs:
            user_account.affect_new_tx(tx)
        builder.batch_apply()
        f_changed_status = True
        # next block
        failed_num = 0
        before_block = new_block
        index_height = before_block.height + 1
        # logging
        if index_height % 100 == 0:
            logging.debug("Update block {} now...".format(index_height + 1))
    # get unconfirmed txs
    logging.info("Finish get block, next get unconfirmed.")
    r = None
    while not isinstance(r, dict):
        r = ask_node(cmd=DirectCmd.UNCONFIRMED_TX, f_continue_asking=True)
    for txhash in r['txs']:
        if txhash in tx_builder.unconfirmed:
            continue
        try:
            r = ask_node(cmd=DirectCmd.TX_BY_HASH, data={'txhash': txhash}, f_continue_asking=True)
            tx = TX(binary=r['tx'])
            tx.signature = r['sign']
            check_tx_time(tx)
            check_tx(tx, include_block=None)
            tx_builder.put_unconfirmed(tx)
        except BlockChainError:
            logging.debug("Failed get unconfirmed {}".format(hexlify(txhash).decode()))
    # final check
    reset_good_node()
    set_good_node()
    my_best_height = builder.best_block.height
    best_height_on_network, best_hash_on_network = get_best_conn_info()
    if best_height_on_network <= my_best_height:
        logging.info("Finish update chain data by network. {}Sec [{}<={}]".format(
            round(time() - start, 1), best_height_on_network, my_best_height))
        return True
    else:
        logging.debug("Continue update chain, {}<={}".format(
            best_height_on_network, my_best_height))
        return False
def _main_loop():
    while not P.F_STOP:
        sleep(1)
        if P.F_NOW_BOOTING is False:
            continue
        if chain_builder.best_block is None:
            continue
        if not back_sync_thread.is_alive():
            raise Exception('BackSync is dead!')
        # start fast sync
        my_best_block: Block = chain_builder.best_block
        start_height = my_best_block.height
        start_time = time()
        # first of all
        back_que.put(my_best_block.height + 1)
        while True:
            new_block: Block = get_block_from_stack(my_best_block.height + 1)
            # check blockchain continuity
            if new_block is None:
                log.debug("request height is higher than network height! sync may not be needed?")
                stack_dict.clear()
                break
            if chain_builder.root_block is not None \
                    and chain_builder.root_block.height is not None \
                    and new_block.height <= chain_builder.root_block.height:
                log.error("cannot rollback block depth height={}".format(new_block.height))
                P.F_STOP = True
                return
            if new_block.hash in chain_builder.chain:
                log.debug("new block is already known {}".format(new_block))
                my_best_block = chain_builder.get_block(blockhash=new_block.hash)
                continue
            if my_best_block.hash != new_block.previous_hash:
                log.debug("not chained my_best_block with new_block, rollback to {}".format(
                    my_best_block.height - 1))
                my_best_block = chain_builder.get_block(blockhash=my_best_block.previous_hash)
                back_que.put(my_best_block.height + 1)
                continue
            if len(new_block.txs) <= 0:
                log.debug("something wrong?, rollback to {}".format(my_best_block.height - 1))
                my_best_block = chain_builder.get_block(blockhash=my_best_block.previous_hash)
                back_que.put(my_best_block.height + 1)
                continue
            # insert
            if not new_insert_block(block=new_block, f_time=False, f_sign=False):
                log.debug("failed to insert new block, rollback to {}".format(
                    my_best_block.height - 1))
                my_best_block = chain_builder.get_block(blockhash=my_best_block.previous_hash)
                back_que.put(my_best_block.height + 1)
                continue
            # request next chunk
            if len(stack_dict) < STACK_CHUNK_SIZE:
                if 0 < len(stack_dict):
                    back_que.put(max(stack_dict) + 1)
                else:
                    back_que.put(new_block.height + 1)
            # check whether we reached the top height of the network
            best_height_on_network, best_hash_on_network = get_best_conn_info()
            if new_block.height < best_height_on_network:
                my_best_block = new_block
                continue
            else:
                log.info("reached max height of network height={}".format(best_height_on_network))
                stack_dict.clear()
                break
        # get unconfirmed txs
        log.info("next get unconfirmed txs")
        unconfirmed_txhash_set = set()
        for data in ask_all_nodes(cmd=DirectCmd.UNCONFIRMED_TX):
            unconfirmed_txhash_set.update(data['txs'])
        unconfirmed_txs = list()
        for txhash in unconfirmed_txhash_set:
            if txhash in tx_builder.unconfirmed:
                continue
            try:
                tx: TX = seek_nodes(cmd=DirectCmd.TX_BY_HASH, data={'txhash': txhash})
                tx.height = None
                fill_verified_addr_tx(tx)
                unconfirmed_txs.append(tx)
            except BlockChainError as e:
                log.debug("1: Failed get unconfirmed {} '{}'".format(txhash.hex(), e))
        with create_db(V.DB_ACCOUNT_PATH) as db:
            cur = db.cursor()
            for tx in sorted(unconfirmed_txs, key=lambda x: x.time):
                try:
                    check_tx_time(tx)
                    check_tx(tx, include_block=None)
                    tx_builder.put_unconfirmed(tx=tx, outer_cur=cur)
                except BlockChainError as e:
                    log.debug("2: Failed get unconfirmed '{}'".format(e))
        # fast sync finished
        log.info("fast sync finished start={} finish={} {}m".format(
            start_height, chain_builder.best_block.height,
            int((time() - start_time) / 60)))
        P.F_NOW_BOOTING = False
        update_info_for_generate()
    log.info("close by F_STOP flag")
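Both sync loops above share the same recovery pattern: when the candidate block does not reference the current best block's hash, step the local pointer back one block and re-request from that height. A toy, self-contained sketch of that control flow; the Block tuple, the in-memory dict, and sync_step are illustrative stand-ins, not the real chain_builder API:

from collections import namedtuple

Block = namedtuple('Block', 'height hash previous_hash')

def sync_step(chain_by_hash, my_best, candidate):
    # hypothetical helper: return (new best block, height to request next)
    if candidate.previous_hash != my_best.hash:
        # not chained onto our best block: roll back one block and ask again from there
        rolled_back = chain_by_hash[my_best.previous_hash]
        return rolled_back, rolled_back.height + 1
    # chained correctly: accept it and ask for the following height
    return candidate, candidate.height + 1

# usage with a two-block toy chain and an orphan candidate
g = Block(0, 'h0', None)
b1 = Block(1, 'h1', 'h0')
chain = {'h0': g, 'h1': b1}
orphan = Block(2, 'hx', 'h9')         # does not chain onto b1
best, ask = sync_step(chain, b1, orphan)
assert (best, ask) == (g, 1)          # rolled back to genesis, re-request height 1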