def send_newtx(new_tx, outer_cur=None, exc_info=True):
    assert V.PC_OBJ, "PeerClient is None."
    try:
        check_tx_time(new_tx)
        check_tx(new_tx, include_block=None)
        data = {
            'cmd': BroadcastCmd.NEW_TX,
            'data': {
                'tx': new_tx.b,
                'sign': new_tx.signature}}
        V.PC_OBJ.send_command(cmd=ClientCmd.BROADCAST, data=data)
        if new_tx.type in (C.TX_VALIDATOR_EDIT, C.TX_CONCLUDE_CONTRACT) and new_tx.hash in tx_builder.unconfirmed:
            # merge contract signature
            original_tx = tx_builder.unconfirmed[new_tx.hash]
            new_signature = list(set(new_tx.signature) | set(original_tx.signature))
            original_tx.signature = new_signature
            logging.info("Merge contract tx {}".format(new_tx))
        else:
            # normal tx
            tx_builder.put_unconfirmed(new_tx, outer_cur)
        logging.info("Success broadcast new tx {}".format(new_tx))
        return True
    except Exception as e:
        logging.warning("Failed broadcast new tx, other nodes don't accept {}".format(new_tx.getinfo()))
        logging.warning("Reason is \"{}\"".format(e))
        logging.debug("traceback,", exc_info=exc_info)
        return False
def new_tx(data):
    try:
        new_tx = TX(binary=data['tx'])
        new_tx.signature = data['sign']
        check_tx_time(new_tx)
        check_tx(tx=new_tx, include_block=None)
        if new_tx.type in (C.TX_VALIDATOR_EDIT, C.TX_CONCLUDE_CONTRACT) and new_tx.hash in tx_builder.unconfirmed:
            # merge contract signature
            original_tx = tx_builder.unconfirmed[new_tx.hash]
            new_signature = list(set(new_tx.signature) | set(original_tx.signature))
            original_tx.signature = new_signature
            logging.info("Merge contract tx {}".format(new_tx))
        else:
            # normal tx
            tx_builder.put_unconfirmed(new_tx)
        update_mining_staking_all_info()
        logging.info("Accept new tx {}".format(new_tx))
        return True
    except BlockChainError as e:
        error = 'Failed accept new tx "{}"'.format(e)
        logging.error(error)
        return False
    except Exception:
        error = "Failed accept new tx"
        logging.error(error, exc_info=True)
        return False
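# NOTE (illustrative sketch, not part of the original module): the broadcast/accept
# functions above merge partially-signed contract txs by taking the set union of the
# incoming and stored signature lists, so re-sending the same signature is idempotent
# and signatures from different co-signers accumulate on the already-known tx.
# A minimal standalone demonstration of that union step, using plain tuples in place
# of real signature objects:
def _merge_signatures_demo():
    original_signature = [('alice_pubkey', 'alice_sig')]
    incoming_signature = [('alice_pubkey', 'alice_sig'), ('bob_pubkey', 'bob_sig')]
    merged = list(set(incoming_signature) | set(original_signature))
    # duplicates collapse, both co-signers remain
    assert ('alice_pubkey', 'alice_sig') in merged and ('bob_pubkey', 'bob_sig') in merged
    return merged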
def load_bootstrap_file():
    boot_path = os.path.join(V.DB_HOME_DIR, 'bootstrap.dat')
    with open(boot_path, mode='br') as fp:
        b_data = fp.readline()
        block = None
        while b_data:
            block = pickle.loads(b64decode(b_data.rstrip()))
            for tx in block.txs:
                tx.height = None
                if tx.type in (C.TX_POW_REWARD, C.TX_POS_REWARD):
                    continue
                tx_builder.put_unconfirmed(tx)
            for tx in block.txs:
                tx.height = block.height
            new_insert_block(block=block, time_check=False)
            b_data = fp.readline()
    logging.debug("load bootstrap.dat! last={}".format(block))
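# NOTE (assumption, not taken from the original source): load_bootstrap_file() reads one
# base64-encoded, pickled Block per line, so bootstrap.dat is presumably produced by a
# writer shaped roughly like the hypothetical sketch below. dump_bootstrap_file is an
# invented name; only the line format is inferred from the reader's
# pickle.loads(b64decode(...)) path.
def dump_bootstrap_file(blocks):
    import pickle
    from base64 import b64encode
    boot_path = os.path.join(V.DB_HOME_DIR, 'bootstrap.dat')
    with open(boot_path, mode='bw') as fp:
        for block in blocks:
            # one Block per line: pickle -> base64 -> newline
            fp.write(b64encode(pickle.dumps(block)) + b'\n')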
def fill_newblock_info(data):
    new_block = Block(binary=data['block'])
    logging.debug("Fill newblock={}".format(hexlify(new_block.hash).decode()))
    proof = TX(binary=data['proof'])
    new_block.txs.append(proof)
    new_block.flag = data['block_flag']
    proof.signature = data['sign']
    # Check the block is correct info
    if not new_block.pow_check():
        raise BlockChainError('Proof of work is not satisfied.')
    my_block = builder.get_block(new_block.hash)
    if my_block:
        raise BlockChainError('Already inserted block {}'.format(my_block))
    before_block = builder.get_block(new_block.previous_hash)
    if before_block is None:
        logging.debug("Cannot find beforeBlock {}, try to ask outside node."
                      .format(hexlify(new_block.previous_hash).decode()))
        # not found beforeBlock, need to check whether another node has the block
        new_block.inner_score *= 0.70  # unknown previousBlock, score down
        before_block = make_block_by_node(blockhash=new_block.previous_hash)
        if not new_insert_block(before_block, time_check=True):
            # require time_check, it was generated only a few seconds ago
            # print([block for block in builder.chain.values()])
            raise BlockChainError('Failed insert beforeBlock {}'.format(before_block))
    new_height = before_block.height + 1
    proof.height = new_height
    new_block.height = new_height
    # Append general txs
    for txhash in data['txs'][1:]:
        tx = tx_builder.get_tx(txhash)
        if tx is None:
            new_block.inner_score *= 0.75  # unknown tx, score down
            logging.debug("Unknown tx, try to download.")
            r = ask_node(cmd=DirectCmd.TX_BY_HASH, data={'txhash': txhash}, f_continue_asking=True)
            if isinstance(r, str):
                raise BlockChainError('Failed unknown tx download "{}"'.format(r))
            tx = TX(binary=r['tx'])
            tx.signature = r['sign']
            check_tx(tx, include_block=None)
            tx_builder.put_unconfirmed(tx)
            logging.debug("Success unknown tx download {}".format(tx))
        tx.height = new_height
        new_block.txs.append(tx)
    return new_block
def fill_newblock_info(data):
    new_block: Block = Block.from_binary(binary=data['binary'])
    log.debug("fill newblock height={} newblock={}".format(data.get('height'), new_block.hash.hex()))
    proof: TX = data['proof']
    new_block.txs.append(proof)
    new_block.flag = data['block_flag']
    my_block = chain_builder.get_block(new_block.hash)
    if my_block:
        raise BlockChainError('Already inserted block {}'.format(my_block))
    before_block = chain_builder.get_block(new_block.previous_hash)
    if before_block is None:
        log.debug("Cannot find beforeBlock, try to ask outside node")
        # not found beforeBlock, need to check whether another node has the block
        new_block.inner_score *= 0.70  # unknown previousBlock, score down
        before_block = make_block_by_node(blockhash=new_block.previous_hash, depth=0)
    new_height = before_block.height + 1
    proof.height = new_height
    new_block.height = new_height
    # work check
    # TODO: correct position?
    if not new_block.pow_check():
        raise BlockChainError('Proof of work is not satisfied')
    # Append general txs
    for txhash in data['txs'][1:]:
        tx = tx_builder.get_tx(txhash)
        if tx is None:
            new_block.inner_score *= 0.75  # unknown tx, score down
            log.debug("Unknown tx, try to download")
            r = ask_node(cmd=DirectCmd.TX_BY_HASH, data={'txhash': txhash}, f_continue_asking=True)
            if isinstance(r, str):
                raise BlockChainError('Failed unknown tx download "{}"'.format(r))
            tx: TX = r
            tx.height = None
            check_tx(tx, include_block=None)
            tx_builder.put_unconfirmed(tx)
            log.debug("Success unknown tx download {}".format(tx))
        tx.height = new_height
        new_block.txs.append(tx)
    return new_block
def new_tx(data):
    try:
        new_tx = TX(binary=data['tx'])
        new_tx.signature = data['sign']
        check_tx(tx=new_tx, include_block=None)
        check_tx_time(new_tx)
        tx_builder.put_unconfirmed(new_tx)
        update_mining_staking_all_info()
        logging.info("Accept new tx {}".format(new_tx))
        return True
    except BlockChainError as e:
        error = 'Failed accept new tx "{}"'.format(e)
        logging.error(error)
        add_failed_mark(error)
        return False
    except BaseException:
        error = "Failed accept new tx"
        logging.error(error, exc_info=True)
        add_failed_mark(error)
        return False
def send_newtx(new_tx, outer_cur=None):
    assert V.PC_OBJ, "PeerClient is None."
    try:
        check_tx(new_tx, include_block=None)
        check_tx_time(new_tx)
        data = {
            'cmd': BroadcastCmd.NEW_TX,
            'data': {
                'tx': new_tx.b,
                'sign': new_tx.signature
            }
        }
        V.PC_OBJ.send_command(cmd=ClientCmd.BROADCAST, data=data)
        tx_builder.put_unconfirmed(new_tx, outer_cur)
        logging.info("Success broadcast new tx {}".format(new_tx))
        return True
    except BaseException as e:
        logging.warning("Failed broadcast new tx, other nodes don't accept {}".format(new_tx.getinfo()))
        logging.warning("Reason is \"{}\"".format(e))
        return False
def fill_newblock_info(data):
    new_block = Block(binary=data['block'])
    proof = TX(binary=data['proof'])
    new_block.txs.append(proof)
    new_block.flag = data['block_flag']
    proof.signature = data['sign']
    # Check the block is correct info
    if not new_block.pow_check():
        raise BlockChainError('Proof of work is not satisfied.')
    if builder.get_block(new_block.hash):
        raise BlockChainError('Already inserted block.')
    before_block = builder.get_block(new_block.previous_hash)
    if before_block is None:
        raise BlockChainError('Not found beforeBlock {}.'.format(
            hexlify(new_block.previous_hash).decode()))
    new_height = before_block.height + 1
    proof.height = new_height
    new_block.height = new_height
    # Append general txs
    for txhash in data['txs'][1:]:
        tx = tx_builder.get_tx(txhash)
        if tx is None:
            new_block.inner_score *= 0.75  # unknown tx, score down
            logging.debug("Unknown tx, try to download.")
            r = ask_node(cmd=DirectCmd.TX_BY_HASH, data={'txhash': txhash}, f_continue_asking=True)
            if isinstance(r, str):
                raise BlockChainError('Failed unknown tx download "{}"'.format(r))
            tx = TX(binary=r['tx'])
            tx.signature = r['sign']
            check_tx(tx, include_block=None)
            tx_builder.put_unconfirmed(tx)
            logging.debug("Success unknown tx download {}".format(tx))
        tx.height = new_height
        new_block.txs.append(tx)
    return new_block
def new_tx(data):
    try:
        new_tx: TX = data['tx']
        check_tx_time(new_tx)
        fill_verified_addr_tx(new_tx)
        check_tx(tx=new_tx, include_block=None)
        if new_tx.type in (C.TX_VALIDATOR_EDIT, C.TX_CONCLUDE_CONTRACT):
            tx_builder.marge_signature(tx=new_tx)
        else:
            tx_builder.put_unconfirmed(tx=new_tx)
        log.info("Accept new tx {}".format(new_tx))
        update_info_for_generate(u_block=False, u_unspent=False, u_unconfirmed=True)
        return True
    except BlockChainError as e:
        error = 'Failed accept new tx "{}"'.format(e)
        log.error(error, exc_info=True)
        return False
    except Exception:
        error = "Failed accept new tx"
        log.error(error, exc_info=True)
        return False
def send_newtx(new_tx, outer_cur=None, exc_info=True):
    assert V.P2P_OBJ, "PeerClient is None"
    try:
        check_tx_time(new_tx)
        check_tx(new_tx, include_block=None)
        data = {'cmd': BroadcastCmd.NEW_TX, 'data': {'tx': new_tx}}
        V.P2P_OBJ.send_command(cmd=Peer2PeerCmd.BROADCAST, data=data)
        if new_tx.type in (C.TX_VALIDATOR_EDIT, C.TX_CONCLUDE_CONTRACT):
            tx_builder.marge_signature(tx=new_tx)
        else:
            tx_builder.put_unconfirmed(tx=new_tx)
        log.info("Success broadcast new tx {}".format(new_tx))
        update_info_for_generate(u_block=False, u_unspent=True, u_unconfirmed=True)
        return True
    except Exception as e:
        log.warning("Failed broadcast new tx, other nodes don't accept {}".format(new_tx.getinfo()))
        log.warning("Reason is \"{}\"".format(e))
        log.debug("traceback,", exc_info=exc_info)
        return False
def _update_unconfirmed_info():
    with unconfirmed_lock:
        s = time()
        prune_limit = s - 10
        # 1: check upgradable pre-unconfirmed
        for tx in sorted(tx_builder.pre_unconfirmed.values(), key=lambda x: x.create_time):
            try:
                if tx.create_time > prune_limit:
                    continue  # too young tx
                if not (tx.time - C.ACCEPT_MARGIN_TIME < s - V.BLOCK_GENESIS_TIME < tx.deadline + C.ACCEPT_MARGIN_TIME):
                    del tx_builder.pre_unconfirmed[tx.hash]
                    log.debug("Remove from pre-unconfirmed, over deadline. {}".format(tx))
                    continue
                if tx.hash in tx_builder.unconfirmed:
                    del tx_builder.pre_unconfirmed[tx.hash]
                    log.debug("Remove from pre-unconfirmed, already unconfirmed. {}".format(tx))
                    continue
                # check by tx type
                if tx.type == C.TX_CONCLUDE_CONTRACT:
                    c_address, start_hash, c_storage = tx.encoded_message()
                    c = get_contract_object(c_address=c_address)
                    if c.version > -1:
                        index = start_tx2index(start_hash=start_hash)
                        if index <= c.db_index:
                            del tx_builder.pre_unconfirmed[tx.hash]
                            log.debug("remove, too old {}<{} {}".format(index, c.db_index, tx))
                            continue
                        else:
                            # c.db_index < index
                            # accept correct ordered
                            pass
                        v = get_validator_object(v_address=c.v_address)
                    else:
                        # init tx
                        v = get_validator_by_contract_info(c_address=c_address, start_hash=start_hash)
                elif tx.type == C.TX_VALIDATOR_EDIT:
                    v_address, new_address, flag, sig_diff = tx.encoded_message()
                    v = get_validator_object(v_address=v_address)
                else:
                    log.error("Why include pre-unconfirmed? {}".format(tx))
                    continue
                # check upgradable
                signed_cks = set(tx.verified_list)
                if v.require <= len(signed_cks & set(v.validators)):
                    del tx_builder.pre_unconfirmed[tx.hash]
                    if tx.hash in tx_builder.unconfirmed:
                        log.warning("Upgrade skip, already unconfirmed {}".format(tx))
                    else:
                        tx_builder.put_unconfirmed(tx=tx)
                        log.info("Upgrade pre-unconfirmed {}".format(tx))
            except Exception as e:
                log.debug("skip by '{}'".format(e), exc_info=True)
        # 2: sort and get txs to include in block
        unconfirmed_txs = [
            tx for tx in sorted(tx_builder.unconfirmed.values(), key=lambda x: x.create_time)
            if tx.create_time < prune_limit
        ]
        if len(tx_builder.unconfirmed) != len(unconfirmed_txs):
            log.debug("prune too young tx [{}/{}]".format(len(unconfirmed_txs), len(tx_builder.unconfirmed)))
        # 3: remove unconfirmed outputs using txs
        limit_height = chain_builder.best_block.height - C.MATURE_HEIGHT
        best_block, best_chain = chain_builder.get_best_chain()
        for tx in unconfirmed_txs.copy():
            if tx.height is not None:
                unconfirmed_txs.remove(tx)  # already confirmed
                continue
            # inputs check
            for txhash, txindex in tx.inputs:
                input_tx = tx_builder.get_tx(txhash=txhash)
                if input_tx is None:
                    # not found input tx
                    unconfirmed_txs.remove(tx)
                    break
                elif input_tx.height is None:
                    # use unconfirmed tx's outputs
                    unconfirmed_txs.remove(tx)
                    break
                elif input_tx.type in (C.TX_POS_REWARD, C.TX_POW_REWARD):
                    if input_tx.height > limit_height:
                        # too young generated outputs
                        unconfirmed_txs.remove(tx)
                        break
                elif is_usedindex(
                        txhash=txhash,
                        txindex=txindex,
                        except_txhash=tx.hash,
                        best_block=best_block,
                        best_chain=best_chain):
                    # ERROR: already used outputs
                    unconfirmed_txs.remove(tx)
                    break
                else:
                    pass  # all ok
        # 4: prune oversize txs
        total_size = 80 + sum(tx.size for tx in unconfirmed_txs)
        for tx in sorted(unconfirmed_txs, key=lambda x: x.gas_price):
            if total_size < C.SIZE_BLOCK_LIMIT:
                break
            unconfirmed_txs.remove(tx)
            total_size -= tx.size
        # 5: check unconfirmed order
        errored_tx = check_unconfirmed_order(
            best_block=chain_builder.best_block, ordered_unconfirmed_txs=unconfirmed_txs)
        if errored_tx is not None:
            # error is caused by removing a tx with too small a fee
            unconfirmed_txs = unconfirmed_txs[:unconfirmed_txs.index(errored_tx)]
            if errored_tx.hash in failed_txs:
                if 10 < failed_txs[errored_tx.hash]:
                    del tx_builder.unconfirmed[errored_tx.hash], failed_txs[errored_tx.hash]
                    log.warning('delete too many fail {}'.format(errored_tx))
                else:
                    failed_txs[errored_tx.hash] += 1
            else:
                failed_txs[errored_tx.hash] = 1
            log.warning('prune error tx {}'.format(errored_tx))
        # 6: update unconfirmed txs
        update_unconfirmed_txs(unconfirmed_txs)
        return ', unconfirmed={}/{}/{} {}mS'.format(
            len(unconfirmed_txs), len(tx_builder.unconfirmed), len(tx_builder.pre_unconfirmed),
            int((time() - s) * 1000))
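# NOTE (simplified standalone sketch, not part of the original module): step 4 of
# _update_unconfirmed_info() keeps the candidate block under C.SIZE_BLOCK_LIMIT by
# greedily dropping the lowest gas_price txs until the running size (80-byte header
# plus tx sizes) fits. The same idea with plain (size, gas_price) tuples:
def _prune_oversize_demo(txs, size_limit, header_size=80):
    """txs: list of (size, gas_price) tuples; returns the txs kept in the block"""
    kept = list(txs)
    total_size = header_size + sum(size for size, _ in kept)
    for tx in sorted(kept, key=lambda x: x[1]):  # cheapest fee first
        if total_size < size_limit:
            break
        kept.remove(tx)
        total_size -= tx[0]
    return kept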
def fast_sync_chain():
    assert V.PC_OBJ is not None, "Need PeerClient start before."
    global f_changed_status
    back_thread = Thread(target=background_sync_chain, name='BackSync', daemon=True)
    back_thread.start()
    start = time()
    # ask outside nodes for the next block, one by one
    failed_num = 0
    before_block = builder.best_block
    index_height = before_block.height + 1
    logging.debug("Start sync by {}".format(before_block))
    while failed_num < 5:
        if index_height in block_stack:
            new_block = block_stack[index_height]
            with write_protect_lock:
                del block_stack[index_height]
        elif backend_processing_lock.locked():
            sleep(0.1)
            continue
        else:
            with backend_processing_lock:
                logging.debug("Stack blocks on front from {}".format(index_height))
                r = ask_node(cmd=DirectCmd.BIG_BLOCKS, data={'height': index_height})
            if isinstance(r, str):
                logging.debug("NewBlockGetError:{}".format(r))
                before_block = builder.get_block(before_block.previous_hash)
                index_height = before_block.height + 1
                failed_num += 1
                continue
            elif isinstance(r, list):
                waiter = Waiter(0)
                waiter.set()
                waiter = put_to_block_stack(r, waiter)
                if waiter is None or len(block_stack) == 0:
                    break
                else:
                    waiter.wait()
                    continue
            else:
                failed_num += 1
                logging.debug("Not correct format BIG_BLOCKS.")
                continue
        # Base check
        base_check_failed_msg = None
        if before_block.hash != new_block.previous_hash:
            base_check_failed_msg = "Not correct previous hash {}".format(new_block)
        # proof of work check
        if not new_block.pow_check():
            base_check_failed_msg = "Not correct work hash {}".format(new_block)
        # rollback
        if base_check_failed_msg is not None:
            before_block = builder.get_block(before_block.previous_hash)
            index_height = before_block.height + 1
            failed_num += 1
            for height in tuple(block_stack.keys()):
                if height >= index_height:
                    del block_stack[height]
            logging.debug(base_check_failed_msg)
            continue
        # TX check
        if len(new_block.txs) > 1:
            with closing(create_db(V.DB_ACCOUNT_PATH)) as db:
                cur = db.cursor()
                for tx in new_block.txs:
                    if tx.type in (C.TX_POS_REWARD, C.TX_POW_REWARD):
                        continue
                    check_tx(tx=tx, include_block=None)
                    tx_builder.put_unconfirmed(tx=tx, outer_cur=cur)
                db.commit()
        # Block check
        check_block(new_block)
        for tx in new_block.txs:
            tx.height = new_block.height
            check_tx(tx=tx, include_block=new_block)
        # insert into the chain
        builder.new_block(new_block)
        for tx in new_block.txs:
            user_account.affect_new_tx(tx)
        builder.batch_apply()
        f_changed_status = True
        # next block
        failed_num = 0
        before_block = new_block
        index_height = before_block.height + 1
        # logging
        if index_height % 100 == 0:
            logging.debug("Update block {} now...".format(index_height + 1))
    # get unconfirmed txs
    logging.info("Finish get block, next get unconfirmed.")
    r = None
    while not isinstance(r, dict):
        r = ask_node(cmd=DirectCmd.UNCONFIRMED_TX, f_continue_asking=True)
    for txhash in r['txs']:
        if txhash in tx_builder.unconfirmed:
            continue
        try:
            r = ask_node(cmd=DirectCmd.TX_BY_HASH, data={'txhash': txhash}, f_continue_asking=True)
            tx = TX(binary=r['tx'])
            tx.signature = r['sign']
            check_tx_time(tx)
            check_tx(tx, include_block=None)
            tx_builder.put_unconfirmed(tx)
        except BlockChainError:
            logging.debug("Failed get unconfirmed {}".format(hexlify(txhash).decode()))
    # final judgement
    reset_good_node()
    set_good_node()
    my_best_height = builder.best_block.height
    best_height_on_network, best_hash_on_network = get_best_conn_info()
    if best_height_on_network <= my_best_height:
        logging.info("Finish update chain data by network. {}Sec [{}<={}]".format(
            round(time() - start, 1), best_height_on_network, my_best_height))
        return True
    else:
        logging.debug("Continue update chain, {}<={}".format(best_height_on_network, my_best_height))
        return False
def _main_loop():
    while not P.F_STOP:
        sleep(1)
        if P.F_NOW_BOOTING is False:
            continue
        if chain_builder.best_block is None:
            continue
        if not back_sync_thread.is_alive():
            raise Exception('BackSync is dead!')
        # start fast sync
        my_best_block: Block = chain_builder.best_block
        start_height = my_best_block.height
        start_time = time()
        # first of all
        back_que.put(my_best_block.height + 1)
        while True:
            new_block: Block = get_block_from_stack(my_best_block.height + 1)
            # check blockchain continuity
            if new_block is None:
                log.debug("request height is higher than network height! sync will not need?")
                stack_dict.clear()
                break
            if chain_builder.root_block is not None \
                    and chain_builder.root_block.height is not None \
                    and new_block.height <= chain_builder.root_block.height:
                log.error("cannot rollback block depth height={}".format(new_block.height))
                P.F_STOP = True
                return
            if new_block.hash in chain_builder.chain:
                log.debug("new block is already known {}".format(new_block))
                my_best_block = chain_builder.get_block(blockhash=new_block.hash)
                continue
            if my_best_block.hash != new_block.previous_hash:
                log.debug("not chained my_best_block with new_block, rollback to {}".format(my_best_block.height - 1))
                my_best_block = chain_builder.get_block(blockhash=my_best_block.previous_hash)
                back_que.put(my_best_block.height + 1)
                continue
            if len(new_block.txs) <= 0:
                log.debug("something wrong?, rollback to {}".format(my_best_block.height - 1))
                my_best_block = chain_builder.get_block(blockhash=my_best_block.previous_hash)
                back_que.put(my_best_block.height + 1)
                continue
            # insert
            if not new_insert_block(block=new_block, f_time=False, f_sign=False):
                log.debug("failed to insert new block, rollback to {}".format(my_best_block.height - 1))
                my_best_block = chain_builder.get_block(blockhash=my_best_block.previous_hash)
                back_que.put(my_best_block.height + 1)
                continue
            # request next chunk
            if len(stack_dict) < STACK_CHUNK_SIZE:
                if 0 < len(stack_dict):
                    back_que.put(max(stack_dict) + 1)
                else:
                    back_que.put(new_block.height + 1)
            # check reached top height
            best_height_on_network, best_hash_on_network = get_best_conn_info()
            if new_block.height < best_height_on_network:
                my_best_block = new_block
                continue
            else:
                log.info("reached max height of network height={}".format(best_height_on_network))
                stack_dict.clear()
                break
        # get unconfirmed txs
        log.info("next get unconfirmed txs")
        unconfirmed_txhash_set = set()
        for data in ask_all_nodes(cmd=DirectCmd.UNCONFIRMED_TX):
            unconfirmed_txhash_set.update(data['txs'])
        unconfirmed_txs = list()
        for txhash in unconfirmed_txhash_set:
            if txhash in tx_builder.unconfirmed:
                continue
            try:
                tx: TX = seek_nodes(cmd=DirectCmd.TX_BY_HASH, data={'txhash': txhash})
                tx.height = None
                fill_verified_addr_tx(tx)
                unconfirmed_txs.append(tx)
            except BlockChainError as e:
                log.debug("1: Failed get unconfirmed {} '{}'".format(txhash.hex(), e))
        with create_db(V.DB_ACCOUNT_PATH) as db:
            cur = db.cursor()
            for tx in sorted(unconfirmed_txs, key=lambda x: x.time):
                try:
                    check_tx_time(tx)
                    check_tx(tx, include_block=None)
                    tx_builder.put_unconfirmed(tx=tx, outer_cur=cur)
                except BlockChainError as e:
                    log.debug("2: Failed get unconfirmed '{}'".format(e))
        # fast sync finish
        log.info("fast sync finished start={} finish={} {}m".format(
            start_height, chain_builder.best_block.height, int((time() - start_time) / 60)))
        P.F_NOW_BOOTING = False
        update_info_for_generate()
    log.info("close by F_STOP flag")