def get_bias_by_hash(previous_hash, consensus): N = 30 # target blocks if consensus == C.BLOCK_GENESIS: return 1.0 elif previous_hash == GENESIS_PREVIOUS_HASH: return 1.0 base_difficulty_sum = BASE_TARGET * N target_diffs = list() target_hash = previous_hash for _ in range(MAX_SEARCH_BLOCKS): target_block = chain_builder.get_block(target_hash) if target_block is None: return 1.0 target_hash = target_block.previous_hash if target_hash == GENESIS_PREVIOUS_HASH: return 1.0 elif target_block.flag == consensus and N > len(target_diffs): target_diffs.append( bits2target(target_block.bits) * (N - len(target_diffs))) elif len(target_diffs) >= N: break else: # search too many block if len(target_diffs) == 0: return 1.0 else: return BASE_TARGET * len(target_diffs) / sum(target_diffs) bias = base_difficulty_sum / sum(target_diffs) if Debug.F_SHOW_DIFFICULTY: print("bias", bias, previous_hash.hex()) return bias
def _big_blocks(index_height):
    """Collect up to 20 consecutive blocks starting from *index_height*.

    Stops early at the first missing height and returns whatever was found.
    """
    blocks = []
    height = index_height
    while height < index_height + 20:
        fetched = chain_builder.get_block(height=height)
        if fetched is None:
            break
        blocks.append(fetched)
        height += 1
    # TODO: check against the maximum number of bytes that can be sent at once
    return blocks
def fill_newblock_info(data):
    """Build a full Block object from a received new-block message *data*.

    data keys used: 'binary' (80-byte header), 'height', 'proof' (coinbase-like
    TX), 'block_flag', 'txs' (ordered tx hash list, first entry is the proof).
    Raises BlockChainError if the block is already known or fails PoW check.
    """
    new_block: Block = Block.from_binary(binary=data['binary'])
    log.debug("fill newblock height={} newblock={}".format(
        data.get('height'), new_block.hash.hex()))
    proof: TX = data['proof']
    new_block.txs.append(proof)
    new_block.flag = data['block_flag']
    # reject duplicates early
    my_block = chain_builder.get_block(new_block.hash)
    if my_block:
        raise BlockChainError('Already inserted block {}'.format(my_block))
    before_block = chain_builder.get_block(new_block.previous_hash)
    if before_block is None:
        log.debug("Cannot find beforeBlock, try to ask outside node")
        # not found beforeBlock, need to check other node have the block
        new_block.inner_score *= 0.70  # unknown previousBlock, score down
        before_block = make_block_by_node(blockhash=new_block.previous_hash,
                                          depth=0)
    new_height = before_block.height + 1
    proof.height = new_height
    new_block.height = new_height
    # work check
    # TODO: correct position?
    if not new_block.pow_check():
        raise BlockChainError('Proof of work is not satisfied')
    # Append general txs (skip index 0, which is the proof already appended)
    for txhash in data['txs'][1:]:
        tx = tx_builder.get_tx(txhash)
        if tx is None:
            new_block.inner_score *= 0.75  # unknown tx, score down
            log.debug("Unknown tx, try to download")
            r = ask_node(cmd=DirectCmd.TX_BY_HASH,
                         data={'txhash': txhash},
                         f_continue_asking=True)
            if isinstance(r, str):
                # helper returns an error string on failure
                raise BlockChainError(
                    'Failed unknown tx download "{}"'.format(r))
            tx: TX = r
            tx.height = None
            check_tx(tx, include_block=None)
            tx_builder.put_unconfirmed(tx)
            log.debug("Success unknown tx download {}".format(tx))
        tx.height = new_height
        new_block.txs.append(tx)
    return new_block
def start_tx2index(start_hash=None, start_tx=None):
    """Map a confirmed tx (given by hash or object) to its global chain index.

    The index packs block height and in-block position into one integer.
    Raises BlockChainError when the tx is unconfirmed or cannot be located.
    """
    if start_hash:
        start_tx = tx_builder.get_tx(txhash=start_hash)
    if start_tx.height is None:
        raise BlockChainError('Not confirmed startTX {}'.format(start_tx))
    holder = chain_builder.get_block(height=start_tx.height)
    if holder is None:
        raise BlockChainError(
            'Not found block of start_tx included? {}'.format(start_tx))
    if start_tx not in holder.txs:
        raise BlockChainError('Not found start_tx in block? {}'.format(holder))
    position = holder.txs.index(start_tx)
    return start_tx.height * 0xffffffff + position
async def get_block_by_height(request):
    """HTTP API: return the block at ?height=N.

    Optional query params: 'pickle' (truthy -> base64-encoded pickle payload)
    and 'txinfo' ('true' -> expand transaction details).
    Responds 400 when height is missing, unknown, or the block is gone.
    """
    f_pickled = request.query.get('pickle', False)
    with_tx_info = request.query.get('txinfo', 'false')
    try:
        height = int(request.query['height'])
    except Exception:  # fixed: previously bound an unused `e`
        return web.Response(text="Height is not specified", status=400)
    blockhash = chain_builder.get_block_hash(height)
    if blockhash is None:
        return web.Response(text="Not found height", status=400)
    block = chain_builder.get_block(blockhash)
    # guard added for consistency with get_block_by_hash: hash->block lookup may miss
    if block is None:
        return web.Response(text="Not found block", status=400)
    if f_pickled:
        block = pickle.dumps(block)
        return utils.json_res(b64encode(block).decode())
    data = block.getinfo(with_tx_info == 'true')
    data['hex'] = block.b.hex()
    return utils.json_res(data)
async def get_block_by_hash(request):
    """HTTP API: return the block identified by ?hash=<hex>.

    Optional query params: 'pickle' (truthy -> base64-encoded pickle payload)
    and 'txinfo' ('true' -> expand transaction details).
    Responds 400 on missing/unknown hash, generic error response otherwise.
    """
    try:
        f_pickled = request.query.get('pickle', False)
        with_tx_info = request.query.get('txinfo', 'false')
        blockhash = request.query.get('hash')
        if blockhash is None:
            # fixed copy-paste message: this endpoint takes a hash, not a height
            return web.Response(text="Not found hash", status=400)
        blockhash = a2b_hex(blockhash)
        block = chain_builder.get_block(blockhash)
        if block is None:
            return web.Response(text="Not found block", status=400)
        if f_pickled:
            block = pickle.dumps(block)
            return utils.json_res(b64encode(block).decode())
        data = block.getinfo(with_tx_info == 'true')
        data['size'] = block.size
        data['hex'] = block.b.hex()
        return utils.json_res(data)
    except Exception:  # fixed: previously bound an unused `e`
        return utils.error_res()
async def create_bootstrap(request):
    """HTTP API: export blocks up to the root block as bootstrap-verX.dat.gz.

    Streams (block, work_hash, bias) msgpack tuples into a gzip file under
    V.DB_HOME_DIR, yielding to the event loop after every block.
    Returns export stats as JSON, or a generic error response on failure.
    """
    try:
        boot_path = os.path.join(
            V.DB_HOME_DIR,
            'bootstrap-ver{}.dat.gz'.format(__chain_version__))
        if os.path.exists(boot_path):
            log.warning("remove old bootstrap.dat.gz file")
            os.remove(boot_path)
        if chain_builder.root_block is None:
            # BUG FIX: the exception was constructed but never raised,
            # so a missing root block fell through to an AttributeError below
            raise Exception('root block is None?')
        s = time()
        block = None
        stop_height = chain_builder.root_block.height
        log.info(
            "start create bootstrap.dat.gz data to {}".format(stop_height))
        with gzip.open(boot_path, mode='ab') as fp:
            for height, blockhash in chain_builder.db.read_block_hash_iter(
                    start_height=1):
                if stop_height <= height:
                    break
                block = chain_builder.get_block(blockhash=blockhash)
                if block is None:
                    break
                fp.write(msgpack.dumps((block, block.work_hash, block.bias)))
                await asyncio.sleep(0.0)  # keep the event loop responsive
                if block.height % 100 == 0:
                    log.info(
                        "create bootstrap.dat.gz height={} {}s passed".format(
                            block.height, round(time() - s)))
        log.info(
            "create new bootstrap.dat.gz finished, last={} {}Minutes".format(
                block, (time() - s) // 60))
        return utils.json_res({
            "height": stop_height,
            "start_time": int(s),
            "finish_time": int(time())
        })
    except Exception:
        return utils.error_res()
def make_block_by_node(blockhash, depth):
    """ create Block by outside node """
    log.debug("make block by node depth={} hash={}".format(
        depth, blockhash.hex()))
    # fetch the block itself from peers
    block: Block = seek_nodes(cmd=DirectCmd.BLOCK_BY_HASH,
                              data={'blockhash': blockhash})
    parent = chain_builder.get_block(blockhash=block.previous_hash)
    if parent is None:
        # unknown parent: recurse toward the chain root, bounded by depth
        if depth >= C.MAX_RECURSIVE_BLOCK_DEPTH:
            raise BlockChainError(
                'Cannot recursive get block depth={} hash={}'.format(
                    depth, block.previous_hash.hex()))
        parent = make_block_by_node(blockhash=block.previous_hash,
                                    depth=depth + 1)
    new_height = parent.height + 1
    block.height = new_height
    block.inner_score *= 0.70  # fetched from outside, score down
    for tx in block.txs:
        tx.height = new_height
    if not new_insert_block(block=block, f_time=False, f_sign=True):
        raise BlockChainError(
            'Failed insert beforeBlock {}'.format(parent))
    return block
async def submitblock(*args, **kwargs):
    """
    Attempts to submit new block to network.
    See https://en.bitcoin.it/wiki/BIP_0022 for full specification.

    Arguments
    1. "hexdata"        (string, required) the hex-encoded block data to submit
    2. "dummy"          (optional) dummy value, for compatibility with BIP22. This value is ignored.

    Result:
    null if success
    string if failed
    """
    if len(args) == 0:
        raise ValueError('no argument found')
    block_hex_or_obj = args[0]
    if isinstance(block_hex_or_obj, str):
        block_bin = a2b_hex(block_hex_or_obj)
        # Block: first 80 bytes are the fixed-size header
        mined_block = Block.from_binary(binary=block_bin[:80])
        if mined_block.previous_hash != chain_builder.best_block.hash:
            return 'PreviousHash don\'t match'
        previous_block = chain_builder.get_block(mined_block.previous_hash)
        mined_block.height = previous_block.height + 1
        # NOTE(review): consensus flag is smuggled in via the RPC password field — confirm
        mined_block.flag = int(kwargs['password'])
        # tx length: Bitcoin-style varint at offset 80
        storage_flag = int.from_bytes(block_bin[80:81], 'little')
        if storage_flag < 0xfd:
            tx_len = storage_flag
            pos = 81
        elif storage_flag == 0xfd:
            tx_len = int.from_bytes(block_bin[81:83], 'little')
            pos = 83
        elif storage_flag == 0xfe:
            tx_len = int.from_bytes(block_bin[81:85], 'little')
            pos = 85
        else:  # == 0xff
            tx_len = int.from_bytes(block_bin[81:89], 'little')
            pos = 89
        log.debug("RpcSubmit block: pos={}, tx_len={}".format(pos, tx_len))
        # correct txs: deserialize each tx in sequence from the remaining bytes
        while len(block_bin) > pos:
            tx = TX()
            tx.b = block_bin
            tx.deserialize(first_pos=pos, f_raise=False)
            if tx.version != __chain_version__:
                return 'tx_ver do not match [{}!={}]'.format(
                    tx.version, __chain_version__)
            pos += len(tx.b)  # deserialize truncates tx.b to the consumed bytes
            # prefer the already-known tx object when available
            mined_block.txs.append(tx_builder.get_tx(txhash=tx.hash, default=tx))
        # check format: counts and byte positions must line up exactly
        if tx_len != len(mined_block.txs):
            return 'Do not match txlen [{}!={}]'.format(
                tx_len, len(mined_block.txs))
        if pos != len(block_bin):
            return 'Do not match pos [{}!={}]'.format(pos, len(block_bin))
    elif isinstance(block_hex_or_obj, Block):
        # already-constructed Block object: just fix up height and flag
        mined_block = block_hex_or_obj
        previous_block = chain_builder.get_block(mined_block.previous_hash)
        mined_block.height = previous_block.height + 1
        mined_block.flag = int(kwargs['password'])
    else:
        return 'Unknown input? -> {}'.format(block_hex_or_obj)
    mined_block.update_pow()
    if mined_block.pow_check():
        confirmed_generating_block(mined_block)
        return None  # accepted
    else:
        return 'not satisfied work'
def _block_by_hash(blockhash):
    """Look up a block by hash; return an error string when unknown."""
    found = chain_builder.get_block(blockhash=blockhash)
    if found is not None:
        return found
    return 'Not found blockhash {}'.format(blockhash.hex())
def _block_by_height(height):
    """Look up a block by height; return an error string when unknown."""
    found = chain_builder.get_block(height=height)
    if found is None:
        return 'Not found block height {}'.format(height)
    return found
def _main_loop():
    """Fast-sync worker loop.

    While booting (P.F_NOW_BOOTING), pulls blocks from the back-sync stack,
    inserts them one by one (rolling back the local best block on any
    mismatch), then imports peers' unconfirmed transactions.  Runs until
    P.F_STOP is set.
    """
    while not P.F_STOP:
        sleep(1)
        if P.F_NOW_BOOTING is False:
            continue  # only work while the node is (re)booting/syncing
        if chain_builder.best_block is None:
            continue  # chain not initialized yet
        if not back_sync_thread.is_alive():
            raise Exception('BackSync is dead!')
        # start fast sync
        my_best_block: Block = chain_builder.best_block
        start_height = my_best_block.height
        start_time = time()
        # first of all: request the first chunk of blocks above our tip
        back_que.put(my_best_block.height + 1)
        while True:
            new_block: Block = get_block_from_stack(my_best_block.height + 1)
            # check blockchain continuity
            if new_block is None:
                log.debug(
                    "request height is higher than network height! sync will not need?"
                )
                stack_dict.clear()
                break
            # never roll back below the pruning root
            if chain_builder.root_block is not None\
                    and chain_builder.root_block.height is not None\
                    and new_block.height <= chain_builder.root_block.height:
                log.error("cannot rollback block depth height={}".format(
                    new_block.height))
                P.F_STOP = True
                return
            if new_block.hash in chain_builder.chain:
                # already have it: advance our cursor and continue
                log.debug("new block is already known {}".format(new_block))
                my_best_block = chain_builder.get_block(
                    blockhash=new_block.hash)
                continue
            if my_best_block.hash != new_block.previous_hash:
                # fork detected: step back one block and re-request
                log.debug(
                    "not chained my_best_block with new_block, rollback to {}".
                    format(my_best_block.height - 1))
                my_best_block = chain_builder.get_block(
                    blockhash=my_best_block.previous_hash)
                back_que.put(my_best_block.height + 1)
                continue
            if len(new_block.txs) <= 0:
                # a valid block always carries at least its proof tx
                log.debug("something wrong?, rollback to {}".format(
                    my_best_block.height - 1))
                my_best_block = chain_builder.get_block(
                    blockhash=my_best_block.previous_hash)
                back_que.put(my_best_block.height + 1)
                continue
            # insert
            if not new_insert_block(
                    block=new_block, f_time=False, f_sign=False):
                log.debug("failed to insert new block, rollback to {}".format(
                    my_best_block.height - 1))
                my_best_block = chain_builder.get_block(
                    blockhash=my_best_block.previous_hash)
                back_que.put(my_best_block.height + 1)
                continue
            # request next chunk when the stack runs low
            if len(stack_dict) < STACK_CHUNK_SIZE:
                if 0 < len(stack_dict):
                    back_que.put(max(stack_dict) + 1)
                else:
                    back_que.put(new_block.height + 1)
            # check reached top height
            best_height_on_network, best_hash_on_network = get_best_conn_info()
            if new_block.height < best_height_on_network:
                my_best_block = new_block
                continue
            else:
                log.info("reached max height of network height={}".format(
                    best_height_on_network))
                stack_dict.clear()
                break
        # get unconfirmed txs from all connected nodes
        log.info("next get unconfirmed txs")
        unconfirmed_txhash_set = set()
        for data in ask_all_nodes(cmd=DirectCmd.UNCONFIRMED_TX):
            unconfirmed_txhash_set.update(data['txs'])
        unconfirmed_txs = list()
        for txhash in unconfirmed_txhash_set:
            if txhash in tx_builder.unconfirmed:
                continue  # already known locally
            try:
                tx: TX = seek_nodes(cmd=DirectCmd.TX_BY_HASH,
                                    data={'txhash': txhash})
                tx.height = None
                fill_verified_addr_tx(tx)
                unconfirmed_txs.append(tx)
            except BlockChainError as e:
                log.debug("1: Failed get unconfirmed {} '{}'".format(
                    txhash.hex(), e))
        # validate and store, oldest first
        with create_db(V.DB_ACCOUNT_PATH) as db:
            cur = db.cursor()
            for tx in sorted(unconfirmed_txs, key=lambda x: x.time):
                try:
                    check_tx_time(tx)
                    check_tx(tx, include_block=None)
                    tx_builder.put_unconfirmed(tx=tx, outer_cur=cur)
                except BlockChainError as e:
                    log.debug("2: Failed get unconfirmed '{}'".format(e))
        # fast sync finish
        log.info("fast sync finished start={} finish={} {}m".format(
            start_height, chain_builder.best_block.height,
            int((time() - start_time) / 60)))
        P.F_NOW_BOOTING = False
        update_info_for_generate()
    log.info("close by F_STOP flag")
def get_bits_by_hash(previous_hash, consensus):
    """Compute the next (bits, target) for *consensus* at the chain tip
    *previous_hash*, using a linearly-weighted moving average of the last
    N same-consensus blocks (LWMA-style).  Falls back to the easiest
    difficulty (MAX_BITS, MAX_TARGET) whenever history is insufficient.
    """
    if Debug.F_CONSTANT_DIFF:
        return MAX_BITS, MAX_TARGET
    elif previous_hash == GENESIS_PREVIOUS_HASH:
        return MAX_BITS, MAX_TARGET
    # Get best block time (scaled by this consensus' share of block production)
    block_time = round(V.BLOCK_TIME_SPAN / V.BLOCK_CONSENSUSES[consensus] * 100)
    # Get N, K params
    N, K = params(block_time)
    # Loop through N most recent blocks. "< height", not "<=".
    # height-1 = most recently solved rblock
    target_hash = previous_hash
    timestamp = list()
    target = list()
    j = 0
    for _ in range(MAX_SEARCH_BLOCKS):
        target_block = chain_builder.get_block(target_hash)
        if target_block is None:
            # local history incomplete
            return MAX_BITS, MAX_TARGET
        if target_block.flag != consensus:
            # skip blocks mined with a different consensus
            target_hash = target_block.previous_hash
            continue
        if j == N + 1:
            # need N+1 samples to form N solve-time intervals
            break
        j += 1
        timestamp.insert(0, target_block.time)   # keep oldest-first order
        target.insert(0, bits2target(target_block.bits))
        target_hash = target_block.previous_hash
        if target_hash == GENESIS_PREVIOUS_HASH:
            return MAX_BITS, MAX_TARGET
    else:
        # search too many block
        if len(target) < 2:
            # not found any mined blocks
            return MAX_BITS, MAX_TARGET
        else:
            # May have been a sudden difficulty raise
            # overwrite N param
            N = len(timestamp) - 1
    # linearly-weighted sum of solve times (newest interval weighted most)
    sum_target = t = j = 0
    for i in range(N):
        solve_time = max(0, timestamp[i + 1] - timestamp[i])
        j += 1
        t += solve_time * j
        sum_target += target[i + 1]
    # Keep t reasonable in case strange solvetimes occurred.
    if t < N * K // 3:
        t = N * K // 3
    new_target = t * sum_target // K // N // N
    if MAX_TARGET < new_target:
        # clamp at the easiest allowed difficulty
        return MAX_BITS, MAX_TARGET
    # convert new target to bits
    new_bits = target2bits(new_target)
    if Debug.F_SHOW_DIFFICULTY:
        print("ratio", C.consensus2name[consensus], new_bits,
              previous_hash.hex())
    return new_bits, new_target
def repair_wallet(gap_user=10, gap_limit=20):
    """Rescan the whole chain and recreate missing wallet movement logs.

    Walks every block from height 0, detects transactions touching wallet
    addresses (via HD-gap Search or the address->user table), and inserts an
    Accounting move-log for any related tx that has none recorded.

    gap_user: HD account gap to scan ahead — passed to Search.
    gap_limit: HD address gap per account — passed to Search.
    """
    log.info("Wallet fix tool start now")
    with create_db(V.DB_ACCOUNT_PATH) as db:
        cur = db.cursor()
        search = Search(gap_user=gap_user, gap_limit=gap_limit, cur=cur)
        for height, blockhash in chain_builder.db.read_block_hash_iter(
                start_height=0):
            block = chain_builder.get_block(blockhash=blockhash)
            for tx in block.txs:
                # --- detect whether this tx touches one of our addresses ---
                is_related = False
                for txhash, txindex in tx.inputs:
                    input_tx = tx_builder.get_tx(txhash)
                    address, coin_id, amount = input_tx.outputs[txindex]
                    if address in search:
                        search.recode(address)  # extend the HD gap window
                        is_related = True
                        break
                    elif read_address2userid(address=address, cur=cur):
                        is_related = True
                        break
                if not is_related:
                    for address, coin_id, amount in tx.outputs:
                        if address in search:
                            search.recode(address)
                            is_related = True
                            break
                        elif read_address2userid(address=address, cur=cur):
                            is_related = True
                            break
                # recode or ignore
                if is_related:
                    if read_txhash2movelog(txhash=tx.hash, cur=cur):
                        continue  # movement already recorded
                    # --- rebuild the per-user balance movement for this tx ---
                    movement = Accounting()
                    for txhash, txindex in tx.inputs:
                        input_tx = tx_builder.get_tx(txhash)
                        address, coin_id, amount = input_tx.outputs[txindex]
                        user = read_address2userid(address=address, cur=cur)
                        if user is not None:
                            movement[user][coin_id] -= amount
                            # movement[C.ANT_OUTSIDE] += balance
                    for address, coin_id, amount in tx.outputs:
                        user = read_address2userid(address, cur)
                        if user is not None:
                            movement[user][coin_id] += amount
                            # movement[C.ANT_OUTSIDE] -= balance
                    # check: drop empty movements, re-check for races
                    movement.cleanup()
                    if len(movement) == 0:
                        continue
                    if read_txhash2movelog(txhash=tx.hash, cur=cur):
                        continue
                    insert_movelog(movements=movement,
                                   cur=cur,
                                   ntype=tx.type,
                                   ntime=tx.time,
                                   txhash=tx.hash)
                    log.info("Find not recoded transaction {}".format(tx))
            if height % 5000 == 0:
                log.info("Now height {}".format(height))
        db.commit()
    log.info("Finish wallet repair")