def create_delegation_info(address: 'Address', value: int) -> 'DelegationInfo':
    """Build a DelegationInfo carrying a delegate address and its delegated value.

    :param address: delegate address to record
    :param value: delegated amount
    :return: populated DelegationInfo
    """
    info = DelegationInfo()
    # Plain assignments: annotating attribute targets
    # (info.address: 'Address' = ...) is a runtime no-op that only
    # obscures the code, so the annotations are dropped.
    info.address = address
    info.value = value
    Logger.debug(f"create_delegation_info: {str(info.address)}", "iiss")
    return info
def _supplement_db(self, context: 'IconScoreContext', revision: int):
    """Backfill version/revision and Header records for a DB created by an
    older icon-service version (which wrote neither version, revision, nor
    header).

    :param context: current icon score context (gives storage and block info)
    :param revision: revision to record when none is present
    """
    # Supplement db which is made by previous icon service version
    # (as there is no version, revision and header)
    if revision < Revision.IISS.value:
        # Pre-IISS DBs need no supplementation.
        return

    rc_version, _ = self.get_version_and_revision()
    if rc_version == -1:
        # -1 means no version record exists yet; write the current revision.
        self._put_version_and_revision(revision)

    # On the first change point.
    # We have to put Header for RC
    if self._db.get(Header.PREFIX) is None:
        # Re-read: _put_version_and_revision above may have updated these.
        rc_version, rc_revision = self.get_version_and_revision()
        end_block_height: int = context.storage.iiss.get_end_block_height_of_calc(
            context)
        calc_period: int = context.storage.iiss.get_calc_period(context)
        prev_end_calc_block_height: int = end_block_height - calc_period

        # if this point is new calc start point ...
        # we have to set block height in header data.
        if prev_end_calc_block_height == context.block.height:
            end_block_height: int = context.block.height
        header: 'Header' = DataCreator.create_header(
            rc_version, end_block_height, rc_revision)
        self.put_data_directly(header)
        Logger.debug(
            tag=IISS_LOG_TAG,
            msg=f"No header data. Put Header to db on open: {str(header)}")
def _on_accepted(self, reader: 'StreamReader', writer: 'StreamWriter'):
    """Schedule the send and receive coroutines for a newly accepted connection."""
    Logger.debug(f"on_accepted() start: {reader} {writer}")

    # Keep task handles so they can be awaited/cancelled on shutdown.
    for coroutine in (self._on_send(writer), self._on_recv(reader)):
        self._tasks.append(asyncio.ensure_future(coroutine))

    Logger.debug("on_accepted() end")
async def _on_send(self, writer: 'StreamWriter'):
    """Drain requests from the queue and write them to the IPC stream.

    Runs until the server stops (``self._running`` is False) or a
    ``MessageType.NONE`` sentinel request is dequeued, then closes the writer.

    :param writer: stream writer of the accepted connection
    """
    Logger.info(tag=_TAG, msg="_on_send() start")

    while self._running:
        try:
            request: 'Request' = await self._queue.get()
            self._queue.task_done()

            if request.msg_type == MessageType.NONE:
                # Stopping IPCServer
                break

            data: bytes = request.to_bytes()
            # BUGFIX: the log text was missing the closing parenthesis
            # around the hex dump (was "data({data.hex()}").
            Logger.debug(tag=_TAG, msg=f"on_send(): data({data.hex()})")
            Logger.info(tag=_TAG, msg=f"Sending Data : {request}")

            writer.write(data)
            await writer.drain()
        except asyncio.CancelledError:
            pass
        except BaseException as e:
            # Best-effort loop: log and keep serving until stopped.
            Logger.warning(tag=_TAG, msg=str(e))

    writer.close()

    Logger.info(tag=_TAG, msg="_on_send() end")
def create_delegation_info(cls, address: 'Address', value: int) -> 'DelegationInfo':
    """Return a DelegationInfo populated with the given address and value."""
    delegation = DelegationInfo()
    delegation.address = address
    delegation.value = value
    Logger.debug(f"create_delegation_info: {delegation.address}", cls.TAG)
    return delegation
def _load_special_address(self, context: 'IconScoreContext', db_key: str):
    """Load address info from state db according to db_key

    Reads the JSON text stored under db_key and assigns the parsed address
    to self._genesis or self._fee_treasury depending on the key.

    :param context:
    :param db_key: db key info
    """
    Logger.debug(
        f'_load_address_from_storage() start(address type: {db_key})',
        ICX_LOG_TAG)
    text = context.storage.icx.get_text(context, db_key)
    if text:
        obj = json.loads(text)

        # Support to load MainNet 1.0 db
        # (bare 40-char hex body gets the 'hx' prefix added)
        address: str = obj['address']
        if len(address) == 40:
            address = f'hx{address}'

        address: Address = Address.from_string(address)
        if db_key == self._GENESIS_DB_KEY:
            self._genesis: 'Address' = address
        elif db_key == self._TREASURY_DB_KEY:
            self._fee_treasury: 'Address' = address
        Logger.info(f'{db_key}: {address}', ICX_LOG_TAG)
    Logger.debug(
        f'_load_address_from_storage() end(address type: {db_key})',
        ICX_LOG_TAG)
def __init__(self, backup_root_path: str, rc_data_path: str):
    """Remember the backup root directory and the RC data directory."""
    Logger.debug(tag=TAG,
                 msg=f"__init__() start: "
                     f"backup_root_path={backup_root_path}, "
                     f"rc_data_path={rc_data_path}")

    # Independent assignments; kept as plain attribute storage.
    self._backup_root_path = backup_root_path
    self._rc_data_path = rc_data_path

    Logger.info(tag=TAG, msg=f"backup_root_path={self._backup_root_path}")
    Logger.debug(tag=TAG, msg="__init__() end")
def _handle_icx_issue_formula_for_prep(self, irep: int, rrep: int, total_delegation: int) -> int:
    """Return the ICX issue amount for this block as beta1 + beta2 + beta3.

    :param irep: i_rep value used for the per-block contributor calculation
    :param rrep: r_rep value, adjusted via calculate_temporary_reward_prep
    :param total_delegation: total delegated amount
    :return: sum of the three beta terms
    """
    irep_per_block: int = self.calculate_irep_per_block_contributor(irep)
    adjusted_rrep = IssueFormula.calculate_temporary_reward_prep(rrep)

    # beta1/beta2 scale the per-block irep by the prep / sub-prep counts.
    beta_1: int = irep_per_block * self._prep_count
    beta_2: int = irep_per_block * self._sub_prep_count
    # beta3 scales the adjusted rrep by total delegation, normalized by
    # the annual block count and the max reward rate.
    beta_3: int = (adjusted_rrep * total_delegation
                   // (IISS_ANNUAL_BLOCK * IISS_MAX_REWARD_RATE))

    Logger.debug("Calculated issue amount about this block. "
                 f"irep: {irep} rrep: {adjusted_rrep} total_delegation: {total_delegation} "
                 f"beta1: {beta_1} beta2: {beta_2} beta3: {beta_3}", IISS_LOG_TAG)
    return beta_1 + beta_2 + beta_3
def create_prep_data(block_height: int, total_delegation: int, preps: List['PRep']) -> 'PRepsData':
    """Convert P-Rep entries into a PRepsData record for the given block.

    :param block_height: block height the data belongs to
    :param total_delegation: total delegated amount over the given preps
    :param preps: P-Rep entries to convert to DelegationInfo
    :return: populated PRepsData
    """
    converted_preps: List['DelegationInfo'] = []
    for prep in preps:
        Logger.debug(f"create_prep_data: {str(prep.address)}", "iiss")
        info = DataCreator.create_delegation_info(prep.address, prep.delegated)
        converted_preps.append(info)

    data = PRepsData()
    # Plain assignments: annotating attribute targets
    # (data.block_height: int = ...) is a runtime no-op and misleading,
    # so the annotations are dropped.
    data.block_height = block_height
    data.total_delegation = total_delegation
    data.prep_list = converted_preps
    return data
def replace(self, new_prep: 'PRep') -> Optional['PRep']:
    """Replace old_prep with new_prep

    :param new_prep:
    :return: the replaced P-Rep, or None when new_prep is already stored
    """
    self._check_access_permission()

    old_prep: Optional['PRep'] = self._prep_dict.get(new_prep.address)
    if old_prep is new_prep:
        # Same object instance: nothing to swap out.
        Logger.debug(tag=self._TAG, msg="No need to replace the same P-Rep")
        return None

    self._remove(new_prep.address)
    self._add(new_prep)
    self._flags |= PRepContainerFlag.DIRTY
    return old_prep
def load_score(self, score_path: str) -> callable:
    """Import the SCORE package at score_path and return its main class.

    Reads the package metadata JSON, optionally validates the package,
    then imports the main module via importlib and returns the attribute
    named by the main-score entry.

    :param score_path: filesystem path of the SCORE package
    :return: the SCORE's main class (callable)
    """
    score_package_info = self._load_json(score_path)
    pkg_root_import: str = self._make_pkg_root_import(score_path)
    if self._is_flag_on(
            IconScoreLoaderFlag.ENABLE_SCORE_PACKAGE_VALIDATOR):
        ScorePackageValidator().validator(score_path, pkg_root_import)

    # don't remove code until fix hidden bug! it's like magic number!
    # I think creating file stream like write log fix the hidden bug Temporarily
    # NOTE(review): these debug logs are a deliberate workaround — keep them
    # until the underlying bug is found and fixed.
    Logger.debug("== load_score ==")
    Logger.debug(f'sys_module: {sys.modules}')

    # Relative import: ".<main_file>" resolved against the package root.
    spec = importlib.util.find_spec(
        f".{score_package_info[self._MAIN_FILE]}", pkg_root_import)
    mod = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(mod)
    return getattr(mod, score_package_info[self._MAIN_SCORE])
def _set_corrected_issue_data(self, context: 'IconScoreContext', issue_amount: int):
    """Compute the corrected ICX issue amount for this block and cache the
    results (regulator variable, covered amounts, corrected issue amount)
    on self.

    :param context: current icon score context
    :param issue_amount: raw issue amount proposed for this block
    """
    regulator_variable: 'RegulatorVariable' = context.storage.issue.get_regulator_variable(
        context)
    prev_block_cumulative_fee: int = context.storage.icx.last_block.cumulative_fee
    calc_next_block_height: int = context.storage.iiss.get_end_block_height_of_calc(
        context)

    # update current calculated period total issued icx
    current_calc_period_total_issued_icx: int = regulator_variable.current_calc_period_issued_icx
    current_calc_period_total_issued_icx += issue_amount
    if calc_next_block_height == context.block.height:
        # End of a calc period: reconcile against the RC calc response.
        prev_calc_period_issued_iscore, _ = context.storage.rc.get_calc_response_from_rc(
        )
        if regulator_variable.prev_calc_period_issued_icx == -1:
            # -1 marks "no previous period data"; treat both sides as zero.
            regulator_variable.prev_calc_period_issued_icx, prev_calc_period_issued_iscore = 0, 0
        covered_icx_by_fee, covered_icx_by_remain, remain_over_issued_iscore, corrected_icx_issue_amount = \
            self._correct_issue_amount_on_calc_period(regulator_variable.prev_calc_period_issued_icx,
                                                      prev_calc_period_issued_iscore,
                                                      regulator_variable.over_issued_iscore,
                                                      issue_amount,
                                                      prev_block_cumulative_fee)

        # Roll the accumulated total into "previous period" and reset current.
        regulator_variable.prev_calc_period_issued_icx = current_calc_period_total_issued_icx
        regulator_variable.current_calc_period_issued_icx = 0
    else:
        # Mid-period block: correct only against the carried-over iscore.
        covered_icx_by_fee, covered_icx_by_remain, remain_over_issued_iscore, corrected_icx_issue_amount = \
            self._correct_issue_amount(regulator_variable.over_issued_iscore,
                                       issue_amount,
                                       prev_block_cumulative_fee)
        regulator_variable.current_calc_period_issued_icx = current_calc_period_total_issued_icx
    regulator_variable.over_issued_iscore = remain_over_issued_iscore

    # Cache results for later commit/query.
    self._regulator_variable = regulator_variable
    self._covered_icx_by_fee = covered_icx_by_fee
    self._covered_icx_by_remain = covered_icx_by_remain
    self._corrected_icx_issue_amount = corrected_icx_issue_amount
    Logger.debug(
        f"Block height of this block: {context.block.height} "
        f"Regulator variable: {self._regulator_variable}", IISS_LOG_TAG)
def create_prep_data(cls, block_height: int, total_delegation: int,
                     preps: Iterable['PRepSnapshot']) -> 'PRepsData':
    """
    :param block_height:
    :param total_delegation: total delegation of main and sub P-Reps
    :param preps: main and sub P-Reps
    :return:
    """
    delegation_infos: List['DelegationInfo'] = []
    for snapshot in preps:
        Logger.debug(tag=cls.TAG,
                     msg=f"create_prep_data: {str(snapshot.address)}")
        delegation_infos.append(
            DataCreator.create_delegation_info(snapshot.address,
                                               snapshot.delegated))

    result = PRepsData()
    result.block_height = block_height
    result.total_delegation = total_delegation
    result.prep_list = delegation_infos
    return result
async def _on_recv(self, reader: 'StreamReader'):
    """Read raw bytes from the IPC stream, unpack messages and dispatch them
    to the queue's message handler until the stream ends or the server stops.
    """
    Logger.info(tag=_TAG, msg="_on_recv() start")

    while self._running:
        try:
            chunk: bytes = await reader.read(1024)
            # EOF or non-bytes result ends the loop.
            if not (isinstance(chunk, bytes) and chunk):
                break

            Logger.debug(tag=_TAG, msg=f"_on_recv(): data({chunk.hex()})")

            self._unpacker.feed(chunk)
            for response in self._unpacker:
                Logger.info(tag=_TAG, msg=f"Received Data : {response}")
                self._queue.message_handler(response)
        except asyncio.CancelledError:
            pass
        except BaseException as e:
            # Best-effort loop: log and keep receiving until stopped.
            Logger.warning(tag=_TAG, msg=str(e))

    Logger.info(tag=_TAG, msg="_on_recv() end")
def run(self, icx_db: 'KeyValueDatabase', rc_db: 'KeyValueDatabase',
        revision: int, prev_block: 'Block', block_batch: 'BlockBatch',
        iiss_wal: 'IissWAL', is_calc_period_start_block: bool,
        instant_block_hash: bytes):
    """Backup the previous block state

    :param icx_db:
    :param rc_db:
    :param revision:
    :param prev_block: the latest confirmed block height during commit
    :param block_batch:
    :param iiss_wal:
    :param is_calc_period_start_block:
    :param instant_block_hash:
    :return:
    """
    Logger.debug(tag=TAG, msg="backup() start")

    backup_path: str = self._get_backup_file_path(prev_block.height)
    Logger.info(tag=TAG, msg=f"backup_file_path={backup_path}")

    wal_writer = WriteAheadLogWriter(revision,
                                     max_log_count=2,
                                     block=prev_block,
                                     instant_block_hash=instant_block_hash)
    wal_writer.open(backup_path)

    # Mark the WAL when this block starts a new calc period.
    if is_calc_period_start_block:
        wal_writer.write_state(WALBackupState.CALC_PERIOD_END_BLOCK.value)

    # RC db first, then the state db — same order as the original.
    self._backup_rc_db(wal_writer, rc_db, iiss_wal)
    self._backup_state_db(wal_writer, icx_db, block_batch)

    wal_writer.close()

    Logger.debug(tag=TAG, msg="backup() end")
async def _on_recv(self, reader: 'StreamReader'):
    """Read from the stream, feed the unpacker and dispatch decoded
    responses; on EOF enqueue a NoneRequest sentinel.
    """
    Logger.debug("_on_recv() start")

    while True:
        chunk: bytes = await reader.read(1024)
        # EOF or non-bytes result ends the loop.
        if not (isinstance(chunk, bytes) and chunk):
            break

        Logger.debug(f"_on_recv(): data({chunk.hex()})")

        self._unpacker.feed(chunk)
        for response in self._unpacker:
            self._queue.message_handler(response)

    # Stream closed: push a sentinel request into the queue.
    await self._queue.put(NoneRequest())

    Logger.debug("_on_recv() end")
async def _on_send(self, writer: 'StreamWriter'):
    """Pop requests off the queue and write them to the IPC stream.

    A ``MessageType.NONE`` request is the stop signal: it is acknowledged
    with a NoneResponse and the loop exits, closing the writer.

    :param writer: stream writer of the connection
    """
    Logger.debug("_on_send() start")

    while True:
        request: 'Request' = await self._queue.get()
        if request.msg_type == MessageType.NONE:
            # Acknowledge the stop signal so the waiter on this msg_id
            # is released, then shut down the send loop.
            self._queue.put_response(
                NoneResponse.from_list([request.msg_type, request.msg_id]))
            break

        data: bytes = request.to_bytes()
        # BUGFIX: the log text was missing the closing parenthesis
        # around the hex dump (was "data({data.hex()}").
        Logger.debug(f"on_send(): data({data.hex()})")

        writer.write(data)
        await writer.drain()

    writer.close()

    Logger.debug("_on_send() end")
def put(batch: list, iiss_data: 'Data'):
    """Log the given IISS data entry, then append it to the batch."""
    Logger.debug(tag=IISS_LOG_TAG, msg=f"put data: {str(iiss_data)}")
    batch.append(iiss_data)
def test_many_debug(self):
    """Emit 100 numbered debug log lines."""
    for idx in range(100):
        Logger.debug(TAG, f'debug log{idx}')
def test_debug(self):
    """Emit a single debug log line."""
    Logger.debug(TAG, 'debug log')
def put(batch: list, iiss_data: 'Data'):
    """Append an IISS data entry to the batch, logging it first."""
    Logger.debug(f"put data: {str(iiss_data)}", "iiss")
    batch.append(iiss_data)