def scan_rc_db(cls, rc_data_path: str) -> Tuple[str, str, str]:
    """Scan directories that are managed by RewardCalcStorage

    :param rc_data_path: the parent directory of rc_dbs
    :return: current_rc_db_path, standby_rc_db_path, iiss_rc_db_path
        (each item is "" when the corresponding db directory is not found)
    """
    current_rc_db_path: str = ""
    standby_rc_db_path: str = ""
    iiss_rc_db_path: str = ""

    with os.scandir(rc_data_path) as it:
        for entry in it:
            if not entry.is_dir():
                continue

            if entry.name == cls.CURRENT_IISS_DB_NAME:
                current_rc_db_path = os.path.join(rc_data_path, cls.CURRENT_IISS_DB_NAME)
            elif entry.name.startswith(cls.STANDBY_IISS_DB_NAME_PREFIX):
                standby_rc_db_path = os.path.join(rc_data_path, entry.name)
            elif entry.name.startswith(cls.IISS_RC_DB_NAME_PREFIX):
                iiss_rc_db_path = os.path.join(rc_data_path, entry.name)

    Logger.info(tag=WAL_LOG_TAG,
                msg=f"current_rc_db={current_rc_db_path}, "
                    f"standby_rc_db={standby_rc_db_path}, "
                    f"iiss_rc_db={iiss_rc_db_path}")

    return current_rc_db_path, standby_rc_db_path, iiss_rc_db_path
def close(self):
    """Drop the event-loop and unpacker references held by this instance."""
    Logger.info(tag=_TAG, msg="close() start")

    # Release both references so they can be garbage-collected.
    self._unpacker = None
    self._loop = None

    Logger.info(tag=_TAG, msg="close() end")
def get_total_elected_prep_delegated_snapshot(self) -> int:
    """
    total_elected_prep_delegated_snapshot =
    the delegated amount which the elected P-Reps received at the beginning of this term
    - the delegated amount which unregistered P-Reps received in this term

    This function is only intended for state backward compatibility
    and not used any more after revision is set to 7.
    """
    # Collect addresses of every P-Rep that unregistered during this term
    unreg_preps: Set['Address'] = set()
    db = self._db.get_sub_db(TxData.PREFIX)
    for k, v in db.iterator():
        data: 'TxData' = TxData.from_bytes(v)
        if data.type == TxType.PREP_UNREGISTER:
            unreg_preps.add(data.address)

    # Load the first PRepsData record only: the elected-P-Rep delegation
    # list snapshotted at the beginning of the term
    db = self._db.get_sub_db(PRepsData.PREFIX)
    preps: Optional[List['DelegationInfo']] = None
    for k, v in db.iterator():
        data: 'PRepsData' = PRepsData.from_bytes(k, v)
        preps = data.prep_list
        break

    # Sum the snapshot delegations, skipping P-Reps that unregistered in this term
    ret = 0
    if preps:
        for info in preps:
            if info.address not in unreg_preps:
                ret += info.value

    Logger.info(
        tag=IISS_LOG_TAG,
        msg=f"get_total_elected_prep_delegated_snapshot load: {ret}")
    return ret
def _on_accepted(self, reader: 'StreamReader', writer: 'StreamWriter'):
    """Schedule the send and recv coroutines for a newly accepted connection."""
    Logger.info(tag=_TAG, msg=f"on_accepted() start: {reader} {writer}")

    # One task pumps the outbound queue, the other consumes inbound data.
    for coro in (self._on_send(writer), self._on_recv(reader)):
        self._tasks.append(asyncio.ensure_future(coro))

    Logger.info(tag=_TAG, msg="on_accepted() end")
def run(
        self,
        context: 'IconScoreContext',
        prep: 'PRep',
        on_penalty_imposed: Callable[
            ['IconScoreContext', 'Address', 'PenaltyReason'], None]
) -> 'PenaltyReason':
    """Determine which penalty (if any) applies to the given P-Rep.

    Both conditions are always evaluated; when both hold, LOW_PRODUCTIVITY
    wins because it is checked last. The callback fires only when a penalty
    was actually imposed.
    """
    penalty: 'PenaltyReason' = PenaltyReason.NONE

    if self._check_block_validation_penalty(prep):
        Logger.info(
            f"PenaltyImposer statistics({PenaltyReason.BLOCK_VALIDATION}): "
            f"prep_total_blocks: {prep.total_blocks} "
            f"prep_block_validation_proportion: {prep.block_validation_proportion}"
        )
        penalty = PenaltyReason.BLOCK_VALIDATION

    if self._check_low_productivity_penalty(prep):
        Logger.info(
            f"PenaltyImposer statistics({PenaltyReason.LOW_PRODUCTIVITY}): "
            f"prep_total_blocks: {prep.total_blocks} "
            f"prep_unvalidated_sequence_blocks: {prep.unvalidated_sequence_blocks}"
        )
        penalty = PenaltyReason.LOW_PRODUCTIVITY

    if penalty != PenaltyReason.NONE and on_penalty_imposed:
        on_penalty_imposed(context, prep.address, penalty)

    return penalty
def _load_special_address(self, context: 'IconScoreContext', db_key: str):
    """Load address info from state db according to db_key

    Caches the loaded address on the instance: genesis address for
    _GENESIS_DB_KEY, fee treasury address for _TREASURY_DB_KEY.
    Does nothing when the state db has no entry for db_key.

    :param context: current execution context
    :param db_key: db key info (_GENESIS_DB_KEY or _TREASURY_DB_KEY)
    """
    Logger.debug(f'_load_address_from_storage() start(address type: {db_key})', ICX_LOG_TAG)
    text = context.storage.icx.get_text(context, db_key)
    if text:
        obj = json.loads(text)

        # Support to load MainNet 1.0 db:
        # a bare 40-char hex body is missing the 'hx' prefix — add it.
        address: str = obj['address']
        if len(address) == 40:
            address = f'hx{address}'

        address: Address = Address.from_string(address)
        if db_key == self._GENESIS_DB_KEY:
            self._genesis: 'Address' = address
        elif db_key == self._TREASURY_DB_KEY:
            self._fee_treasury: 'Address' = address
        Logger.info(f'{db_key}: {address}', ICX_LOG_TAG)
    Logger.debug(f'_load_address_from_storage() end(address type: {db_key})', ICX_LOG_TAG)
def rollback(self, context: 'IconScoreContext', block_height: int, block_hash: bytes):
    """Refresh cached special addresses and last-block info after a state rollback."""
    Logger.info(tag=ROLLBACK_LOG_TAG,
                msg=f"rollback() start: block_height={block_height} "
                    f"block_hash={bytes_to_hex(block_hash)}")

    # Reload the in-memory caches from the (rolled-back) state db.
    for db_key in (self._GENESIS_DB_KEY, self._TREASURY_DB_KEY):
        self._load_special_address(context, db_key)
    self.load_last_block_info(context)

    Logger.info(tag=ROLLBACK_LOG_TAG, msg="rollback() end")
def _rename_db(old_db_path: str, new_db_path: str):
    """Rename an IISS db directory, refusing to clobber an existing target.

    :raises DatabaseException: when the source is missing or the target exists
    """
    renamable = os.path.exists(old_db_path) and not os.path.exists(new_db_path)
    if not renamable:
        raise DatabaseException(
            "Cannot create IISS DB because of invalid path. Check both IISS "
            "current DB path and IISS DB path")

    os.rename(old_db_path, new_db_path)
    Logger.info(tag=IISS_LOG_TAG, msg=f"Rename db: {old_db_path} -> {new_db_path}")
def _get_part(self,
              context: 'IconScoreContext',
              part_class: Union[type(CoinPart), type(StakePart), type(DelegationPart)],
              address: 'Address') -> Union['CoinPart', 'StakePart', 'DelegationPart']:
    """Fetch one account part from the db, falling back to a default instance.

    A missing CoinPart is logged for diagnostics. An absent (or empty)
    value yields a freshly constructed part.
    """
    raw: bytes = self._db.get(context, part_class.make_key(address))

    if raw is None and part_class is CoinPart:
        Logger.info(tag="PV", msg=f"No CoinPart: {address} {context.block}")

    if raw:
        return part_class.from_bytes(raw)
    return part_class()
def __init__(self, backup_root_path: str, rc_data_path: str):
    """Keep the paths needed to create and restore block-state backups."""
    Logger.debug(tag=TAG, msg=f"__init__() start: "
                              f"backup_root_path={backup_root_path}, "
                              f"rc_data_path={rc_data_path}")

    self._backup_root_path = backup_root_path
    self._rc_data_path = rc_data_path

    Logger.info(tag=TAG, msg=f"backup_root_path={self._backup_root_path}")
    Logger.debug(tag=TAG, msg="__init__() end")
def __init__(self, reset_time: int, threshold: int, ban_time: int):
    """Set up per-category request statistics and ban bookkeeping."""
    Logger.info(
        f"DoSGuard config: reset_time={reset_time}, threshold={threshold}, ban_time={ban_time}"
    )

    # One empty bucket per Category value: request counters and ban expirations.
    self._statistics: dict = {category.value: {} for category in Category}
    self._ban_expired: dict = {category.value: {} for category in Category}

    self._reset_time: int = reset_time
    self._threshold: int = threshold
    self._ban_time: int = ban_time
    # Baseline for the periodic statistics reset.
    self._last_reset_time: int = now()
def start(self):
    """Start the unix-socket server unless it is already running."""
    Logger.info(tag=_TAG, msg="start() start")
    if self._running:
        return

    self._running = True

    # Fire-and-forget: schedule the server coroutine on the running loop.
    server_coro = asyncio.start_unix_server(self._on_accepted, self._path)
    asyncio.ensure_future(server_coro)

    Logger.info(tag=_TAG, msg="start() end")
def start(self):
    """Start the unix-socket server, blocking until it is listening."""
    Logger.info(tag=_TAG, msg="start() start")
    if self._running:
        return

    self._running = True

    server_coro = asyncio.start_unix_server(self._on_accepted, self._path)
    # Block until the server socket is actually bound and listening.
    self._loop.run_until_complete(server_coro)

    Logger.info(tag=_TAG, msg="start() end")
def stop(self):
    """Cancel all outstanding send/recv tasks and mark the server stopped."""
    Logger.info(tag=_TAG, msg="stop() start")
    if not self._running:
        return

    self._running = False

    # Cancellation is delivered to each task as asyncio.CancelledError.
    for task in self._tasks:
        task.cancel()

    Logger.info(tag=_TAG, msg="stop() end")
def open(self, loop, message_queue: 'MessageQueue', path: str):
    """Bind this server to an event loop, a message queue and a socket path."""
    Logger.info(tag=_TAG, msg="open() start")

    # Sanity-check the collaborators before storing them.
    assert loop
    assert message_queue
    assert isinstance(path, str)

    self._path = path
    self._queue = message_queue
    self._loop = loop

    Logger.info(tag=_TAG, msg="open() end")
def _set_corrected_issue_data(self, context: 'IconScoreContext', issue_amount: int):
    """Compute and cache the corrected ICX issue amount for this block.

    Accumulates issue_amount into the current calc-period total. At the last
    block of a calc period it regulates issuance against the reward
    calculator's response; otherwise it only corrects against over-issued
    iscore and the previous block's fee.

    :param context: current execution context
    :param issue_amount: ICX amount scheduled to be issued in this block
    """
    regulator_variable: 'RegulatorVariable' = context.storage.issue.get_regulator_variable(
        context)
    prev_block_cumulative_fee: int = context.storage.icx.last_block.cumulative_fee
    end_block_height_of_calc: int = context.storage.iiss.get_end_block_height_of_calc(
        context)

    # Update current calculated period total issued icx
    current_calc_period_total_issued_icx: int = regulator_variable.current_calc_period_issued_icx
    current_calc_period_total_issued_icx += issue_amount
    if end_block_height_of_calc == context.block.height:
        prev_calc_period_issued_iscore, _, _ = context.storage.rc.get_calc_response_from_rc(
        )
        assert prev_calc_period_issued_iscore >= 0

        # In case of the first term of decentralization.
        # Do not regulate on the first term of decentralization
        # as Icon service has not issued ICX on the last period of 'pre-vote'
        # (On pre-vote, icon-foundation provided ICX instead of issuing it)
        if regulator_variable.prev_calc_period_issued_icx == -1:
            regulator_variable.prev_calc_period_issued_icx, prev_calc_period_issued_iscore = 0, 0
        covered_icx_by_fee, covered_icx_by_remain, remain_over_issued_iscore, corrected_icx_issue_amount = \
            self._correct_issue_amount_on_calc_period(regulator_variable.prev_calc_period_issued_icx,
                                                      prev_calc_period_issued_iscore,
                                                      regulator_variable.over_issued_iscore,
                                                      issue_amount,
                                                      prev_block_cumulative_fee)

        regulator_variable.prev_calc_period_issued_icx = current_calc_period_total_issued_icx
        regulator_variable.current_calc_period_issued_icx = 0
    else:
        covered_icx_by_fee, covered_icx_by_remain, remain_over_issued_iscore, corrected_icx_issue_amount = \
            self._correct_issue_amount(regulator_variable.over_issued_iscore,
                                       issue_amount,
                                       prev_block_cumulative_fee)
        regulator_variable.current_calc_period_issued_icx = current_calc_period_total_issued_icx
    regulator_variable.over_issued_iscore = remain_over_issued_iscore

    # Cache the results for later accessors.
    self._regulator_variable = regulator_variable
    self._covered_icx_by_fee = covered_icx_by_fee
    self._covered_icx_by_remain = covered_icx_by_remain
    self._corrected_icx_issue_amount = corrected_icx_issue_amount
    Logger.info(
        f"Regulate BH: {context.block.height} "
        f"Covered by fee: {self._covered_icx_by_fee} "
        f"Covered by remain: {self._covered_icx_by_remain} "
        # Fixed: a separator space was missing here, so the issue amount and
        # "Regulator variable:" ran together in the log output.
        f"Corrected issue amount {self._corrected_icx_issue_amount} "
        f"Regulator variable: {self._regulator_variable}", IISS_LOG_TAG)
def index(self, address: 'Address') -> int:
    """Returns the index of a given address in active_prep_list

    :return: zero-based index, or -1 when the address is unknown or not active
    """
    prep: 'PRep' = self._prep_dict.get(address)

    if prep is None:
        Logger.info(tag="PREP", msg=f"P-Rep not found: {address}")
        return -1

    # Only active P-Reps live in the active list.
    if prep.status != PRepStatus.ACTIVE:
        return -1
    return self._active_prep_list.index(prep)
def _issue(context: 'IconScoreContext', to: 'Address', amount: int):
    """Mint `amount` ICX into `to` and grow the total supply accordingly.

    A non-positive amount is a no-op.
    """
    if amount <= 0:
        return

    to_account: 'Account' = context.storage.icx.get_account(context, to)
    to_account.deposit(amount)

    supply_before: int = context.storage.icx.get_total_supply(context)
    supply_after: int = supply_before + amount

    context.storage.icx.put_account(context, to_account)
    context.storage.icx.put_total_supply(context, supply_after)

    Logger.info(f"Issue icx. amount: {amount} "
                f"Total supply: {supply_after} "
                f"Treasury: {to_account.balance}", ICX_LOG_TAG)
def open(self, context: IconScoreContext, path: str):
    """Open the current IISS db and restore the last transaction index.

    :raises DatabaseException: when `path` does not exist
    """
    if not os.path.exists(path):
        raise DatabaseException(f"Invalid IISS DB path: {path}")

    revision: int = context.revision
    self._path = path
    self._db = self.create_current_db(path)

    self._db_iiss_tx_index = self._load_last_transaction_index()
    Logger.info(
        tag=IISS_LOG_TAG,
        msg=f"last_transaction_index on open={self._db_iiss_tx_index}")

    # todo: check side effect of WAL
    self._supplement_db(context, revision)
def _set_corrected_issue_data(self, context: 'IconScoreContext', issue_amount: int):
    """Compute and cache the corrected ICX issue amount for this block.

    Accumulates issue_amount into the current calc-period total. At the last
    block of a calc period it regulates issuance against the reward
    calculator's response; otherwise it only corrects against over-issued
    iscore and the previous block's fee.

    :param context: current execution context
    :param issue_amount: ICX amount scheduled to be issued in this block
    """
    regulator_variable: 'RegulatorVariable' = context.storage.issue.get_regulator_variable(
        context)
    prev_block_cumulative_fee: int = context.storage.icx.last_block.cumulative_fee
    end_block_height_of_calc: int = context.storage.iiss.get_end_block_height_of_calc(
        context)

    # Update current calculated period total issued icx
    current_calc_period_total_issued_icx: int = regulator_variable.current_calc_period_issued_icx
    current_calc_period_total_issued_icx += issue_amount
    if end_block_height_of_calc == context.block.height:
        prev_calc_period_issued_iscore, _, _ = context.storage.rc.get_calc_response_from_rc(
        )
        assert prev_calc_period_issued_iscore >= 0

        # Do not regulate on the first term of decentralization: -1 marks
        # that no ICX was issued during the last 'pre-vote' period.
        if regulator_variable.prev_calc_period_issued_icx == -1:
            regulator_variable.prev_calc_period_issued_icx, prev_calc_period_issued_iscore = 0, 0
        covered_icx_by_fee, covered_icx_by_remain, remain_over_issued_iscore, corrected_icx_issue_amount = \
            self._correct_issue_amount_on_calc_period(regulator_variable.prev_calc_period_issued_icx,
                                                      prev_calc_period_issued_iscore,
                                                      regulator_variable.over_issued_iscore,
                                                      issue_amount,
                                                      prev_block_cumulative_fee)

        regulator_variable.prev_calc_period_issued_icx = current_calc_period_total_issued_icx
        regulator_variable.current_calc_period_issued_icx = 0
    else:
        covered_icx_by_fee, covered_icx_by_remain, remain_over_issued_iscore, corrected_icx_issue_amount = \
            self._correct_issue_amount(regulator_variable.over_issued_iscore,
                                       issue_amount,
                                       prev_block_cumulative_fee)
        regulator_variable.current_calc_period_issued_icx = current_calc_period_total_issued_icx
    regulator_variable.over_issued_iscore = remain_over_issued_iscore

    # Cache the results for later accessors.
    self._regulator_variable = regulator_variable
    self._covered_icx_by_fee = covered_icx_by_fee
    self._covered_icx_by_remain = covered_icx_by_remain
    self._corrected_icx_issue_amount = corrected_icx_issue_amount
    Logger.info(
        f"Regulate BH: {context.block.height} "
        f"Covered by fee: {self._covered_icx_by_fee} "
        f"Covered by remain: {self._covered_icx_by_remain} "
        # Fixed: a separator space was missing here, so the issue amount and
        # "Regulator variable:" ran together in the log output.
        f"Corrected issue amount {self._corrected_icx_issue_amount} "
        f"Regulator variable: {self._regulator_variable}", IISS_LOG_TAG)
def write(self, precommit_data: 'PrecommitData'):
    """
    Write the human readable precommit data to the file for debugging

    The output file is named "<height>-<first 8 hex chars of state root>-<suffix>"
    and is written under self._dir_path. Any failure during serialization is
    logged and swallowed so the commit path is never disturbed.

    :param precommit_data: precommit data to dump as JSON
    :return:
    """
    filename: str = f"{precommit_data.block.height}" \
                    f"-{precommit_data.state_root_hash.hex()[:8]}" \
                    f"-{self._filename_suffix}"
    Logger.info(
        tag=_TAG,
        msg=
        f"PrecommitDataWriter.write() start (precommit: {precommit_data})")
    with open(os.path.join(self._dir_path, filename), 'w') as f:
        try:
            block = precommit_data.block
            json_dict = {
                "iconservice": __version__,
                "revision": precommit_data.revision,
                "block": block.to_dict(to_camel_case) if block is not None else None,
                "isStateRootHash": precommit_data.is_state_root_hash,
                "rcStateRootHash": precommit_data.rc_state_root_hash,
                "stateRootHash": precommit_data.state_root_hash,
                "prevBlockGenerator": precommit_data.prev_block_generator,
                "blockBatch": precommit_data.block_batch.to_list(),
                "rcBlockBatch": self._convert_rc_block_batch_to_list(
                    precommit_data.rc_block_batch)
            }
            # _json_default serializes values json cannot handle natively
            # (e.g. hash bytes) — see self._json_default.
            json.dump(json_dict, f, default=self._json_default)
        except Exception as e:
            # Best-effort debug dump: log and continue instead of raising.
            Logger.exception(
                tag=_TAG,
                msg=
                f"Exception raised during writing the precommit-data: {e}")
    Logger.info(tag=_TAG, msg=f"PrecommitDataWriter.write() end")
def __init__(self, penalty_grace_period: int,
             low_productivity_penalty_threshold: int,
             block_validation_penalty_threshold: int):
    """Hold the thresholds used when deciding P-Rep penalties."""
    Logger.info(
        f"[PenaltyImposer Init] "
        f"penalty_grace_period: {penalty_grace_period} "
        f"low_productivity_penalty_threshold: {low_productivity_penalty_threshold} "
        f"block_validation_penalty_threshold: {block_validation_penalty_threshold}"
    )

    # Low productivity penalty is not imposed during penalty_grace_period
    self._penalty_grace_period: int = penalty_grace_period
    # Unit: percent without fraction
    self._low_productivity_penalty_threshold: int = low_productivity_penalty_threshold
    # Unit: The number of blocks
    self._block_validation_penalty_threshold: int = block_validation_penalty_threshold
def _handle_icx_issue_formula_for_prep(self, context: 'IconScoreContext', irep: int,
                                       rrep: int, total_delegation: int) -> int:
    """Compute the per-block ICX issue amount for P-Reps: beta1 + beta2 + beta3."""
    calculated_irep: int = self.calculate_irep_per_block_contributor(irep)

    # beta1 and beta2 apply only after decentralization.
    beta_1: int = 0
    beta_2: int = 0
    if context.is_decentralized():
        beta_1 = calculated_irep * self.main_prep_count
        beta_2 = calculated_irep * PERCENTAGE_FOR_BETA_2 if context.term.total_delegated > 0 else 0

    # beta3: delegation reward scaled by the (temporary) rrep.
    temp_rrep = IssueFormula.calculate_temporary_reward_prep(rrep)
    beta_3: int = temp_rrep * total_delegation // (IISS_ANNUAL_BLOCK * IISS_MAX_REWARD_RATE)

    Logger.info("Calculated issue amount about this block. "
                f"calculated_irep: {calculated_irep} irep: {irep} rrep: {temp_rrep} "
                f"total_delegation: {total_delegation} "
                f"beta1: {beta_1} beta2: {beta_2} beta3: {beta_3}", IISS_LOG_TAG)

    return beta_1 + beta_2 + beta_3
def verify_signature(msg_hash: bytes, signature: bytes, sender: str) -> bool:
    """Verify that `signature` over `msg_hash` was produced by `sender`.

    :param msg_hash: 32-byte message hash
    :param signature: 65-byte recoverable signature
    :param sender: expected address string
    :return: True when the address recovered from the signature equals sender
    """
    if isinstance(msg_hash, bytes) \
            and len(msg_hash) == 32 \
            and isinstance(signature, bytes) \
            and len(signature) == 65:
        # hasher=None: msg_hash is already a digest, do not hash again.
        public_key = PublicKey.from_signature_and_message(
            serialized_sig=signature,
            message=msg_hash,
            hasher=None
        )
        address: str = address_from_pubkey(public_key.format(compressed=False))
        if address == sender:
            return True
        # Mismatch: log both addresses to aid debugging.
        Logger.info(f'Expected address={sender}', "verify_signature")
        Logger.info(f'Signed address={address}', "verify_signature")
    return False
def _change_db_name(cls, rc_data_path: str, old_name: str, new_name: str):
    """Rename one iiss db directory under rc_data_path.

    Failures are logged rather than raised; renaming to the same name is a no-op.
    """
    if old_name == new_name:
        return

    src_path: str = os.path.join(rc_data_path, old_name)
    dst_path: str = os.path.join(rc_data_path, new_name)

    try:
        shutil.move(src_path, dst_path)
        Logger.info(
            tag=IISS_LOG_TAG,
            msg=f"Renaming iiss_db_name succeeded: old={old_name} new={new_name}")
    except BaseException as e:
        # Keep going: a failed rename is reported but must not abort the caller.
        Logger.error(
            tag=IISS_LOG_TAG,
            msg=f"Failed to rename iiss_db_name: old={old_name} new={new_name} "
                f"path={rc_data_path} exception={str(e)}")
def run(cls, rc_data_path: str) -> int:
    """Strip revision suffixes from iiss db directory names under rc_data_path.

    :return: the number of directories that were renamed
    """
    renamed = 0

    with os.scandir(rc_data_path) as it:
        for entry in it:
            if not (entry.is_dir() and entry.name.startswith(cls._DB_NAME_PREFIX)):
                continue

            new_name: str = cls._get_db_name_without_revision(entry.name)
            if not new_name:
                # An empty result means the refactoring already happened;
                # stop scanning entirely.
                Logger.info(
                    tag=IISS_LOG_TAG,
                    msg=f"Refactoring iiss_db name has been already done: old={entry.name} "
                        f"rc_data_path={rc_data_path}")
                break

            cls._change_db_name(rc_data_path, entry.name, new_name)
            renamed += 1

    return renamed
async def _on_send(self, writer: 'StreamWriter'):
    """Drain the outbound queue and write serialized requests to the peer.

    Runs until the server stops or a MessageType.NONE sentinel is dequeued;
    the writer is closed on exit.
    """
    Logger.info(tag=_TAG, msg="_on_send() start")

    while self._running:
        try:
            request: 'Request' = await self._queue.get()
            self._queue.task_done()

            if request.msg_type == MessageType.NONE:
                # Stopping IPCServer
                break

            data: bytes = request.to_bytes()
            # Fixed: the closing parenthesis was missing from this log message.
            Logger.debug(tag=_TAG, msg=f"on_send(): data({data.hex()})")
            Logger.info(tag=_TAG, msg=f"Sending Data : {request}")

            writer.write(data)
            await writer.drain()
        except asyncio.CancelledError:
            pass
        except BaseException as e:
            Logger.warning(tag=_TAG, msg=str(e))

    writer.close()
    Logger.info(tag=_TAG, msg="_on_send() end")
def verify_signature(msg_hash: bytes, signature: bytes, sender: str) -> bool:
    """Verify a recoverable ECDSA signature against the sender address.

    :param msg_hash: 32-byte message hash
    :param signature: 65 bytes — a 64-byte compact signature followed by a
        1-byte recovery id
    :param sender: expected address string
    :return: True when the address recovered from the signature equals sender
    """
    if isinstance(msg_hash, bytes) \
            and len(msg_hash) == 32 \
            and isinstance(signature, bytes) \
            and len(signature) == 65:
        # Split into the 64-byte signature body and the trailing recovery id.
        origin_sig, rec_id = signature[:-1], signature[-1]
        recoverable_sig = _public_key.ecdsa_recoverable_deserialize(origin_sig, rec_id)
        # raw=True / digest=None: msg_hash is already a digest, do not re-hash.
        internal_pubkey = _public_key.ecdsa_recover(msg_hash, recoverable_sig, raw=True, digest=None)
        public_key = PublicKey(internal_pubkey, raw=False, ctx=_public_key.ctx).serialize(compressed=False)
        address: str = address_from_pubkey(public_key)
        # NOTE(review): these log on every call, success included — confirm intended.
        Logger.info(f'Expected address={sender}', "verify_signature")
        Logger.info(f'Signed address={address}', "verify_signature")
        if address == sender:
            return True
    return False
def run(self, icx_db: 'KeyValueDatabase', rc_db: 'KeyValueDatabase',
        revision: int, prev_block: 'Block', block_batch: 'BlockBatch',
        iiss_wal: 'IissWAL', is_calc_period_start_block: bool, instant_block_hash: bytes):
    """Backup the previous block state

    :param icx_db:
    :param rc_db:
    :param revision:
    :param prev_block: the latest confirmed block height during commit
    :param block_batch:
    :param iiss_wal:
    :param is_calc_period_start_block:
    :param instant_block_hash:
    :return:
    """
    Logger.debug(tag=TAG, msg="backup() start")

    path: str = self._get_backup_file_path(prev_block.height)
    Logger.info(tag=TAG, msg=f"backup_file_path={path}")

    # max_log_count=2: exactly two logs are written below — rc_db first,
    # then state_db. The order matters to the WAL reader.
    writer = WriteAheadLogWriter(
        revision, max_log_count=2, block=prev_block, instant_block_hash=instant_block_hash)
    writer.open(path)

    if is_calc_period_start_block:
        # NOTE(review): the start-block flag records CALC_PERIOD_END_BLOCK
        # state — presumably because prev_block closed the period; confirm.
        writer.write_state(WALBackupState.CALC_PERIOD_END_BLOCK.value)

    self._backup_rc_db(writer, rc_db, iiss_wal)
    self._backup_state_db(writer, icx_db, block_batch)

    writer.close()

    Logger.debug(tag=TAG, msg="backup() end")
async def _on_recv(self, reader: 'StreamReader'):
    """Read framed responses from the peer and dispatch them to the queue handler."""
    Logger.info(tag=_TAG, msg="_on_recv() start")

    while self._running:
        try:
            data: bytes = await reader.read(1024)
            if not (isinstance(data, bytes) and data):
                # EOF or unexpected payload: stop receiving.
                break

            Logger.debug(tag=_TAG, msg=f"_on_recv(): data({data.hex()})")

            # Feed the streaming unpacker; it yields every complete message.
            self._unpacker.feed(data)
            for response in self._unpacker:
                Logger.info(tag=_TAG, msg=f"Received Data : {response}")
                self._queue.message_handler(response)
        except asyncio.CancelledError:
            pass
        except BaseException as e:
            Logger.warning(tag=_TAG, msg=str(e))

    Logger.info(tag=_TAG, msg="_on_recv() end")