Code Example #1
    def get_chain_head_hash_at_timestamp(self, address, timestamp):
        validate_canonical_address(address, title="Wallet Address")
        validate_uint256(timestamp, title='timestamp')
        # make sure it isn't in the future
        if timestamp > int(time.time()):
            raise InvalidHeadRootTimestamp()

        # make sure the timestamp is aligned to the head hash save interval
        if timestamp % TIME_BETWEEN_HEAD_HASH_SAVE != 0:
            raise InvalidHeadRootTimestamp()

        historical_roots = self.get_historical_root_hashes()
        if historical_roots is None:
            return None

        if timestamp < historical_roots[0][0]:
            return None

        historical_roots_dict = dict(historical_roots)

        try:
            historical_root = historical_roots_dict[timestamp]
        except KeyError:
            historical_root = historical_roots[-1][1]

        new_chain_head_db = ChainHeadDB(self.db, historical_root)
        head_hash = new_chain_head_db._trie_cache.get(address)
        return head_hash
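A minimal usage sketch for the lookup above, with hypothetical names: `chain_head_db` stands for an instance of the surrounding class and `wallet_address` for a canonical address; `TIME_BETWEEN_HEAD_HASH_SAVE` is the same constant the method checks against. The timestamp must be aligned to the save interval and must not lie in the future, otherwise `InvalidHeadRootTimestamp` is raised.

    import time

    # Round the current time down to the nearest save window (hypothetical usage).
    aligned_timestamp = int(time.time() / TIME_BETWEEN_HEAD_HASH_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE

    head_hash = chain_head_db.get_chain_head_hash_at_timestamp(wallet_address, aligned_timestamp)
    if head_hash is None:
        # Either no historical root hashes exist yet, or the timestamp predates the oldest saved root.
        pass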
Code Example #2
 def set_current_syncing_info(self, timestamp: Timestamp,
                              head_root_hash: Hash32) -> None:
     validate_is_bytes(head_root_hash, title='Head Root Hash')
     validate_uint256(timestamp, title='timestamp')
     encoded = rlp.encode([timestamp, head_root_hash],
                          sedes=CurrentSyncingInfo)
     self.db[SchemaV1.make_current_syncing_info_lookup_key()] = encoded
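For symmetry, a sketch of how the record written above could be read back. It assumes the same `SchemaV1` lookup-key helper and the `CurrentSyncingInfo` sedes used in the encode call, and that a matching entry exists in `self.db`.

    encoded = self.db[SchemaV1.make_current_syncing_info_lookup_key()]
    syncing_info = rlp.decode(encoded, sedes=CurrentSyncingInfo)
    # syncing_info carries the timestamp and head_root_hash that were encoded above.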
Code Example #3
    def delete_block_hash_from_chronological_window(
            self,
            head_hash: Hash32,
            timestamp: Timestamp = None,
            window_timestamp: Timestamp = None) -> None:
        '''
        If timestamp is given, then [timestamp, head_hash] is deleted from the list. This is fastest.
        If only head_hash and window_timestamp are given, without a timestamp, then we search the list for the given hash and delete it. This is slower.
        :param head_hash:
        :param timestamp:
        :param window_timestamp:
        :return:
        '''
        validate_is_bytes(head_hash, title='Head Hash')
        if timestamp is not None:
            validate_uint256(timestamp, title='timestamp')

        if timestamp is None and window_timestamp is not None:
            # we search now for just the hash
            if window_timestamp > int(time.time()) - (
                    NUMBER_OF_HEAD_HASH_TO_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE:
                # unlike the root hashes, this window is for the blocks added after the time
                window_timestamp = int(
                    window_timestamp /
                    TIME_BETWEEN_HEAD_HASH_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE

                data = self.load_chronological_block_window(window_timestamp)
                if data is None:
                    return

                hashes = [x[1] for x in data]
                try:
                    idx = hashes.index(head_hash)
                    del data[idx]
                except ValueError:
                    return

                # self.logger.debug("Saving chronological block window with new data {}".format(data))
                self.save_chronological_block_window(
                    data, window_timestamp)

        else:
            # only delete block hashes from the proper time period
            if timestamp > int(time.time()) - (
                    NUMBER_OF_HEAD_HASH_TO_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE:
                # unlike the root hashes, this window is for the blocks added after the time
                window_for_this_block = int(
                    timestamp /
                    TIME_BETWEEN_HEAD_HASH_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE

                data = self.load_chronological_block_window(
                    window_for_this_block)
                if data is not None:
                    # remove the [timestamp, head_hash] entry if it is present
                    try:
                        data.remove([timestamp, head_hash])
                    except ValueError:
                        pass

                if data is not None:
                    #self.logger.debug("Saving chronological block window with new data {}".format(new_data))
                    self.save_chronological_block_window(
                        data, window_for_this_block)
Code Example #4
    def get_historical_root_hash(self,
                                 timestamp: Timestamp,
                                 return_timestamp: bool = False
                                 ) -> Tuple[Optional[Timestamp], Hash32]:
        '''
        This returns the historical root hash for a given timestamp.
        If no root hash exists for this timestamp, it will return the latest root hash prior to this timestamp
        '''
        validate_uint256(timestamp, title='timestamp')
        if timestamp % TIME_BETWEEN_HEAD_HASH_SAVE != 0:
            timestamp = int(
                timestamp /
                TIME_BETWEEN_HEAD_HASH_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE
        historical = self.get_historical_root_hashes()

        root_hash_to_return = None
        timestamp_to_return = None

        timestamps = [x[0] for x in historical]
        right_index = bisect.bisect_right(timestamps, timestamp)
        if right_index:
            index = right_index - 1
            timestamp_to_return, root_hash_to_return = historical[index]

        if return_timestamp:
            return timestamp_to_return, root_hash_to_return
        else:
            return root_hash_to_return
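A short sketch of the fallback behaviour described in the docstring, using hypothetical names (`chain_head_db` is an instance of the class, `query_timestamp` any uint256 timestamp):

    saved_timestamp, root_hash = chain_head_db.get_historical_root_hash(
        query_timestamp, return_timestamp=True)

    if saved_timestamp is None:
        # query_timestamp is older than every saved root hash; nothing to load
        pass
    else:
        # saved_timestamp is the latest save interval at or before query_timestamp
        assert saved_timestamp <= query_timestamp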
Code Example #5
    def __init__(self,
                 gas,
                 to,
                 sender,
                 value,
                 data,
                 code,
                 depth=0,
                 create_address=None,
                 code_address=None,
                 should_transfer_value=True,
                 is_static=False,
                 refund_amount=0):
        validate_uint256(gas, title="Message.gas")
        self.gas = gas  # type: int

        if to != CREATE_CONTRACT_ADDRESS:
            validate_canonical_address(to, title="Message.to")
        self.to = to

        validate_canonical_address(sender, title="Message.sender")
        self.sender = sender

        validate_uint256(value, title="Message.value")
        self.value = value

        validate_is_bytes(data, title="Message.data")
        self.data = data

        validate_is_integer(depth, title="Message.depth")
        validate_gte(depth, minimum=0, title="Message.depth")
        self.depth = depth

        validate_is_bytes(code, title="Message.code")
        self.code = code

        if create_address is not None:
            validate_canonical_address(create_address,
                                       title="Message.storage_address")
        self.storage_address = create_address

        if code_address is not None:
            validate_canonical_address(code_address,
                                       title="Message.code_address")
        self.code_address = code_address

        validate_is_boolean(should_transfer_value,
                            title="Message.should_transfer_value")
        self.should_transfer_value = should_transfer_value

        validate_is_integer(refund_amount, title="Message.refund_amount")
        self.refund_amount = refund_amount

        validate_is_boolean(is_static, title="Message.is_static")
        self.is_static = is_static
Code Example #6
 def save_chronological_block_window(self, data, timestamp):
     validate_uint256(timestamp, title='timestamp')
     if timestamp % TIME_BETWEEN_HEAD_HASH_SAVE != 0:
         raise InvalidHeadRootTimestamp("Can only save or load chronological block for timestamps in increments of {} seconds.".format(TIME_BETWEEN_HEAD_HASH_SAVE))
     
     chronological_window_lookup_key = SchemaV1.make_chronological_window_lookup_key(timestamp)
     encoded_data = rlp.encode(data,sedes=rlp.sedes.FCountableList(rlp.sedes.FList([f_big_endian_int, hash32])))
     self.db.set(
         chronological_window_lookup_key,
         encoded_data,
     )
Code Example #7
 def delete_chronological_block_window(self, timestamp):
     validate_uint256(timestamp, title='timestamp')
     if timestamp % TIME_BETWEEN_HEAD_HASH_SAVE != 0:
         raise InvalidHeadRootTimestamp("Can only save or load chronological block for timestamps in increments of {} seconds.".format(TIME_BETWEEN_HEAD_HASH_SAVE))
     
     self.logger.debug("deleting chronological block window for timestamp {}".format(timestamp))
     chronological_window_lookup_key = SchemaV1.make_chronological_window_lookup_key(timestamp)
     try:
         del(self.db[chronological_window_lookup_key])
     except KeyError:
         pass
Code Example #8
 def load_chronological_block_window(self, timestamp: Timestamp) -> Optional[List[Union[int, Hash32]]]:
     validate_uint256(timestamp, title='timestamp')
     if timestamp % TIME_BETWEEN_HEAD_HASH_SAVE != 0:
         raise InvalidHeadRootTimestamp("Can only save or load chronological block for timestamps in increments of {} seconds.".format(TIME_BETWEEN_HEAD_HASH_SAVE))
     
     chronological_window_lookup_key = SchemaV1.make_chronological_window_lookup_key(timestamp)
     try:
         data = rlp.decode(self.db[chronological_window_lookup_key], sedes=rlp.sedes.FCountableList(rlp.sedes.FList([f_big_endian_int, hash32])), use_list = True)
         data.sort()
         return data
     except KeyError:
         return None
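The save, delete, and load helpers above all key the database by a window timestamp that must be an exact multiple of `TIME_BETWEEN_HEAD_HASH_SAVE`. A minimal round-trip sketch under that assumption, with hypothetical `chain_head_db`, `block_timestamp`, and `block_hash`:

    # Map an arbitrary block timestamp onto its chronological window.
    window_timestamp = int(block_timestamp / TIME_BETWEEN_HEAD_HASH_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE

    window = chain_head_db.load_chronological_block_window(window_timestamp) or []
    window.append([block_timestamp, block_hash])
    chain_head_db.save_chronological_block_window(window, window_timestamp)

    # The whole window can later be dropped with a single call.
    chain_head_db.delete_chronological_block_window(window_timestamp)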
Code Example #9
    def get_storage(self, address, slot):
        validate_canonical_address(address, title="Storage Address")
        validate_uint256(slot, title="Storage Slot")

        account = self._get_account(address)
        storage = HashTrie(HexaryTrie(self._journaldb, account.storage_root))

        slot_as_key = pad32(int_to_big_endian(slot))

        if slot_as_key in storage:
            encoded_value = storage[slot_as_key]
            return rlp.decode(encoded_value, sedes=rlp.sedes.big_endian_int)
        else:
            return 0
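The storage key built above is simply the 32-byte big-endian form of the slot number. A tiny illustration of that encoding, reusing the `pad32` and `int_to_big_endian` helpers from the method:

    slot = 1
    slot_as_key = pad32(int_to_big_endian(slot))

    assert len(slot_as_key) == 32
    assert slot_as_key == b'\x00' * 31 + b'\x01'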
Code Example #10
    def save_current_historical_network_tpc_capability(
            self, current_tpc_capability: int) -> None:
        validate_uint256(current_tpc_capability,
                         title="current_tpc_capability")
        if current_tpc_capability < 1:
            current_tpc_capability = 1

        existing = self.load_historical_network_tpc_capability()
        current_centisecond = int(time.time() / 100) * 100
        if existing is None:
            existing = [[current_centisecond, current_tpc_capability]]
        else:
            existing.append([current_centisecond, current_tpc_capability])
        self.save_historical_network_tpc_capability(existing, de_sparse=True)
Code Example #11
    def get_next_n_head_block_hashes(self,
                                     prev_head_hash=ZERO_HASH32,
                                     window_start=0,
                                     window_length=1,
                                     root_hash=None,
                                     reverse=False):
        """
        Gets the next head block hash in the leaves of the binary trie
        """

        validate_is_bytes(prev_head_hash, title='prev_head_hash')
        validate_uint256(window_start, title='window_start')
        validate_uint256(window_length, title='window_length')

        if root_hash is None:
            root_hash = self.root_hash

        validate_is_bytes(root_hash, title='Root Hash')

        output_list = []
        next = False
        i = 0
        j = 0
        last = None
        for head_hash in self.get_head_block_hashes(root_hash,
                                                    reverse=reverse):

            if next or (prev_head_hash == ZERO_HASH32
                                and window_start == 0):
                output_list.append(head_hash)
                i += 1
                if i >= window_length:
                    return output_list

            if head_hash == prev_head_hash or prev_head_hash == ZERO_HASH32:
                if prev_head_hash == ZERO_HASH32:
                    j += 1
                if j >= window_start:
                    next = True
                j += 1

            last = head_hash

        #if it gets here then we got to the last chain
        if len(output_list) < 1:
            output_list.append(last)
        return output_list
Code Example #12
    def set_storage(self, address, slot, value):
        validate_uint256(value, title="Storage Value")
        validate_uint256(slot, title="Storage Slot")
        validate_canonical_address(address, title="Storage Address")

        account = self._get_account(address)
        storage = HashTrie(HexaryTrie(self._journaldb, account.storage_root))

        slot_as_key = pad32(int_to_big_endian(slot))

        if value:
            encoded_value = rlp.encode(value)
            storage[slot_as_key] = encoded_value
        else:
            del storage[slot_as_key]

        self._set_account(address, account.copy(storage_root=storage.root_hash))
Code Example #13
    def add_block_hash_to_chronological_window(self, head_hash: Hash32, timestamp: Timestamp) -> None:
        self.logger.debug("add_block_hash_to_chronological_window, hash = {}, timestamp = {}".format(encode_hex(head_hash), timestamp))
        validate_is_bytes(head_hash, title='Head Hash')
        validate_uint256(timestamp, title='timestamp')

        # only add blocks for the proper time period
        if timestamp >= int(time.time()) - (NUMBER_OF_HEAD_HASH_TO_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE:
            # unlike the root hashes, this window is for the blocks added after the time
            window_for_this_block = int(timestamp / TIME_BETWEEN_HEAD_HASH_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE

            data = self.load_chronological_block_window(window_for_this_block)

            if data is None:
                data = [[timestamp, head_hash]]

            else:
                data.append([timestamp, head_hash])

            self.save_chronological_block_window(data, window_for_this_block)
Code Example #14
    def write(self, start_position: int, size: int, value: bytes) -> None:
        """
        Write `value` into memory.
        """
        if size:
            validate_uint256(start_position)
            validate_uint256(size)
            validate_is_bytes(value)
            validate_length(value, length=size)
            validate_lte(start_position + size, maximum=len(self))

            if len(self._bytes) < start_position + size:
                self._bytes.extend(itertools.repeat(
                    0,
                    start_position + size - len(self._bytes),
                ))

            for idx, v in enumerate(value):
                self._bytes[start_position + idx] = v
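A small sketch of the intended write semantics, assuming `memory` is an instance of the class above and enough bytes have already been allocated (normally via the `extend_memory` shown in a later example). A zero-size write is a no-op.

    memory.write(start_position=0, size=4, value=b'\xde\xad\xbe\xef')
    # the first four bytes of memory now hold the value

    memory.write(0, 0, b'')  # size == 0: nothing is validated or written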
Code Example #15
    def __init__(self,
                 origin: Address,
                 send_tx_hash: Hash32,
                 caller_chain_address: Address,
                 gas_price: int = None,
                 receive_tx_hash: Hash32 = None,
                 is_receive: bool = False,
                 is_refund: bool = False):
        if gas_price is not None:
            validate_uint256(gas_price, title="TransactionContext.gas_price")
        self._gas_price = gas_price
        validate_canonical_address(origin, title="TransactionContext.origin")
        self._origin = origin
        validate_canonical_address(caller_chain_address, title='caller_chain_address')
        self._caller_chain_address = caller_chain_address
        validate_is_boolean(is_receive, title="is_receive")
        self._is_receive = is_receive
        validate_is_boolean(is_refund, title="is_refund")
        self._is_refund = is_refund
        validate_word(send_tx_hash, title="send_tx_hash")
        self._send_tx_hash = send_tx_hash
        if receive_tx_hash is not None:
            validate_word(receive_tx_hash, title="receive_tx_hash")
        self._receive_tx_hash = receive_tx_hash

        self._log_counter = itertools.count()
Code Example #16
    def extend_memory(self, start_position: int, size: int) -> None:
        """
        Extend the size of the memory to be at minimum ``start_position + size``
        bytes in length.  Raise `hvm.exceptions.OutOfGas` if there is not enough
        gas to pay for extending the memory.
        """
        validate_uint256(start_position, title="Memory start position")
        validate_uint256(size, title="Memory size")

        before_size = ceil32(len(self._memory))
        after_size = ceil32(start_position + size)

        before_cost = memory_gas_cost(before_size)
        after_cost = memory_gas_cost(after_size)

        self.logger.debug(
            "MEMORY: size (%s -> %s) | cost (%s -> %s)",
            before_size,
            after_size,
            before_cost,
            after_cost,
        )

        if size:
            if before_cost < after_cost:
                gas_fee = after_cost - before_cost
                self._gas_meter.consume_gas(
                    gas_fee,
                    reason=" ".join((
                        "Expanding memory",
                        str(before_size),
                        "->",
                        str(after_size),
                    ))
                )

            self._memory.extend(start_position, size)
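For reference, the `memory_gas_cost` used above normally follows the usual EVM schedule of a linear per-word cost plus a quadratic term. A sketch under that assumption (the constants shown are the standard EVM values, not taken from this codebase):

    GAS_MEMORY = 3
    GAS_MEMORY_QUADRATIC_DENOMINATOR = 512

    def memory_gas_cost(size_in_bytes: int) -> int:
        size_in_words = (size_in_bytes + 31) // 32
        linear_cost = size_in_words * GAS_MEMORY
        quadratic_cost = size_in_words ** 2 // GAS_MEMORY_QUADRATIC_DENOMINATOR
        return linear_cost + quadratic_cost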
Code Example #17
    def save_health_request(
        self,
        peer_wallet_address: Address,
        response_time_in_micros: int = float('inf')
    ) -> None:

        peer_node_health = self.get_current_peer_node_health(
            peer_wallet_address)

        # Fields of the peer node health record:
        # ('requests_sent', f_big_endian_int),
        # ('failed_requests', f_big_endian_int),
        # ('average_response_time', f_big_endian_int)  # milliseconds

        new_requests_sent = peer_node_health.requests_sent + 1
        if response_time_in_micros == float('inf'):
            # the peer didn't respond
            new_failed_requests = peer_node_health.failed_requests + 1
            new_average_response_time = peer_node_health.average_response_time
        else:
            new_failed_requests = peer_node_health.failed_requests
            new_average_response_time = int(
                add_sample_to_average(peer_node_health.average_response_time,
                                      response_time_in_micros,
                                      new_requests_sent))

        validate_uint256(new_requests_sent, title="new_requests_sent")
        validate_uint256(new_failed_requests, title="new_failed_requests")
        validate_uint256(new_average_response_time,
                         title="new_average_response_time")

        self.set_current_peer_node_health(
            peer_wallet_address,
            peer_node_health.copy(
                requests_sent=new_requests_sent,
                failed_requests=new_failed_requests,
                average_response_time=new_average_response_time))

        #save this time as the latest timestamp for save health request
        lookup_key = SchemaV1.make_latest_peer_node_health_timestamp_lookup_key()
        timestamp_rounded_peer_node_health_check = int(
            int(time.time() / (TIME_BETWEEN_PEER_NODE_HEALTH_CHECK)) *
            (TIME_BETWEEN_PEER_NODE_HEALTH_CHECK))
        rlp_encoded = rlp.encode(timestamp_rounded_peer_node_health_check,
                                 sedes=rlp.sedes.f_big_endian_int)
        self.db[lookup_key] = rlp_encoded
Code Example #18
    def set_block_number(self, address, block_number):
        validate_canonical_address(address, title="Storage Address")
        validate_uint256(block_number, title="Block Number")

        account = self._get_account(address)
        self._set_account(address, account.copy(block_number=block_number))
Code Example #19
    def set_nonce(self, address, nonce):
        validate_canonical_address(address, title="Storage Address")
        validate_uint256(nonce, title="Nonce")

        account = self._get_account(address)
        self._set_account(address, account.copy(nonce=nonce))
Code Example #20
    def set_balance(self, address, balance):
        validate_canonical_address(address, title="Storage Address")
        validate_uint256(balance, title="Account Balance")

        account = self._get_account(address)
        self._set_account(address, account.copy(balance=balance))
Code Example #21
    def validate_reward_bundle(self, reward_bundle: StakeRewardBundle,
                               chain_address: Address,
                               block_timestamp: Timestamp) -> None:

        latest_reward_block_number = self.chaindb.get_latest_reward_block_number(
            chain_address)
        latest_reward_block_timestamp = self.chaindb.get_canonical_block_header_by_number(
            latest_reward_block_number, chain_address).timestamp

        validate_uint256(block_timestamp)

        # need to check to make sure it has been long enough since the last reward block.
        if block_timestamp - latest_reward_block_timestamp < self.min_time_between_reward_blocks:
            raise ValidationError(
                "Not enough time between reward blocks. Got {}, expected {}".
                format((block_timestamp - latest_reward_block_timestamp),
                       self.min_time_between_reward_blocks))

        # First we validate reward type 1. All reward bundles must contain this.
        # We have to allow some timestamp variability because the node calculates this amount
        # just before it sets the block timestamp.
        reward_type_1_max_amount = self.calculate_final_reward_type_1_amount(
            chain_address, block_timestamp +
            REWARD_BLOCK_AND_BUNDLE_TIMESTAMP_VARIABILITY_ALLOWANCE)
        reward_type_1_min_amount = self.calculate_final_reward_type_1_amount(
            chain_address, block_timestamp -
            REWARD_BLOCK_AND_BUNDLE_TIMESTAMP_VARIABILITY_ALLOWANCE)

        if reward_bundle.reward_type_1.amount > reward_type_1_max_amount or reward_bundle.reward_type_1.amount < reward_type_1_min_amount:
            raise ValidationError(
                "Reward type 1 amount is not within the allowed range. Allowed from {} to {} but got {}"
                .format(reward_type_1_min_amount, reward_type_1_max_amount,
                        reward_bundle.reward_type_1.amount))

        #next we validate reward type 2. Only some bundles will contain this.
        if reward_bundle.reward_type_2.amount != 0:
            # need to create function that validates the reward.
            # check timestamps are all near the block timestamp. leave wiggle room for network latency
            # here we also make sure there aren't multiple scores for one chain
            proof_senders = set()
            for node_staking_score in reward_bundle.reward_type_2.proof:
                if node_staking_score.sender in proof_senders:
                    raise ValidationError(
                        "Multiple reward type 2 proofs from the same sender. Sender address: {}"
                        .format(encode_hex(node_staking_score.sender)))
                proof_senders.add(node_staking_score.sender)

                self.validate_node_staking_score_with_context(
                    node_staking_score,
                    chain_address=chain_address,
                    block_timestamp=block_timestamp,
                    latest_reward_block_number=latest_reward_block_number)

            #These functions will check for minimum required stake, and minimum number of proofs.
            reward_type_2_max_amount, proof_list = self.calculate_final_reward_type_2_amount(
                list(reward_bundle.reward_type_2.proof), block_timestamp +
                REWARD_BLOCK_AND_BUNDLE_TIMESTAMP_VARIABILITY_ALLOWANCE)
            reward_type_2_min_amount, _ = self.calculate_final_reward_type_2_amount(
                list(reward_bundle.reward_type_2.proof), block_timestamp -
                REWARD_BLOCK_AND_BUNDLE_TIMESTAMP_VARIABILITY_ALLOWANCE)

            # make sure they aren't including more proofs than necessary
            if len(proof_list) != len(reward_bundle.reward_type_2.proof):
                raise ValidationError(
                    "The reward type 2 proof contains too many entries. Expected {}, but got {}."
                    .format(len(proof_list),
                            len(reward_bundle.reward_type_2.proof)))

            if reward_bundle.reward_type_2.amount > reward_type_2_max_amount or reward_bundle.reward_type_2.amount < reward_type_2_min_amount:
                raise ValidationError(
                    "Reward type 2 amount is not within the allowed range. Allowed from {} to {} but got {}"
                    .format(reward_type_2_min_amount, reward_type_2_max_amount,
                            reward_bundle.reward_type_2.amount))
        else:
            # if the value is 0, let's make sure there are no proofs
            if len(reward_bundle.reward_type_2.proof) > 0:
                raise ValidationError(
                    "Reward type 2 has a value of 0, but there is proof given. Don't need proof if there is no amount."
                )
Code Example #22
    def validate(self):

        validate_canonical_address(self.recipient_node_wallet_address, title="recipient_node_wallet_address")
        validate_uint256(self.score, title="score")
        validate_uint256(self.since_block_number, title="since_block_number")
        validate_uint256(self.timestamp, title="timestamp")

        validate_uint256(self.v, title="v")
        validate_uint256(self.r, title="r")
        validate_uint256(self.s, title="s")

        validate_lt_secpk1n(self.r, title="r")
        validate_gte(self.r, minimum=1, title="r")
        validate_lt_secpk1n(self.s, title="s")
        validate_gte(self.s, minimum=1, title="s")

        validate_gte(self.v, minimum=self.v_min, title="v")
        validate_lte(self.v, maximum=self.v_max, title="v")

        validate_lt_secpk1n2(self.s, title="s")
Code Example #23
    def calculate_reward_based_on_fractional_interest(
            self,
            wallet_address: Address,
            fractional_interest: float,
            at_timestamp: Timestamp = None,
            include_masternode_bonus=True) -> int:
        '''
        #
        # Added in different masternode levels
        #
        Here we assume the time period for the reward starts from the latest reward block. This is a valid assumption
        because blocks can only be added to the top of the chain
        :param wallet_address:
        :param fractional_interest:
        :param at_timestamp:
        :return:
        '''
        validate_canonical_address(wallet_address, 'wallet_address')

        if at_timestamp is None:
            at_timestamp = int(time.time())
        validate_uint256(at_timestamp, title="at_timestamp")

        latest_reward_block_number = self.chaindb.get_latest_reward_block_number(wallet_address)
        try:
            since_timestamp = self.chaindb.get_canonical_block_header_by_number(latest_reward_block_number,
                                                                                wallet_address).timestamp
        except HeaderNotFound:
            return 0
        canonical_head_block_number = self.chaindb.get_canonical_head(wallet_address).block_number

        # loop backwards to make things simpler
        calc_to_timestamp = at_timestamp
        amount = 0
        for current_block_number in range(canonical_head_block_number, -1, -1):
            header = self.chaindb.get_canonical_block_header_by_number(BlockNumber(current_block_number), wallet_address)

            header_mature_timestamp = header.timestamp + self.coin_mature_time_for_staking
            # this finds the start of the calculation
            if header_mature_timestamp >= calc_to_timestamp:
                continue

            # this finds the end of the calculation
            if calc_to_timestamp <= since_timestamp:
                break

            # if header_mature_timestamp <= since_timestamp:
            #     break

            if header_mature_timestamp < since_timestamp:
                # the block is older than since_timestamp, but there is still a small window of coin_mature_time_for_staking to add in
                time_difference = int(calc_to_timestamp - since_timestamp)
            else:
                time_difference = int(calc_to_timestamp - header_mature_timestamp)

            calc_stake = header.account_balance
            if include_masternode_bonus:
                if calc_stake >= self.masternode_level_3_required_balance:
                    masternode_multiplier = self.masternode_level_3_multiplier
                elif calc_stake >= self.masternode_level_2_required_balance:
                    masternode_multiplier = self.masternode_level_2_multiplier
                elif calc_stake >= self.masternode_level_1_required_balance:
                    masternode_multiplier = self.masternode_level_1_multiplier
                else:
                    masternode_multiplier = 1

                if at_timestamp < EARLY_BIRD_BONUS_CUTOFF_TIMESTAMP:
                    masternode_multiplier = Decimal(masternode_multiplier*EARLY_BIRD_BONUS_FACTOR)
            else:
                masternode_multiplier = 1


            amount += int(time_difference * calc_stake * fractional_interest * masternode_multiplier)
            # print('XXXXXXXXXXXXXXXXXXX')
            # print(amount, time_difference, calc_stake, fractional_interest, masternode_multiplier)

            #print("actual calculation = {} * {} * {} * {}".format(time_difference, calc_stake, fractional_interest, masternode_multiplier))

            calc_to_timestamp = header_mature_timestamp

        # If we calculate all the way back to the genesis block, there is a small
        # coin_mature_time_for_staking window that we miss. However, that window has 0 stake, so it would add nothing.
        return int(amount)
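A tiny worked example of the per-block term accumulated above, with made-up numbers purely for illustration:

    time_difference = 86400            # one day of mature stake, in seconds
    calc_stake = 10 * 10**18           # balance held over that window, in smallest units
    fractional_interest = 1e-9         # hypothetical per-second interest rate
    masternode_multiplier = 1          # no masternode bonus in this example

    amount = int(time_difference * calc_stake * fractional_interest * masternode_multiplier)
    # 86400 * 1e19 * 1e-9 is about 8.64e14 units contributed by this block's window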
Code Example #24
    def validate(self):
        validate_uint256(self.nonce, title="Transaction.nonce")
        validate_uint256(self.gas_price, title="Transaction.gas_price")
        validate_uint256(self.gas, title="Transaction.gas")
        if self.to != CREATE_CONTRACT_ADDRESS:
            validate_canonical_address(self.to, title="Transaction.to")
        validate_uint256(self.value, title="Transaction.value")
        validate_is_bytes(self.data, title="Transaction.data")

        validate_uint256(self.v, title="Transaction.v")
        validate_uint256(self.r, title="Transaction.r")
        validate_uint256(self.s, title="Transaction.s")

        validate_lt_secpk1n(self.r, title="Transaction.r")
        validate_gte(self.r, minimum=1, title="Transaction.r")
        validate_lt_secpk1n(self.s, title="Transaction.s")
        validate_gte(self.s, minimum=1, title="Transaction.s")

        validate_gte(self.v, minimum=self.v_min, title="Transaction.v")
        validate_lte(self.v, maximum=self.v_max, title="Transaction.v")

        super(HeliosTestnetTransaction, self).validate()
        validate_lt_secpk1n2(self.s, title="Transaction.s")
Code Example #25
def test_validate_uint256(value, is_valid):
    if is_valid:
        validate_uint256(value)
    else:
        with pytest.raises(ValidationError):
            validate_uint256(value)
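The parametrisation driving this test is not shown in the excerpt; a plausible sketch with purely illustrative cases (the `...` body stands in for the function shown above):

    import pytest

    @pytest.mark.parametrize(
        'value,is_valid',
        (
            (0, True),
            (2**256 - 1, True),
            (-1, False),
            (2**256, False),
            ('1', False),
        ),
    )
    def test_validate_uint256(value, is_valid):
        ...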
Code Example #26
    def add_block_hash_to_timestamp(self, address, head_hash, timestamp):

        self.logger.debug("add_block_hash_to_timestamp")

        validate_canonical_address(address, title="Wallet Address")
        validate_is_bytes(head_hash, title='Head Hash')
        validate_uint256(timestamp, title='timestamp')


        currently_saving_window = (int(time.time() / TIME_BETWEEN_HEAD_HASH_SAVE)
                                   * TIME_BETWEEN_HEAD_HASH_SAVE + TIME_BETWEEN_HEAD_HASH_SAVE)
        # make sure it isn't in the future
        if timestamp > currently_saving_window:
            raise InvalidHeadRootTimestamp()

        # make sure the timestamp is aligned to the save interval
        if timestamp % TIME_BETWEEN_HEAD_HASH_SAVE != 0:
            raise InvalidHeadRootTimestamp()
        
        starting_timestamp, existing_root_hash = self.get_historical_root_hash(timestamp, return_timestamp = True)
        historical_roots = self.get_historical_root_hashes()

        if historical_roots is None:
            if head_hash == BLANK_HASH:
                self.delete_chain_head_hash(address)
            else:
                self.set_chain_head_hash(address, head_hash)
            self.persist()
            historical_roots = [[timestamp, self.root_hash]]
        else:

            if starting_timestamp is None:
                #this means there is no saved root hash that is at this time or before it. 
                #so we have no root hash to load
                self.logger.debug("Tried appending block hash to timestamp for time earlier than earliest timestamp. "
                                  "Adding to timestamp {}. ".format(timestamp))
            else:

                new_blockchain_head_db = ChainHeadDB(self.db, existing_root_hash)
                if head_hash == BLANK_HASH:
                    new_blockchain_head_db.delete_chain_head_hash(address)
                else:
                    new_blockchain_head_db.set_chain_head_hash(address, head_hash)
                new_blockchain_head_db.persist()
                new_root_hash = new_blockchain_head_db.root_hash

                if starting_timestamp == timestamp:
                    #we already had a root hash for this timestamp. just update the existing one.
                    #self.logger.debug("adding block hash to timestamp without propogating. root hash already existed. updating for time {}".format(timestamp))
                    historical_roots_dict = dict(historical_roots)
                    historical_roots_dict[timestamp] = new_root_hash
                    historical_roots = list(historical_roots_dict.items())
                    #self.logger.debug("finished adding block to timestamp. last_hist_root = {}, current_root_hash = {}".format(historical_roots[-1][1], self.root_hash))
                    #self.logger.debug(new_root_hash)
                else:
                    #self.logger.debug("adding block hash to timestamp without propogating. root hash didnt exist")
                    #sorted_historical_roots = SortedList(historical_roots)
                    historical_roots_dict = dict(historical_roots)
                    for loop_timestamp in range(starting_timestamp, timestamp, TIME_BETWEEN_HEAD_HASH_SAVE):
                        historical_roots_dict[loop_timestamp] = existing_root_hash

                    historical_roots_dict[timestamp] = new_root_hash
                    historical_roots = list(historical_roots_dict.items())
                
        # now propagate the new head hash to any saved historical root hashes newer than this one.
        # efficiently do this by starting from the end and working back.
        if historical_roots[-1][0] > timestamp:
            self.logger.debug("propogating historical root hash timestamps forward")
            for i in range(len(historical_roots)-1, -1, -1):
                if historical_roots[i][0] <= timestamp:
                    break
                
                root_hash_to_load = historical_roots[i][1]
                new_blockchain_head_db = ChainHeadDB(self.db, root_hash_to_load)
                if head_hash == BLANK_HASH:
                    new_blockchain_head_db.delete_chain_head_hash(address)
                else:
                    new_blockchain_head_db.set_chain_head_hash(address, head_hash)
                new_blockchain_head_db.persist()
                new_root_hash = new_blockchain_head_db.root_hash
                
                # have to do this in case it is a tuple that we cannot modify in place
                cur_timestamp = historical_roots[i][0]
                historical_roots[i] = [cur_timestamp,new_root_hash]
         
        # let's now make sure our root hash is the same as the last historical one. It is possible that another
        # thread or chain object has imported a block since this one was initialized.

        self.save_historical_root_hashes(historical_roots)
        
        self.root_hash = historical_roots[-1][1]