Exemplo n.º 1
0
    def __init__(self,
                 registry: BaseContractRegistry,
                 checksum_address: str = None,
                 *args,
                 **kwargs):
        """Track on-chain stakes for a single, optionally-unset, staker address."""
        super().__init__(*args, **kwargs)
        self.log = Logger('stake-tracker')
        self.staking_agent = ContractAgency.get_agent(StakingEscrowAgent,
                                                      registry=registry)

        # Local import avoids a circular dependency with the economics module.
        from nucypher.blockchain.economics import EconomicsFactory
        self.economics = EconomicsFactory.get_economics(registry=registry)

        # Sentinels until stakes are read from the chain.
        self.__initial_period = NOT_STAKING
        self.__terminal_period = NOT_STAKING

        # "load-in": read on-chain stakes.  A falsy address leaves the tracker
        # as an empty collection; a given address must be EIP-55 checksummed.
        if checksum_address and not is_checksum_address(checksum_address):
            raise ValueError(f'{checksum_address} is not a valid EIP-55 checksum address')
        self.checksum_address = checksum_address
        self.__updated = None
Exemplo n.º 2
0
    def __init__(self,
                 db_filepath: str,
                 rest_host: str,
                 rest_port: int,
                 client_password: str = None,
                 crash_on_error: bool = False,
                 distribute_ether: bool = True,
                 registry: BaseContractRegistry = None,
                 *args,
                 **kwargs):
        """
        Initialize the token faucet ("Felix"): REST settings, the SQLite
        database engine, blockchain transacting power, and the periodic
        token-distribution task.

        :param db_filepath: Filesystem path of the SQLite database.
        :param rest_host: Host/interface for the REST application.
        :param rest_port: Port for the REST application.
        :param client_password: Password used to unlock the transacting account.
        :param crash_on_error: If True, propagate errors instead of continuing.
        :param distribute_ether: If True, send ether with each token transaction.
        :param registry: Contract registry used to resolve deployed contracts.
        """
        # Character
        super().__init__(registry=registry, *args, **kwargs)
        # Logger name is tagged with the last six characters of our address.
        self.log = Logger(f"felix-{self.checksum_address[-6::]}")

        # Network
        self.rest_port = rest_port
        self.rest_host = rest_host
        self.rest_app = NOT_RUNNING  # sentinel until the REST app is built
        self.crash_on_error = crash_on_error

        # Database
        self.db_filepath = db_filepath
        self.db = NO_DATABASE_AVAILABLE  # sentinel until the DB is created
        # NOTE(review): `convert_unicode` is a legacy SQLAlchemy flag (removed
        # in SQLAlchemy 1.4+) - confirm the pinned version still accepts it.
        self.db_engine = create_engine(f'sqlite:///{self.db_filepath}',
                                       convert_unicode=True)

        # Blockchain
        # Unlock and cache the transacting power for this account up front so
        # later token transfers do not need the password again.
        transacting_power = TransactingPower(password=client_password,
                                             account=self.checksum_address,
                                             cache=True)
        self._crypto_power.consume_power_up(transacting_power)

        self.token_agent = ContractAgency.get_agent(NucypherTokenAgent,
                                                    registry=registry)
        self.blockchain = self.token_agent.blockchain
        # Addresses excluded from distribution (presumably to prevent
        # self-transfers / burns) - starts with us and the null address.
        self.reserved_addresses = [
            self.checksum_address, BlockchainInterface.NULL_ADDRESS
        ]

        # Update reserved addresses with deployed contracts
        existing_entries = list(registry.enrolled_addresses)
        self.reserved_addresses.extend(existing_entries)

        # Distribution
        self.__distributed = 0  # Track NU Output
        self.__airdrop = 0  # Track Batch
        self.__disbursement = 0  # Track Quantity
        self._distribution_task = LoopingCall(f=self.airdrop_tokens)
        self._distribution_task.clock = self._CLOCK
        self.start_time = NOT_RUNNING  # sentinel until distribution starts

        self.economics = EconomicsFactory.get_economics(registry=registry)
        self.MAXIMUM_DISBURSEMENT = self.economics.maximum_allowed_locked
        self.INITIAL_DISBURSEMENT = self.economics.minimum_allowed_locked * 3

        # Optionally send ether with each token transaction
        self.distribute_ether = distribute_ether
        # Banner
        self.log.info(FELIX_BANNER.format(self.checksum_address))
def test_retrieving_from_blockchain(token_economics, test_registry):
    """Economics read back from the chain must match the fixture economics."""
    onchain = EconomicsFactory.get_economics(registry=test_registry)
    for attribute in ('staking_deployment_parameters',
                      'slashing_deployment_parameters',
                      'worklock_deployment_parameters'):
        assert getattr(onchain, attribute) == getattr(token_economics, attribute)
Exemplo n.º 4
0
def confirm_activity(general_config, character_options, config_file):
    """
    Manually confirm-activity for the current period.

    Creates the Ursula character from the given config, submits the
    confirm-activity transaction, and paints a receipt summary.
    """
    emitter = _setup_emitter(general_config,
                             character_options.config_options.worker_address)
    _pre_launch_warnings(emitter,
                         dev=character_options.config_options.dev,
                         force=None)
    _, URSULA = character_options.create_character(emitter,
                                                   config_file,
                                                   general_config.json_ipc,
                                                   load_seednodes=False)

    # Activity is always confirmed for the *next* period.
    confirmed_period = URSULA.staking_agent.get_current_period() + 1
    # BUGFIX: click.echo's `color` kwarg is an ANSI on/off toggle, not a color
    # name, so the original message was never blue - secho(fg=...) colorizes.
    click.secho(f"Confirming activity for period {confirmed_period}",
                fg='blue')
    receipt = URSULA.confirm_activity()

    economics = EconomicsFactory.get_economics(registry=URSULA.registry)
    date = datetime_at_period(period=confirmed_period,
                              seconds_per_period=economics.seconds_per_period)

    # TODO: Double-check dates here
    emitter.echo(
        f'\nActivity confirmed for period #{confirmed_period} '
        f'(starting at {date})',
        bold=True,
        color='blue')
    painting.paint_receipt_summary(
        emitter=emitter,
        receipt=receipt,
        chain_name=URSULA.staking_agent.blockchain.client.chain_name)
Exemplo n.º 5
0
def commit_to_next_period(general_config, character_options, config_file):
    """
    Manually make a commitment to the next period.

    Creates the Ursula character from the given config, submits the
    commitment transaction, and paints a receipt summary.
    """
    # Setup
    emitter = setup_emitter(general_config,
                            character_options.config_options.worker_address)
    _pre_launch_warnings(emitter,
                         dev=character_options.config_options.dev,
                         force=None)
    _, URSULA = character_options.create_character(emitter,
                                                   config_file,
                                                   general_config.json_ipc,
                                                   load_seednodes=False)

    # Commitments always target the *next* period.
    committed_period = URSULA.staking_agent.get_current_period() + 1
    # BUGFIX: click.echo's `color` kwarg is an ANSI on/off toggle, not a color
    # name, so the original message was never blue - secho(fg=...) colorizes.
    click.secho(
        CONFIRMING_ACTIVITY_NOW.format(committed_period=committed_period),
        fg='blue')
    receipt = URSULA.commit_to_next_period()

    economics = EconomicsFactory.get_economics(registry=URSULA.registry)
    date = datetime_at_period(period=committed_period,
                              seconds_per_period=economics.seconds_per_period)

    # TODO: Double-check dates here
    message = SUCCESSFUL_CONFIRM_ACTIVITY.format(
        committed_period=committed_period, date=date)
    emitter.echo(message, bold=True, color='blue')
    paint_receipt_summary(
        emitter=emitter,
        receipt=receipt,
        chain_name=URSULA.staking_agent.blockchain.client.chain_name)
Exemplo n.º 6
0
 def _measure_time_remaining(self) -> str:
     """Return the time left until the next period starts, as a string delta."""
     period_now = self.staking_agent.get_current_period()
     economics = EconomicsFactory.get_economics(registry=self.registry)
     next_period_start = datetime_at_period(
         period=period_now + 1,
         seconds_per_period=economics.seconds_per_period)
     return str(next_period_start - maya.now())
Exemplo n.º 7
0
    def __init__(self,
                 registry: BaseContractRegistry,
                 checksum_address: ChecksumAddress = None,  # allow for lazy setting
                 *args, **kwargs):
        """Track the on-chain stakes of one staker address (settable later)."""
        super().__init__(*args, **kwargs)
        self.log = Logger('stake-tracker')
        self.staking_agent = ContractAgency.get_agent(StakingEscrowAgent,
                                                      registry=registry)

        # Local import avoids a circular dependency with the economics module.
        from nucypher.blockchain.economics import EconomicsFactory
        self.economics = EconomicsFactory.get_economics(registry=registry)

        # Sentinels until stakes are read from the chain.
        self.__terminal_period = NOT_STAKING
        self.__initial_period = NOT_STAKING

        # "load-in" Read on-chain stakes
        self.checksum_address = checksum_address
        self.__updated = None
Exemplo n.º 8
0
def test_retrieving_from_blockchain(application_economics, test_registry):
    """Economics read back from the chain must match the fixture economics."""
    onchain = EconomicsFactory.get_economics(registry=test_registry)
    expected = application_economics.pre_application_deployment_parameters
    assert onchain.pre_application_deployment_parameters == expected
Exemplo n.º 9
0
    def _learn_about_nodes(self, threaded: bool = True):
        """
        Collect staking metrics for every known node and write one batch of
        line-protocol points to InfluxDB.

        When `threaded` is True the work is dispatched to the reactor thread
        pool and this call returns immediately; a guard flag ensures only one
        collection round runs at a time.

        :param threaded: Dispatch to a reactor thread instead of running inline.
        """
        if threaded:
            if self.__collecting_nodes:
                self.log.debug(
                    "Skipping Round - Nodes collection thread is already running"
                )
                return
            return reactor.callInThread(self._learn_about_nodes,
                                        threaded=False)
        self.__collecting_nodes = True
        # BUGFIX: the guard flag is now cleared in a `finally` block so an
        # exception mid-round cannot wedge all future collection rounds.
        try:
            agent = self.staking_agent
            known_nodes = list(self.known_nodes)

            block_time = agent.blockchain.client.w3.eth.getBlock(
                'latest').timestamp  # precision in seconds
            current_period = agent.get_current_period()

            log = f'Processing {len(known_nodes)} nodes at {MayaDT(epoch=block_time)} | Period {current_period}'
            self.log.info(log)

            # Loop-invariant: economics depend only on the registry, so fetch
            # them once instead of once per node.
            economics = EconomicsFactory.get_economics(registry=self.registry)
            seconds_per_period = economics.seconds_per_period

            data = list()
            for node in known_nodes:

                staker_address = node.checksum_address
                worker = agent.get_worker_from_staker(staker_address)

                stake = agent.owned_tokens(staker_address)
                staked_nu_tokens = float(NU.from_nunits(stake).to_tokens())
                locked_nu_tokens = float(
                    NU.from_nunits(
                        agent.get_locked_tokens(
                            staker_address=staker_address)).to_tokens())

                stakes = StakeList(checksum_address=staker_address,
                                   registry=self.registry)
                stakes.refresh()

                if stakes.initial_period is NOT_STAKING:
                    continue  # TODO: Skip this measurement for now

                start_date = datetime_at_period(
                    stakes.initial_period,
                    seconds_per_period=seconds_per_period)
                start_date = start_date.datetime().timestamp()
                end_date = datetime_at_period(
                    stakes.terminal_period,
                    seconds_per_period=seconds_per_period)
                end_date = end_date.datetime().timestamp()

                last_confirmed_period = agent.get_last_active_period(
                    staker_address)

                num_work_orders = 0  # len(node.work_orders())  # TODO: Only works for is_me with datastore attached

                # TODO: do we need to worry about how much information is in memory if number of nodes is
                #  large i.e. should I check for size of data and write within loop if too big
                data.append(
                    self.NODE_LINE_PROTOCOL.format(
                        measurement=self.NODE_MEASUREMENT,
                        staker_address=staker_address,
                        worker_address=worker,
                        start_date=start_date,
                        end_date=end_date,
                        stake=staked_nu_tokens,
                        locked_stake=locked_nu_tokens,
                        current_period=current_period,
                        last_confirmed_period=last_confirmed_period,
                        timestamp=block_time,
                        work_orders=num_work_orders))

            success = self._influx_client.write_points(
                data,
                database=self.INFLUX_DB_NAME,
                time_precision='s',
                batch_size=10000,
                protocol='line')
            if not success:
                # TODO: What do we do here - Event hook for alerting?
                self.log.warn(
                    f'Unable to write node information to database {self.INFLUX_DB_NAME} at '
                    f'{MayaDT(epoch=block_time)} | Period {current_period}')
        finally:
            self.__collecting_nodes = False
Exemplo n.º 10
0
    def __init__(self,
                 influx_host: str,
                 influx_port: int,
                 crawler_http_port: int = DEFAULT_CRAWLER_HTTP_PORT,
                 registry: BaseContractRegistry = None,
                 node_storage_filepath: str = CrawlerNodeStorage.
                 DEFAULT_DB_FILEPATH,
                 refresh_rate=DEFAULT_REFRESH_RATE,
                 restart_on_error=True,
                 *args,
                 **kwargs):
        """
        Set up the network crawler: fleet-state tracking, InfluxDB connection
        parameters, the staking agent, and the periodic collection tasks.

        :param influx_host: Hostname of the InfluxDB server.
        :param influx_port: Port of the InfluxDB server.
        :param crawler_http_port: Port for the crawler's JSON HTTP endpoint.
        :param registry: Contract registry; when omitted, an in-memory registry
            is fetched from the latest publication.
        :param node_storage_filepath: SQLite path for crawled node metadata.
        :param refresh_rate: Task refresh interval (seconds, presumably -
            TODO confirm units against the task-start code).
        :param restart_on_error: Whether collection tasks restart after errors.
        """
        # Settings
        self.federated_only = False  # Nope - for compatibility with Learner TODO # nucypher/466
        Teacher.set_federated_mode(False)

        self.registry = registry or InMemoryContractRegistry.from_latest_publication(
        )
        self.economics = EconomicsFactory.get_economics(registry=self.registry)
        self._refresh_rate = refresh_rate
        self._restart_on_error = restart_on_error

        # TODO: Needs cleanup
        # Tracking
        node_storage = CrawlerNodeStorage(
            storage_filepath=node_storage_filepath)

        class MonitoringTracker(FleetStateTracker):
            # Persists each new fleet state's abridged details to node storage
            # (closes over `node_storage` from the enclosing constructor).
            def record_fleet_state(self, *args, **kwargs):
                new_state_or_none = super().record_fleet_state(*args, **kwargs)
                if new_state_or_none:
                    _, new_state = new_state_or_none
                    state = self.abridged_state_details(new_state)
                    node_storage.store_state_metadata(state)

        self.tracker_class = MonitoringTracker

        super().__init__(save_metadata=True,
                         node_storage=node_storage,
                         *args,
                         **kwargs)
        self.log = Logger(self.__class__.__name__)
        self.log.info(
            f"Storing node metadata in DB: {node_storage.db_filepath}")
        self.log.info(
            f"Storing blockchain metadata in DB: {influx_host}:{influx_port}")

        # In-memory Metrics
        self._stats = {'status': 'initializing'}
        self._crawler_client = None

        # Initialize InfluxDB
        # Connection is deferred; only host/port are recorded here.
        self._db_host = influx_host
        self._db_port = influx_port
        self._influx_client = None

        # Agency
        self.staking_agent = ContractAgency.get_agent(StakingEscrowAgent,
                                                      registry=self.registry)

        # Crawler Tasks
        # Guard flags prevent overlapping threaded collection rounds.
        self.__collection_round = 0
        self.__collecting_nodes = False  # thread tracking
        self.__collecting_stats = False
        self.__events_from_block = 0  # from the beginning
        self.__collecting_events = False

        self._node_details_task = task.LoopingCall(self._learn_about_nodes)
        self._stats_collection_task = task.LoopingCall(self._collect_stats,
                                                       threaded=True)
        self._events_collection_task = task.LoopingCall(self._collect_events)

        # JSON Endpoint
        self._crawler_http_port = crawler_http_port
        self._flask = None