Example #1
def on_run_end(ctx: ExecutionContext):
    """Ends a workflow.
    
    :param ctx: Execution context information.
    
    """
    # Update ctx.
    ctx.status = ExecutionStatus.COMPLETE

    # Update cache.
    cache.orchestration.set_context(ctx)
    cache.orchestration.set_info_update(ctx, ExecutionAspect.RUN,
                                        ExecutionStatus.COMPLETE)

    # Locks can now be deleted.
    cache.orchestration.delete_locks(ctx)

    # Cache can now be pruned.
    if ctx.prune_on_completion:
        cache.orchestration.prune_on_run_completion(ctx)
        cache.state.prune_on_run_completion(ctx)

    # Notify.
    log_event(EventType.WFLOW_RUN_END, None, ctx)

    # Enqueue next run (when mode=SEQUENTIAL).
    if ctx.execution_mode == ExecutionMode.SEQUENTIAL:
        _loop(ctx)
Example #2
def execute(info: DeployDispatchInfo,
            cp2: Account,
            amount: int,
            verbose: bool = True) -> str:
    """Executes a transfer between 2 counter-parties & returns resulting deploy hash.

    :param info: Standard information required to dispatch deploy.
    :param cp2: Account information of counter party 2.
    :param amount: Amount in motes to be transferred.
    :param verbose: Flag indicating whether event will be logged.
    :returns: Dispatched deploy hash.

    """
    cp1 = info.dispatcher

    deploy_hash = set_deploy.execute(
        info.network, info.node, info.dispatcher, _CONTRACT_FNAME, [
            "--session-arg",
            f"amount:u512='{amount}'",
            "--session-arg",
            f"target:account_hash='account-hash-{cp2.account_hash}'",
        ])

    if verbose:
        log_event(
            EventType.WFLOW_DEPLOY_DISPATCHED,
            f"{info.node.address} :: {deploy_hash} :: transfer (wasm) :: {amount} CSPR :: from {cp1.account_key[:8]} -> {cp2.account_key[:8]} ",
            info.node,
            deploy_hash=deploy_hash,
        )

    return deploy_hash
Example #3
def do_run(ctx: ExecutionContext):
    """Runs a workflow.
    
    :param ctx: Execution context information.
    
    """
    # Escape if unexecutable.
    if not _can_start(ctx):
        return

    # Enqueue next run (when mode=PERIODIC).
    if ctx.execution_mode == ExecutionMode.PERIODIC:
        _loop(encoder.clone(ctx))

    # Update ctx.
    ctx.status = ExecutionStatus.IN_PROGRESS

    # Update cache.
    cache.orchestration.set_context(ctx)
    cache.orchestration.set_info(
        factory.create_execution_info(ExecutionAspect.RUN, ctx))

    # Notify.
    log_event(EventType.WFLOW_RUN_START, None, ctx)

    # Enqueue phase.
    do_phase.send(ctx)
Example #4
def do_step(ctx: ExecutionContext):
    """Runs a workflow step.
    
    :param ctx: Execution context information.
    
    """
    # Escape if unexecutable.
    if not _can_start(ctx):
        return

    # Set step.
    step = Workflow.get_phase_step(ctx, ctx.phase_index, ctx.step_index + 1)
    if step is None:
        log_event(EventType.WFLOW_STEP_FAILURE, "invalid step", ctx)
        return

    # Update ctx.
    ctx.step_index += 1
    ctx.step_label = step.label

    # Update cache.
    cache.orchestration.set_context(ctx)
    cache.orchestration.set_info(
        factory.create_execution_info(ExecutionAspect.STEP, ctx))

    # Notify.
    log_event(EventType.WFLOW_STEP_START, None, ctx)

    # Execute.
    _execute(ctx, step)
Example #5
def _can_start(ctx: ExecutionContext) -> bool:
    """Returns flag indicating whether a step increment is valid.
    
    :param ctx: Execution context information.

    :returns: Flag indicating whether a step increment is valid.

    """
    # False if workflow invalid.
    wflow, wflow_is_valid = predicates.is_valid_wflow(ctx)
    if not wflow_is_valid:
        return False

    # False if current phase not found.
    phase = wflow.get_phase(ctx.phase_index)
    if phase is None:
        log_event(EventType.WFLOW_STEP_ABORT, "invalid phase index", ctx)
        return False

    # False if next step not found.
    step = phase.get_step(ctx.next_step_index)
    if step is None:
        log_event(EventType.WFLOW_STEP_ABORT, "invalid step index", ctx)
        return False

    # False if next step locked - can happen when processing groups of messages.
    if not predicates.was_lock_acquired(ExecutionAspect.STEP, ctx):
        return False

    # All tests passed, therefore return true.
    return True
Example #6
def _process_deploy(ctx: _Context):
    """Processes a finalised deploy.
    
    """
    # Set deploy - escape if not found.
    try:
        ctx.on_chain_deploy = chain.get_deploy(ctx.network, ctx.node,
                                               ctx.deploy_hash)
    except Exception as err:
        log_event(EventType.CHAIN_QUERY_DEPLOY_NOT_FOUND, None,
                  ctx.deploy_hash)
        return

    # Emit event.
    log_event(EventType.CHAIN_ADDED_DEPLOY,
              f"{ctx.block_hash}.{ctx.deploy_hash}", ctx.info)

    # Escape if deploy cannot be correlated to a workflow.
    ctx.deploy = cache.state.get_deploy_on_finalisation(
        ctx.network.name, ctx.deploy_hash)
    if not ctx.deploy:
        return

    # Process correlated - i.e. deploys previously dispatched by a generator.
    _process_deploy_correlated(ctx)
Example #7
def on_step_end(ctx: ExecutionContext):
    """Ends a workflow step.
    
    :param ctx: Execution context information.
    
    """
    # Set step.
    step = Workflow.get_phase_step(ctx, ctx.phase_index, ctx.step_index)
    if step is None:
        log_event(EventType.WFLOW_STEP_FAILURE, "invalid step", ctx)
        return

    # Update cache.
    cache.orchestration.set_info_update(ctx, ExecutionAspect.STEP,
                                        ExecutionStatus.COMPLETE)

    # Notify.
    log_event(EventType.WFLOW_STEP_END, None, ctx)

    # Enqueue either end of phase or next step.
    if step.is_last:
        # Note: JIT import to avoid circularity.
        from stests.core.orchestration.phase import on_phase_end
        on_phase_end.send(ctx)
    else:
        do_step.send(ctx)
Example #8
def encode(data: typing.Any, requires_decoding=True) -> typing.Any:
    """Encodes input data in readiness for downstream processing.
    
    """
    if isinstance(data, PRIMITIVES):
        return data

    if isinstance(data, datetime.datetime):
        return data.timestamp()

    if isinstance(data, dict):
        return {k: encode(v, requires_decoding) for k, v in data.items()}

    if isinstance(data, tuple):
        return tuple(map(lambda i: encode(i, requires_decoding), data))

    if isinstance(data, list):
        return list(map(lambda i: encode(i, requires_decoding), data))

    if type(data) in DCLASS_SET:
        return _encode_dclass(data, dataclasses.asdict(data),
                              requires_decoding)

    if type(data) in ENUM_TYPE_SET:
        return data.name

    log_event(EventType.CORE_ENCODING_FAILURE,
              f"unrecognized data type: {data}")

    return data
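The encoder above dispatches on type and recurses into containers, relying on the project's registries (PRIMITIVES, DCLASS_SET, ENUM_TYPE_SET) to recognise dataclasses and enums. Below is a minimal, self-contained sketch of the same pattern using only the standard library; it drops the requires_decoding flag, and the names (encode_sketch, Colour, Item) are illustrative, not part of stests.

import dataclasses
import datetime
import enum
import typing

# Stand-in for the PRIMITIVES registry.
_PRIMITIVES = (str, int, float, bool, type(None))

class Colour(enum.Enum):
    RED = 1

@dataclasses.dataclass
class Item:
    label: str
    created: datetime.datetime
    colour: Colour

def encode_sketch(data: typing.Any) -> typing.Any:
    """Recursively maps values onto JSON-friendly primitives."""
    if isinstance(data, _PRIMITIVES):
        return data
    if isinstance(data, datetime.datetime):
        return data.timestamp()
    if isinstance(data, enum.Enum):
        return data.name
    if dataclasses.is_dataclass(data):
        return {k: encode_sketch(v) for k, v in dataclasses.asdict(data).items()}
    if isinstance(data, dict):
        return {k: encode_sketch(v) for k, v in data.items()}
    if isinstance(data, (list, tuple)):
        return [encode_sketch(i) for i in data]
    return data

print(encode_sketch(Item("a", datetime.datetime(2021, 1, 1), Colour.RED)))
# -> {'label': 'a', 'created': <posix timestamp>, 'colour': 'RED'}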
Example #9
def _can_start(ctx: ExecutionContext) -> bool:
    """Returns flag indicating whether a phase increment is valid.
    
    :param ctx: Execution context information.

    :returns: Flag indicating whether a phase increment is valid.

    """
    # False if workflow invalid.
    wflow, wflow_is_valid = predicates.is_valid_wflow(ctx)
    if not wflow_is_valid:
        return False

    # False if next phase not found.
    phase = wflow.get_phase(ctx.next_phase_index)
    if phase is None:
        log_event(EventType.WFLOW_PHASE_ABORT, None, ctx)
        return False

    # False if next phase locked.
    if not predicates.was_lock_acquired(ExecutionAspect.PHASE, ctx):
        return False

    # All tests passed, therefore return true.
    return True
Example #10
def _on_node_event(node: Node, info: NodeEventInfo, payload: dict):
    """Event callback.
    
    """
    # Escape if event not of interest.
    if info.event_type not in _ACTORS:
        return

    # Escape if event already processed - happens when monitoring multiple nodes.
    if info.event_id:
        _, was_lock_acquired = cache.monitoring.set_node_event_info(info)
        if not was_lock_acquired:
            return

    # Notify.
    log_event(
        info.event_type,
        None,
        node,
        event_id=info.event_id,
        block_hash=info.block_hash,
        deploy_hash=info.deploy_hash
        )

    # Dispatch message to actor for further processing.
    actor = _ACTORS[info.event_type]
    actor.send(info)
Example #11
def execute(info: DeployDispatchInfo, validator: Account, amount: int, verbose: bool = True) -> str:
    """Submits a deploy delegating an amount of tokens (in motes) to a validator for staking purposes.

    :param info: Information required when dispatching a deploy.
    :param validator: Account information of validator to whom a user is delegating stake.
    :param amount: Amount to submit to auction bid (motes).
    :param verbose: Flag indicating whether event will be logged.

    :returns: Deploy hash.

    """
    delegator = info.dispatcher

    deploy_hash = set_deploy.execute(
        info.network,
        info.node,
        info.dispatcher,
        _CONTRACT_FNAME,
        [
            "--session-arg", f"amount:u512='{amount}'",
            "--session-arg", f"delegator:public_key='{delegator.account_key}'",
            "--session-arg", f"validator:public_key='{validator.account_key}'",
        ]
    )

    if verbose:
        log_event(
            EventType.WFLOW_DEPLOY_DISPATCHED,
            f"{info.node.address} :: {deploy_hash} :: auction (delegate) :: {amount} CSPR :: from {delegator.account_key[:8]} -> {validator.account_key[:8]} ",
            info.node,
            deploy_hash=deploy_hash,
            )

    return deploy_hash
Example #12
def execute(info: DeployDispatchInfo, amount: int, delegation_rate: int, verbose: bool = True) -> str:
    """Submits a bid to network's validator slot auction contract.

    :param info: Information required when dispatching a deploy.
    :param amount: Amount to submit to auction bid (motes).
    :param delegation_rate: Percentage (i.e. rate) of POS reward allocated to delegators.
    :param verbose: Flag indicating whether event will be logged.

    :returns: Deploy hash.

    """
    deploy_hash = set_deploy.execute(
        info.network,
        info.node,
        info.dispatcher,
        _CONTRACT_FNAME,
        [
            "--session-arg", f"amount:u512='{amount}'",
            "--session-arg", f"delegation_rate:u8='{delegation_rate}'",
            "--session-arg", f"public_key:public_key='{info.dispatcher.account_key}'",
        ]
    )
    
    if verbose:
        log_event(
            EventType.WFLOW_DEPLOY_DISPATCHED,
            f"{info.node.address} :: {deploy_hash} :: auction add-bid :: {amount} CSPR :: by node {info.node.index} ",
            info.node,
            deploy_hash=deploy_hash,
            )

    return deploy_hash    
Example #13
def execute(info: DeployDispatchInfo, amount: int, verbose: bool = True) -> str:
    """Withdraws a bid from network's validator slot auction contract.

    :param info: Information required when dispatching a deploy.
    :param amount: Amount to withdraw from auction bid (motes).
    :param verbose: Flag indicating whether event will be logged.

    :returns: Deploy hash.

    """
    deploy_hash = set_deploy.execute(
        info.network,
        info.node,
        info.dispatcher,
        _CONTRACT_FNAME,
        [
            "--session-arg", f"amount:u512='{amount}'",
            "--session-arg", f"public_key:public_key='{info.dispatcher.account_key}'",
        ]
    )

    if verbose:
        log_event(
            EventType.WFLOW_DEPLOY_DISPATCHED,
            f"{info.node.address} :: {deploy_hash} :: auction withdraw-bid :: {amount} CSPR :: by node {info.node.index} ",
            info.node,
            deploy_hash=deploy_hash,
            )

    return deploy_hash 
Example #14
def start_generator(meta: typing.Any):
    """Entry point.

    :param meta: Generator meta-data.

    """
    # Parse cli args.
    args = meta.ARGS.parse_args()

    # Import worker to setup upstream services / actors.
    _import_actors()

    # Import dramatiq actor used to ping message to broker.
    from stests.core.orchestration.run import do_run

    # Unpack args.
    network_id = factory.create_network_id(args.network_name)
    node_id = factory.create_node_id(network_id, args.node_index)

    # Start generator(s).
    ctx_list = _get_context_list(meta, args, network_id, node_id)
    for ctx in ctx_list:
        do_run.send(ctx)

    # Notify.
    if len(ctx_list) == 1:
        log_event(EventType.WFLOW_GENERATOR_LAUNCHED,
                  f"{ctx.run_type} :: run {ctx.run_index}", ctx)
    else:
        log_event(
            EventType.WFLOW_GENERATORS_LAUNCHED,
            f"{ctx.run_type} :: runs {ctx_list[0].run_index} -> {ctx_list[-1].run_index}",
            ctx)
Example #15
def execute(info: DeployDispatchInfo,
            cp2: Account,
            amount: int,
            verbose: bool = True) -> str:
    """Executes a transfer between 2 counter-parties & returns resulting deploy hash.

    :param info: Standard information required to dispatch deploy.
    :param cp2: Account information of counter party 2.
    :param amount: Amount (in motes) to be transferred.
    :param verbose: Flag indicating whether event will be logged.
    :returns: Dispatched deploy hash.

    """
    binary_path = paths.get_path_to_client(info.network)
    cp1 = info.dispatcher

    cli_response = subprocess.run(
        [
            binary_path,
            _CLIENT_METHOD,
            "--target-account",
            cp2.account_key,
            "--amount",
            str(amount),
            "--chain-name",
            info.network.chain_name,
            "--gas-price",
            str(info.gas_price),
            "--node-address",
            info.node_address,
            "--payment-amount",
            str(info.fee),
            "--secret-key",
            info.dispatcher.get_private_key_pem_filepath(),
            "--transfer-id",
            str(random.randint(1, _MAX_TRANSFER_ID)),
            "--ttl",
            str(info.time_to_live),
        ],
        stdout=subprocess.PIPE,
    )
    deploy_hash = json.loads(cli_response.stdout)['result']['deploy_hash']

    if verbose:
        log_event(
            EventType.WFLOW_DEPLOY_DISPATCHED,
            f"{info.node.address} :: {deploy_hash} :: transfer (native) :: {amount} CSPR :: from {cp1.account_key[:8]} -> {cp2.account_key[:8]} ",
            info.node,
            deploy_hash=deploy_hash,
        )

    return deploy_hash
Example #16
def on_step_error(ctx: ExecutionContext, err: str):
    """Ends a workflow step in error.
    
    :param ctx: Execution context information.
    :param err: Execution error information.
    
    """
    # Update cache.
    cache.orchestration.set_info_update(ctx, ExecutionAspect.STEP,
                                        ExecutionStatus.ERROR)

    # Notify.
    log_event(EventType.WFLOW_STEP_ERROR, err, ctx)
Example #17
def on_run_error(ctx: ExecutionContext, err: str):
    """Ends a workflow phase in error.
    
    :param ctx: Execution context information.
    :param err: Execution error information.
    
    """
    # Update ctx.
    ctx.status = ExecutionStatus.ERROR

    # Update cache.
    cache.orchestration.set_context(ctx)
    cache.orchestration.set_info_update(ctx, ExecutionAspect.RUN,
                                        ExecutionStatus.ERROR)

    # Notify.
    log_event(EventType.WFLOW_RUN_ERROR, err, ctx)
Example #18
def execute():
    """Initialises MQ broker & connects dramatiq library.

    """
    # JIT import to avoid circularity - TODO remove.
    from stests.core.mq.middleware import get_middleware

    # Configure broker.
    broker = get_broker()
    for mware in get_middleware():
        broker.add_middleware(mware)

    # Configure dramatiq.
    dramatiq.set_broker(broker)
    dramatiq.set_encoder(encoder)

    log_event(EventType.CORE_BROKER_CONNECTION_ESTABLISHED, None)
Example #19
def execute(node: Node, event_callback: typing.Callable, event_id: int = 0):
    """Hooks upto a node's event stream invoking passed callback for each event.

    :param node: The node to which to bind.
    :param event_callback: Callback to invoke whenever an event of relevant type is received.
    :param event_id: Identifier of event from which to start stream.

    """
    log_event(EventType.MONIT_STREAM_OPENING, node.address_event, node)
    for event_type, event_id, payload, block_hash, deploy_hash, account_key in _yield_events(
            node, event_id):
        event_info = factory.create_node_event_info(
            node,
            event_id,
            event_type,
            block_hash,
            deploy_hash,
            account_key,
        )
        event_callback(node, event_info, payload)
Example #20
def is_valid_wflow(ctx: ExecutionContext) -> typing.Tuple[typing.Optional[Workflow], bool]:
    """Predicate determining whether the workflow to be executed is valid or not.
    
    :param ctx: Execution context information.

    """
    # False if workflow unregistered.
    try:
        wflow = Workflow.create(ctx)
    except ValueError:
        log_event(EventType.WFLOW_INVALID, "unregistered", ctx)
        return None, False

    # False if workflow has no phases.
    if not wflow.phases:
        log_event(EventType.WFLOW_INVALID, "has no associated phases", ctx)
        return None, False

    # False if a phase has no steps.
    for phase in wflow.phases:
        if not phase.steps:
            log_event(EventType.WFLOW_INVALID, "a phase has no associated steps", ctx)
            return None, False

    # All tests passed, therefore return true.   
    return wflow, True
Example #21
def on_step_deploy_finalized(ctx: ExecutionContext, node_id: NodeIdentifier,
                             block_hash: str, deploy_hash: str):
    """Processes a finalized deploy within the context of a step.
    
    :param ctx: Execution context information.
    :param node_id: Identifier of node that emitted block finalization event.
    :param block_hash: Hash of a finalized block.
    :param deploy_hash: Hash of a finalized deploy.

    """
    # Set step - escape if not found.
    step = Workflow.get_phase_step(ctx, ctx.phase_index, ctx.step_index)
    if step is None:
        log_event(EventType.WFLOW_STEP_FAILURE, "invalid step", ctx)
        return

    # Escape if no deploy verifier.
    if not step.has_verifer_for_deploy:
        log_event(EventType.WFLOW_STEP_FAILURE, "deploy verifier undefined",
                  ctx)
        return

    # Verify deploy.
    try:
        step.verify_deploy(ctx, node_id, block_hash, deploy_hash)
    except AssertionError as err:
        log_event(EventType.WFLOW_STEP_FAILURE,
                  f"deploy verification failed: {err} :: {deploy_hash}", ctx)
        return

    # Increment verified deploy counts.
    _, _, deploy_index = cache.orchestration.increment_deploy_counts(ctx)

    # Verify deploy batch is complete.
    try:
        step.verify_deploy_batch_is_complete(ctx, deploy_index)
    except:
        return

    # Verify step.
    if step.has_verifer:
        try:
            step.verify(ctx)
        except AssertionError as err:
            log_event(EventType.WFLOW_STEP_FAILURE, f"verification failed",
                      ctx)
            return

    # Step verification succeeded therefore signal step end.
    on_step_end.send(ctx)
Example #22
def _process_deploy_correlated(ctx: _Context):
    """Process a monitored deploy that was previously dispatched during a generator run.
    
    """
    # Notify.
    log_event(EventType.WFLOW_DEPLOY_CORRELATED,
              f"{ctx.block_hash}.{ctx.deploy_hash}",
              ctx.node,
              block_hash=ctx.block_hash,
              deploy_hash=ctx.deploy_hash)

    # Update cache: deploy.
    ctx.deploy.block_hash = ctx.block_hash
    try:
        ctx.deploy.deploy_cost = int(ctx.on_chain_deploy["execution_results"]
                                     [0]["result"]["Success"]["cost"])
    except KeyError:
        try:
            ctx.deploy.deploy_cost = int(
                ctx.on_chain_deploy["execution_results"][0]["result"]["cost"])
        except KeyError:
            ctx.deploy.deploy_cost = 0

    ctx.deploy.era_id = ctx.block.era_id
    ctx.deploy.finalization_duration = ctx.block.timestamp.timestamp(
    ) - ctx.deploy.dispatch_timestamp.timestamp()
    ctx.deploy.finalization_node_index = ctx.node.index
    ctx.deploy.finalization_timestamp = ctx.block.timestamp
    ctx.deploy.state_root_hash = ctx.block.state_root_hash
    ctx.deploy.status = DeployStatus.ADDED
    cache.state.set_deploy(ctx.deploy)

    # Update cache: account balance.
    if ctx.deploy.deploy_cost > 0:
        cache.state.decrement_account_balance_on_deploy_finalisation(
            ctx.deploy, ctx.deploy.deploy_cost)

    # Enqueue message for processing by orchestrator.
    _enqueue_correlated(ctx)
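The nested try/except above implies two response shapes for a finalised deploy: one nesting the execution cost under a "Success" result and one exposing it directly, with 0 as the fallback. A self-contained sketch of just that extraction follows; the sample payloads are invented to match the key paths probed above, and the exact node response format is an assumption rather than a spec.

def extract_cost(on_chain_deploy: dict) -> int:
    """Returns the execution cost, defaulting to 0 when neither shape matches."""
    result = on_chain_deploy["execution_results"][0]["result"]
    try:
        return int(result["Success"]["cost"])
    except KeyError:
        try:
            return int(result["cost"])
        except KeyError:
            return 0

# Illustrative payloads mirroring the two key paths handled above.
new_style = {"execution_results": [{"result": {"Success": {"cost": "1250"}}}]}
old_style = {"execution_results": [{"result": {"cost": "1250"}}]}
assert extract_cost(new_style) == extract_cost(old_style) == 1250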
Example #23
def do_step_verification(ctx: ExecutionContext):
    """Verifies a workflow step prior to signalling end.
    
    :param ctx: Execution context information.
    
    """
    # Set step.
    step = Workflow.get_phase_step(ctx, ctx.phase_index, ctx.step_index)
    if step is None:
        log_event(EventType.WFLOW_STEP_FAILURE, "invalid step", ctx)
        return

    # Verify step.
    if step.has_verifer and not step.has_verifer_for_deploy:
        try:
            step.verify(ctx)
        except AssertionError as err:
            log_event(EventType.WFLOW_STEP_FAILURE, "verification failed", ctx)
            return

    # Enqueue step end.
    on_step_end.send(ctx)
Example #24
def do_monitor_node(node_id: NodeIdentifier):
    """Launches node monitoring.
    
    :param node_id: Identifier of node to be monitored.

    """
    # Set lock.
    lock_acquired = False
    for i in range(_MONITORS_PER_NODE):
        lock = factory.create_node_monitoring_lock(node_id, i + 1)
        _, lock_acquired = cache.monitoring.set_node_monitor_lock(lock)
        if lock_acquired:
            break

    # Escape if sufficient locks are already in place.
    if not lock_acquired:
        return

    # Monitor node by listening to & processing node events.
    try:
        listener.bind_to_stream(cache.infra.get_node(node_id), _on_node_event)

    # Exception: actor timeout.
    except TimeLimitExceeded:
        do_monitor_node.send(node_id)

    # Exception: process shutdown.
    except Shutdown:
        pass

    # Exception: chain exception, e.g. node down, comms channel issue ...etc.
    except Exception as err:
        log_event(EventType.MONIT_STREAM_BIND_ERROR, err, node_id)
        do_monitor_node.send(node_id)

    # Release lock.
    finally:
        cache.monitoring.delete_node_monitor_lock(lock)
Example #25
def on_phase_end(ctx: ExecutionContext):
    """Ends a workflow phase.
    
    :param ctx: Execution context information.
    
    """
    # Set phase.
    phase = Workflow.get_phase_(ctx, ctx.phase_index)

    # Update cache.
    cache.orchestration.set_info_update(ctx, ExecutionAspect.PHASE,
                                        ExecutionStatus.COMPLETE)

    # Notify.
    log_event(EventType.WFLOW_PHASE_END, None, ctx)

    # Enqueue either end of workflow or next phase.
    if phase.is_last:
        # JIT import to avoid circularity.
        from stests.core.orchestration.run import on_run_end
        on_run_end.send(ctx)
    else:
        do_phase.send(ctx)
Example #26
def do_phase(ctx: ExecutionContext):
    """Runs a workflow phase.
    
    :param ctx: Execution context information.
    
    """
    # Escape if unexecutable.
    if not _can_start(ctx):
        return

    # Update ctx.
    ctx.phase_index += 1
    ctx.step_index = 0

    # Update cache.
    cache.orchestration.set_context(ctx)
    cache.orchestration.set_info(
        factory.create_execution_info(ExecutionAspect.PHASE, ctx))

    # Notify.
    log_event(EventType.WFLOW_PHASE_START, None, ctx)

    # Enqueue step.
    do_step.send(ctx)
Example #27
def _can_start(ctx: ExecutionContext) -> bool:
    """Returns flag indicating whether a run increment is valid.
    
    :param ctx: Execution context information.

    :returns: Flag indicating whether a run increment is valid.

    """
    # False if workflow invalid.
    _, wflow_is_valid = predicates.is_valid_wflow(ctx)
    if not wflow_is_valid:
        return False

    # False if phase/step are not initialised.
    if ctx.phase_index != 0 or ctx.step_index != 0:
        log_event(EventType.WFLOW_RUN_ABORT, None, ctx)
        return False

    # False if locked.
    if not predicates.was_lock_acquired(ExecutionAspect.RUN, ctx):
        return False

    # All tests passed, therefore return true.
    return True
Example #28
def _process_block(ctx: _Context):
    """Processes a finalised block.
    
    """
    # Escape if block not found.
    try:
        ctx.on_chain_block = chain.get_block(ctx.network, ctx.node,
                                             ctx.block_hash)
    except Exception as err:
        log_event(EventType.CHAIN_QUERY_BLOCK_NOT_FOUND, None, ctx.block_hash)
        return

    # Escape if block empty.
    if not ctx.deploy_hashes and not ctx.transfer_hashes:
        log_event(EventType.CHAIN_ADDED_BLOCK_EMPTY, None, ctx.block_hash)
        return

    # Set stats.
    ctx.block = factory.create_block_statistics_on_addition(
        block_hash=ctx.block_hash,
        block_hash_parent=ctx.on_chain_block['header']['parent_hash'],
        chain_name=ctx.network.chain_name,
        era_id=ctx.on_chain_block['header']['era_id'],
        deploy_cost_total=None,
        deploy_count=len(ctx.deploy_hashes) + len(ctx.transfer_hashes),
        deploy_gas_price_avg=None,
        height=ctx.on_chain_block['header']['height'],
        is_switch_block=ctx.on_chain_block['header']['era_end'] is not None,
        network=ctx.network.name,
        proposer=ctx.on_chain_block['header']['proposer'],
        size_bytes=None,
        state_root_hash=ctx.on_chain_block['header']['state_root_hash'],
        status=BlockStatus.FINALIZED.name,
        timestamp=datetime.strptime(ctx.on_chain_block['header']['timestamp'],
                                    "%Y-%m-%dT%H:%M:%S.%fZ"),
    )

    # Emit event.
    log_event(EventType.CHAIN_ADDED_BLOCK, f"{ctx.block_hash}", ctx.block)

    # Process associated deploys + transfers.
    _process_block_deploys(ctx)
Example #29
def _parse_event(
    node: Node,
    event_id: int,
    payload: dict,
) -> typing.Tuple[EventType, int,  # event id
                  dict,  # payload
                  typing.Optional[str],  # block hash
                  typing.Optional[str],  # deploy hash
                  typing.Optional[str],  # account key
                  ]:
    """Parses raw event data for upstream processing.

    """
    if 'ApiVersion' in payload:
        return

    elif 'BlockAdded' in payload:
        return \
            EventType.MONIT_BLOCK_ADDED, \
            event_id, \
            payload, \
            payload['BlockAdded']['block_hash'], \
            None, \
            None

    elif 'BlockFinalized' in payload:
        return \
            EventType.MONIT_BLOCK_FINALIZED, \
            event_id, \
            payload, \
            payload['BlockFinalized']['proto_block']['hash'], \
            None, \
            None

    elif 'FinalitySignature' in payload:
        return \
            EventType.MONIT_CONSENSUS_FINALITY_SIGNATURE, \
            event_id, \
            payload, \
            payload['FinalitySignature']['block_hash'], \
            None, \
            payload['FinalitySignature']['public_key']

    elif 'Fault' in payload:
        return \
            EventType.MONIT_CONSENSUS_FAULT, \
            event_id, \
            payload, \
            payload['Fault']['era_id'], \
            None, \
            None

    elif 'DeployProcessed' in payload:
        return \
            EventType.MONIT_DEPLOY_PROCESSED, \
            event_id, \
            payload, \
            payload['DeployProcessed']['block_hash'], \
            payload['DeployProcessed']['deploy_hash'], \
            None

    log_event(
        EventType.MONIT_STREAM_EVENT_TYPE_UNKNOWN,
        f"event skipped as type is unsupported :: node={node.address_rpc} :: event type={list(payload.keys())[0]}",
        node)
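A minimal usage sketch of the parser above, with an invented payload whose key path matches the 'BlockAdded' branch (in Example #19 the same tuple is produced by _yield_events and handed to the callback). That branch never touches the node argument, so None stands in for it here.

sample_payload = {"BlockAdded": {"block_hash": "b28a...cafe"}}  # assumed shape

event_type, event_id, payload, block_hash, deploy_hash, account_key = \
    _parse_event(None, 42, sample_payload)

assert event_type is EventType.MONIT_BLOCK_ADDED
assert (block_hash, deploy_hash, account_key) == ("b28a...cafe", None, None)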