Example #1
def RandomHuntId():
  """Returns a random hunt id encoded as a hex string."""
  return "%08X" % random.PositiveUInt32()
Example #2
def RandomFlowId():
  """Returns a random flow id encoded as a hex string."""
  return "%08X" % random.PositiveUInt32()
Example #3
    def testPositive(self, urandom):
        del urandom  # Unused.

        for _ in range(10):
            self.assertGreater(random.PositiveUInt32(), 0)
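
The test above takes a patched urandom fixture and discards it, which hints that PositiveUInt32 is backed by os.urandom. Under that assumption only (this is not GRR's actual implementation), a minimal sketch of such a helper could be:

import os
import struct

def PositiveUInt32():
  # Sketch, not GRR's code: draw 4 random bytes and decode them as an
  # unsigned 32-bit integer, retrying until the result is non-zero so it
  # satisfies assertGreater(..., 0) in the test above.
  value = 0
  while value == 0:
    value = struct.unpack("!L", os.urandom(4))[0]
  return value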
Example #4
def StartFlow(client_id=None,
              cpu_limit=7200,
              creator=None,
              flow_args=None,
              flow_cls=None,
              network_bytes_limit=None,
              original_flow=None,
              output_plugins=None,
              parent_flow_obj=None,
              **kwargs):
    """The main factory function for creating and executing a new flow.

  Args:
    client_id: ID of the client this flow should run on.
    cpu_limit: CPU limit in seconds for this flow.
    creator: Username that requested this flow.
    flow_args: An arg protocol buffer which is an instance of the required
      flow's args_type class attribute.
    flow_cls: Class of the flow that should be started.
    network_bytes_limit: Limit on the network traffic this flow can generated.
    original_flow: A FlowReference object in case this flow was copied from
      another flow.
    output_plugins: An OutputPluginDescriptor object indicating what output
      plugins should be used for this flow.
    parent_flow_obj: A parent flow object. None if this is a top level flow.
    **kwargs: If args or runner_args are not specified, we construct these
      protobufs from these keywords.

  Returns:
    the flow id of the new flow.

  Raises:
    ValueError: Unknown or invalid parameters were provided.
  """
    # Is the required flow a known flow?
    try:
        registry.FlowRegistry.FlowClassByName(flow_cls.__name__)
    except ValueError:
        stats_collector_instance.Get().IncrementCounter(
            "grr_flow_invalid_flow_count")
        raise ValueError("Unable to locate flow %s" % flow_cls.__name__)

    if not client_id:
        raise ValueError("Client_id is needed to start a flow.")

    # Now parse the flow args into the new object from the keywords.
    if flow_args is None:
        flow_args = flow_cls.args_type()

    FilterArgsFromSemanticProtobuf(flow_args, kwargs)
    # At this point we should have exhausted all the keyword args. If any are
    # left over, we do not know what to do with them, so we raise.
    if kwargs:
        raise type_info.UnknownArg("Unknown parameters to StartFlow: %s" %
                                   kwargs)

    # Check that the flow args are valid.
    flow_args.Validate()

    rdf_flow = rdf_flow_objects.Flow(client_id=client_id,
                                     flow_class_name=flow_cls.__name__,
                                     args=flow_args,
                                     create_time=rdfvalue.RDFDatetime.Now(),
                                     creator=creator,
                                     output_plugins=output_plugins,
                                     original_flow=original_flow,
                                     flow_state="RUNNING")

    rdf_flow.flow_id = "%08X" % random.PositiveUInt32()

    if parent_flow_obj:
        parent_rdf_flow = parent_flow_obj.rdf_flow
        rdf_flow.long_flow_id = "%s/%s" % (parent_rdf_flow.long_flow_id,
                                           rdf_flow.flow_id)
        rdf_flow.parent_flow_id = parent_rdf_flow.flow_id
        rdf_flow.parent_request_id = parent_flow_obj.GetCurrentOutboundId()
        if parent_rdf_flow.creator:
            rdf_flow.creator = parent_rdf_flow.creator
    else:
        rdf_flow.long_flow_id = "%s/%s" % (client_id, rdf_flow.flow_id)

    if output_plugins:
        rdf_flow.output_plugins_states = GetOutputPluginStates(
            output_plugins,
            rdf_flow.long_flow_id,
            token=access_control.ACLToken(username=rdf_flow.creator))

    if network_bytes_limit is not None:
        rdf_flow.network_bytes_limit = network_bytes_limit
    if cpu_limit is not None:
        rdf_flow.cpu_limit = cpu_limit

    logging.info(u"Scheduling %s(%s) on %s", rdf_flow.long_flow_id,
                 rdf_flow.flow_class_name, client_id)

    flow_obj = flow_cls(rdf_flow)
    # Just run the first state inline. NOTE: Running synchronously means
    # that this runs on the thread that starts the flow. The advantage is
    # that the Start method can raise any errors immediately.
    flow_obj.Start()
    flow_obj.PersistState()

    # The flow does not need to actually remain running.
    if not flow_obj.outstanding_requests:
        flow_obj.RunStateMethod("End")
        flow_obj.MarkDone()

    data_store.REL_DB.WriteFlowObject(flow_obj.rdf_flow)

    if parent_flow_obj is not None:
        # We can optimize here and not write requests/responses to the database
        # since we have to do this for the parent flow at some point anyway.
        parent_flow_obj.MergeQueuedMessages(flow_obj)
    else:
        flow_obj.FlushQueuedMessages()

        # Publish an audit event, only for top level flows.
        # TODO(amoser): split urn field into dedicated strings.
        events.Events.PublishEvent(
            "Audit",
            rdf_events.AuditEvent(user=creator,
                                  action="RUN_FLOW",
                                  flow_name=rdf_flow.flow_class_name,
                                  urn=rdf_flow.long_flow_id,
                                  client=client_id))

    return rdf_flow.flow_id
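
Reading StartFlow as an API, a hedged usage sketch might look like the following. The flow class, client id, and args values here are placeholders chosen for illustration, not names taken from the example above, and the flow class is assumed to be registered in registry.FlowRegistry:

# Hypothetical call site; FileFinder and its args are stand-ins for a real
# registered flow class and its args_type protobuf.
flow_id = StartFlow(
    client_id="C.1234567890abcdef",
    flow_cls=FileFinder,
    flow_args=FileFinder.args_type(paths=["/tmp/*"]),
    creator="analyst",
    cpu_limit=3600)

# StartFlow runs the flow's Start() state inline and returns the new flow id,
# an 8-character hex string produced the same way as in Examples #1 and #2.
print(flow_id)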