Example #1
  def testArgParsing(self):
    """Test that arguments can be extracted and annotated successfully."""

    # Should raise when given an argument of the wrong type.
    with self.assertRaises(ValueError):
      flow.StartFlow(
          client_id=self.client_id,
          flow_name="BadArgsFlow1",
          arg1=False,
          token=self.token)

    # Should not raise now if we provide the correct type.
    flow.StartFlow(
        client_id=self.client_id,
        flow_name="BadArgsFlow1",
        arg1=rdf_paths.PathSpec(),
        token=self.token)
Example #2
    def Handle(self, args, context=None):
        flow_id = flow.StartFlow(flow_cls=discovery.Interrogate,
                                 client_id=str(args.client_id),
                                 creator=context.username)

        # TODO(user): don't encode client_id inside the operation_id, but
        # rather have it as a separate field.
        return ApiInterrogateClientResult(operation_id=flow_id)
Example #3
    def CreateMultiGetFileFlow(self, client_id, file_path):
        pathspec = rdf_paths.PathSpec(path=file_path,
                                      pathtype=rdf_paths.PathSpec.PathType.OS)
        flow_args = transfer.MultiGetFileArgs(pathspecs=[pathspec])

        return flow.StartFlow(client_id=client_id,
                              flow_cls=transfer.MultiGetFile,
                              flow_args=flow_args)
Example #4
    def testExceptionInStart(self):
        flow_id = flow.StartFlow(flow_cls=FlowWithBrokenStart,
                                 client_id=self.client_id)
        flow_obj = data_store.REL_DB.ReadFlowObject(self.client_id, flow_id)

        self.assertEqual(flow_obj.flow_state, flow_obj.FlowState.ERROR)
        self.assertEqual(flow_obj.error_message, "boo")
        self.assertIsNotNone(flow_obj.backtrace)
Example #5
  def CreateRecursiveListFlow(self, client_id, token):
    flow_args = filesystem.RecursiveListDirectoryArgs()

    return flow.StartFlow(
        client_id=client_id,
        flow_name=filesystem.RecursiveListDirectory.__name__,
        args=flow_args,
        token=token)
Example #6
    def FlowSetup(self, name, client_id=None):
        if client_id is None:
            client_id = self.client_id

        session_id = flow.StartFlow(client_id=client_id,
                                    flow_name=name,
                                    token=self.token)

        return aff4.FACTORY.Open(session_id, mode="rw", token=self.token)
Example #7
    def testInitializesClientIdForClientBasedFlows(self):
        client_id = self.SetupClient(0)
        flow_id = flow.StartFlow(client_id=client_id,
                                 flow_cls=processes.ListProcesses)
        flow_obj = data_store.REL_DB.ReadFlowObject(client_id, flow_id)
        flow_api_obj = flow_plugin.ApiFlow().InitFromFlowObject(flow_obj)

        self.assertEqual(flow_api_obj.client_id,
                         client_plugin.ApiClientId(client_id))
Example #8
    def testFlowWithoutResultsCorrectlyReportsEmptyResultMetadata(self):
        client_id = self.SetupClient(0)
        flow_id = flow.StartFlow(client_id=client_id,
                                 flow_cls=flow_test_lib.DummyFlow)
        flow_obj = data_store.REL_DB.ReadFlowObject(client_id, flow_id)

        flow_api_obj = flow_plugin.ApiFlow().InitFromFlowObject(flow_obj)
        self.assertIsNotNone(flow_api_obj.result_metadata)
        self.assertEmpty(flow_api_obj.result_metadata.num_results_per_type_tag)
Example #9
    def testNoClientActionIsDisplayedWhenFlowIsStarted(self):
        self.RequestAndGrantClientApproval(self.client_id)

        self.Open("/#/clients/%s/load-stats" % self.client_id)
        self.WaitUntil(self.IsTextPresent, "No actions currently in progress.")

        flow.StartFlow(client_id=rdf_client.ClientURN(self.client_id),
                       flow_name=processes.ListProcesses.__name__,
                       token=self.token)
Example #10
    def testDoesNothingIfAgeLimitNotSetInConfig(self):
        with test_lib.FakeTime(40 + 60 * self.NUM_HUNTS):
            flow.StartFlow(flow_name=data_retention.CleanHunts.__name__,
                           sync=True,
                           token=self.token)

        hunts_urns = list(
            aff4.FACTORY.Open("aff4:/hunts", token=self.token).ListChildren())
        self.assertEqual(len(hunts_urns), 10)
Example #11
    def Run(self):
        client_urn = self.SetupClient(0)
        client_id = client_urn.Basename()

        acl_test_lib.CreateUser(self.token.username)

        # Create a running mock refresh operation.
        running_flow_id = self.CreateMultiGetFileFlow(
            client_urn, file_path="fs/os/c/bin/bash", token=self.token)

        # Create a mock refresh operation and complete it.
        finished_flow_id = self.CreateMultiGetFileFlow(
            client_urn, file_path="fs/os/c/bin/bash", token=self.token)

        if data_store.RelationalDBEnabled():
            flow_base.TerminateFlow(client_id,
                                    finished_flow_id,
                                    reason="Fake Error")

            # Create an arbitrary flow to check on 404s.
            non_update_flow_id = flow.StartFlow(client_id=client_id,
                                                flow_cls=discovery.Interrogate)

        else:
            finished_flow_urn = client_urn.Add("flows").Add(finished_flow_id)
            with aff4.FACTORY.Open(finished_flow_urn,
                                   aff4_type=flow.GRRFlow,
                                   mode="rw",
                                   token=self.token) as flow_obj:
                flow_obj.GetRunner().Error("Fake error")

            # Create an arbitrary flow to check on 404s.
            non_update_flow_id = flow.StartAFF4Flow(
                client_id=client_urn,
                flow_name=discovery.Interrogate.__name__,
                token=self.token).Basename()

        # Unknown flow ids should also cause 404s.
        unknown_flow_id = "F:12345678"

        # Check all the operations.
        self.Check("GetVfsFileContentUpdateState",
                   args=vfs_plugin.ApiGetVfsFileContentUpdateStateArgs(
                       client_id=client_id, operation_id=running_flow_id),
                   replace={running_flow_id: "W:ABCDEF"})
        self.Check("GetVfsFileContentUpdateState",
                   args=vfs_plugin.ApiGetVfsFileContentUpdateStateArgs(
                       client_id=client_id, operation_id=finished_flow_id),
                   replace={finished_flow_id: "W:ABCDEF"})
        self.Check("GetVfsFileContentUpdateState",
                   args=vfs_plugin.ApiGetVfsFileContentUpdateStateArgs(
                       client_id=client_id, operation_id=non_update_flow_id),
                   replace={non_update_flow_id: "W:ABCDEF"})
        self.Check("GetVfsFileContentUpdateState",
                   args=vfs_plugin.ApiGetVfsFileContentUpdateStateArgs(
                       client_id=client_id, operation_id=unknown_flow_id),
                   replace={unknown_flow_id: "W:ABCDEF"})
Example #12
 def _StartFlow(self, flow_cls, **kw):
   if data_store.RelationalDBFlowsEnabled():
     return flow.StartFlow(flow_cls=flow_cls, client_id=self.client_id, **kw)
   else:
     return flow.StartAFF4Flow(
         flow_name=compatibility.GetName(flow_cls),
         client_id=self.client_id,
         token=self.token,
         **kw)
Example #13
    def testHeartBeatingFlowIsNotTreatedAsStuck(self):
        worker_obj = worker_lib.GRRWorker(token=self.token)
        initial_time = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(100)

        stuck_flows_timeout = flow_runner.FlowRunner.stuck_flows_timeout
        lease_timeout = rdfvalue.Duration(worker_lib.GRRWorker.flow_lease_time)

        WorkerStuckableTestFlow.Reset(heartbeat=True)
        try:
            with test_lib.FakeTime(initial_time.AsSecondsSinceEpoch()):
                session_id = flow.StartFlow(
                    flow_name=WorkerStuckableTestFlow.__name__,
                    client_id=self.client_id,
                    token=self.token,
                    sync=False)
                # Process all messages
                worker_obj.RunOnce()
                # Wait until worker thread starts processing the flow.
                WorkerStuckableTestFlow.WaitUntilWorkerStartsProcessing()

            # Increase the time in steps, using LetFlowHeartBeat/WaitForFlowHeartBeat
            # to control the flow execution that happens in the parallel thread.
            current_time = rdfvalue.RDFDatetime(initial_time)
            future_time = initial_time + stuck_flows_timeout + rdfvalue.Duration(
                "1m")
            while current_time <= future_time:
                current_time += lease_timeout - rdfvalue.Duration("1s")

                with test_lib.FakeTime(current_time.AsSecondsSinceEpoch()):
                    checked_flow = aff4.FACTORY.Open(session_id,
                                                     token=self.token)
                    WorkerStuckableTestFlow.LetFlowHeartBeat()
                    WorkerStuckableTestFlow.WaitForFlowHeartBeat(
                        last_heartbeat=current_time > future_time)
            # Now current_time is > future_time, where future_time is the time
            # when stuck flow should have been killed. Calling RunOnce() here,
            # because if the flow is going to be killed, it will be killed
            # during worker.RunOnce() call.
            with test_lib.FakeTime(current_time.AsSecondsSinceEpoch()):
                worker_obj.RunOnce()

            # Check that the flow wasn't killed forcefully.
            checked_flow = aff4.FACTORY.Open(session_id, token=self.token)
            self.assertEqual(checked_flow.context.state,
                             rdf_flow_runner.FlowContext.State.RUNNING)

        finally:
            # Release the semaphore so that worker thread unblocks and finishes
            # processing the flow.
            with test_lib.FakeTime(current_time.AsSecondsSinceEpoch()):
                WorkerStuckableTestFlow.LetWorkerFinishProcessing()
                worker_obj.thread_pool.Join()

        # Check that the flow has finished normally.
        checked_flow = aff4.FACTORY.Open(session_id, token=self.token)
        self.assertEqual(checked_flow.context.state,
                         rdf_flow_runner.FlowContext.State.TERMINATED)
Example #14
    def testUserChangesToCopiedFlowAreRespected(self):
        args = flows_processes.ListProcessesArgs(filename_regex="test[a-z]*",
                                                 fetch_binaries=True)
        flow.StartFlow(flow_name=flows_processes.ListProcesses.__name__,
                       args=args,
                       client_id=self.client_id,
                       output_plugins=[self.email_descriptor],
                       token=self.token)

        # Navigate to client and select newly created flow.
        self.Open("/#/clients/C.0000000000000001/flows")
        self.Click("css=td:contains('ListProcesses')")

        # Open wizard and change the arguments.
        self.Click("css=button[name=copy_flow]")

        self.Type("css=label:contains('Filename Regex') ~ * input",
                  "somethingElse*")

        self.Click(
            "css=label:contains('Fetch Binaries') ~ * input[type=checkbox]")

        # Change output plugin and add another one.
        self.Click("css=label:contains('Output Plugins') ~ * button")
        self.Select(
            "css=grr-output-plugin-descriptor-form "
            "label:contains('Plugin') ~ * select:eq(0)", "DummyOutputPlugin")
        self.Type(
            "css=grr-output-plugin-descriptor-form "
            "label:contains('Filename Regex'):eq(0) ~ * input:text", "foobar!")

        self.Click("css=button:contains('Launch')")

        # Check that flows list got updated and that the new flow is selected.
        self.WaitUntil(
            self.IsElementPresent,
            "css=grr-client-flows-list tr:contains('ListProcesses'):nth(1)")
        self.WaitUntil(
            self.IsElementPresent, "css=grr-client-flows-list "
            "tr:contains('ListProcesses'):nth(0).row-selected")

        # Now open the last flow and check that it has the changes we made.
        fd = aff4.FACTORY.Open(self.client_id.Add("flows"), token=self.token)
        flows = sorted(fd.ListChildren(), key=lambda x: x.age)
        fobj = aff4.FACTORY.Open(flows[-1], token=self.token)

        self.assertEqual(
            fobj.args,
            flows_processes.ListProcessesArgs(
                filename_regex="somethingElse*", ))
        self.assertListEqual(list(fobj.runner_args.output_plugins), [
            rdf_output_plugin.OutputPluginDescriptor(
                plugin_name=gui_test_lib.DummyOutputPlugin.__name__,
                plugin_args=flows_processes.ListProcessesArgs(
                    filename_regex="foobar!")), self.email_descriptor
        ])
Example #15
  def Update(self, attribute=None):
    if attribute == "CONTAINS":
      flow_id = flow.StartFlow(
          client_id=self.client_id,
          # TODO(user): dependency loop with flows/general/discover.py
          # flow_name=discovery.Interrogate.__name__,
          flow_name="Interrogate",
          token=self.token)

      return flow_id
Example #16
    def testDoesNothingIfAgeLimitNotSetInConfig(self):
        with test_lib.FakeTime(40 + 60 * self.NUM_CRON_RUNS):
            flow.StartFlow(flow_name=data_retention.CleanCronJobs.__name__,
                           sync=True,
                           token=self.token)

        for name in self.cron_jobs_names:
            runs = cronjobs.GetCronManager().ReadJobRuns(name,
                                                         token=self.token)
            self.assertEqual(len(runs), self.NUM_CRON_RUNS)
Example #17
    def testFlowFilesArchiveRaisesIfFlowWasNotCreatedBySameRouter(self):
        flow_urn = flow.StartFlow(client_id=self.client_id,
                                  flow_name=file_finder.FileFinder.__name__,
                                  token=self.token)

        router = self._CreateRouter()
        with self.assertRaises(access_control.UnauthorizedAccess):
            router.GetFlowFilesArchive(api_flow.ApiGetFlowFilesArchiveArgs(
                client_id=self.client_id, flow_id=flow_urn.Basename()),
                                       token=self.token)
Example #18
    def testFlowWithoutFlowProgressTypeReportsDefaultFlowProgress(self):
        client_id = self.SetupClient(0)
        flow_id = flow.StartFlow(client_id=client_id,
                                 flow_cls=flow_test_lib.DummyFlow)
        flow_obj = data_store.REL_DB.ReadFlowObject(client_id, flow_id)

        flow_api_obj = flow_plugin.ApiFlow().InitFromFlowObject(flow_obj)
        self.assertIsNotNone(flow_api_obj.progress)
        self.assertIsInstance(flow_api_obj.progress,
                              rdf_flow_objects.DefaultFlowProgress)
Example #19
  def Run(self):
    client_id = self.SetupClient(0)
    flow_id = flow.StartFlow(
        flow_cls=processes.ListProcesses, client_id=client_id)

    self.Check(
        "CancelFlow",
        args=flow_plugin.ApiCancelFlowArgs(
            client_id=client_id, flow_id=flow_id),
        replace={flow_id: "W:ABCDEF"})
Example #20
    def RunJob(self, job, force=False, token=None):
        """Does the actual work of the Cron, if the job is due to run.

    Args:
      job: The cronjob rdfvalue that should be run. Must be leased.
      force: If True, the job will run even if JobDueToRun() returns False.
      token: A datastore token.

    Raises:
      LockError: if the object is not locked.
    """
        if not job.leased_until or job.leased_until < rdfvalue.RDFDatetime.Now():
            raise LockError("CronJob must be leased for Run() to be called.")

        self.TerminateExpiredRun(job, token=token)

        # If currently running flow has finished, update our state.
        runs_base = rdfvalue.RDFURN("aff4:/cron").Add(job.job_id)
        if job.current_run_id:
            current_run_urn = runs_base.Add("F:%X" % job.current_run_id)
            current_flow = aff4.FACTORY.Open(current_run_urn, token=token)
            runner = current_flow.GetRunner()
            if not runner.IsRunning():
                if runner.context.state == "ERROR":
                    status = rdf_cronjobs.CronJobRunStatus.Status.ERROR
                    stats.STATS.IncrementCounter("cron_job_failure",
                                                 fields=[job.job_id])
                else:
                    status = rdf_cronjobs.CronJobRunStatus.Status.OK
                    elapsed = rdfvalue.RDFDatetime.Now() - job.last_run_time
                    stats.STATS.RecordEvent("cron_job_latency",
                                            elapsed.seconds,
                                            fields=[job.job_id])

                data_store.REL_DB.UpdateCronJob(job.job_id,
                                                last_run_status=status,
                                                current_run_id=None)

        if not force and not self.JobDueToRun(job):
            return

        # Make sure the flow is created with cron job as a parent folder.
        job.cron_args.flow_runner_args.base_session_id = runs_base

        flow_urn = flow.StartFlow(runner_args=job.cron_args.flow_runner_args,
                                  args=job.cron_args.flow_args,
                                  token=token,
                                  sync=False)

        job.current_run_id = int(flow_urn.Basename()[2:], 16)
        data_store.REL_DB.UpdateCronJob(
            job.job_id,
            last_run_time=rdfvalue.RDFDatetime.Now(),
            current_run_id=job.current_run_id)
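
As a quick worked illustration of the run-id encoding used in RunJob above (the value is hypothetical, not from the source): flow URN basenames have the form "F:<hex>", so stripping the prefix and parsing the remainder as hexadecimal recovers the integer run id.

# Hypothetical value showing the "F:%X" round trip from RunJob.
basename = "F:12345678"                 # e.g. flow_urn.Basename()
current_run_id = int(basename[2:], 16)  # strip "F:" and parse as hex
assert current_run_id == 305419896
assert "F:%X" % current_run_id == basename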
Example #21
def StartHuntFlowOnClient(client_id, hunt_id):
  """Starts a flow corresponding to a given hunt on a given client."""

  hunt_obj = data_store.REL_DB.ReadHuntObject(hunt_id)
  hunt_obj = CompleteHuntIfExpirationTimeReached(hunt_obj)
  # There may be a little race between foreman rules being removed and
  # foreman scheduling a client on an (already) paused hunt. Making sure
  # we don't lose clients in such a race by accepting clients for paused
  # hunts.
  if not rdf_hunt_objects.IsHuntSuitableForFlowProcessing(hunt_obj.hunt_state):
    return

  if hunt_obj.args.hunt_type == hunt_obj.args.HuntType.STANDARD:
    hunt_args = hunt_obj.args.standard

    if hunt_obj.client_rate > 0:
      # Given that we use caching in _GetNumClients and hunt_obj may be updated
      # in another process, we have to account for cases where num_clients_diff
      # may go below 0.
      num_clients_diff = max(
          0,
          _GetNumClients(hunt_obj.hunt_id) - hunt_obj.num_clients_at_start_time)
      next_client_due_msecs = int(
          num_clients_diff / hunt_obj.client_rate * 60e6)

      start_at = rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(
          hunt_obj.last_start_time.AsMicrosecondsSinceEpoch() +
          next_client_due_msecs)
    else:
      start_at = None

    # TODO(user): remove client_rate support when AFF4 is gone.
    # In REL_DB always work as if client rate is 0.

    flow_cls = registry.FlowRegistry.FlowClassByName(hunt_args.flow_name)
    flow_args = hunt_args.flow_args if hunt_args.HasField("flow_args") else None
    flow.StartFlow(
        client_id=client_id,
        creator=hunt_obj.creator,
        cpu_limit=hunt_obj.per_client_cpu_limit,
        network_bytes_limit=hunt_obj.per_client_network_bytes_limit,
        flow_cls=flow_cls,
        flow_args=flow_args,
        start_at=start_at,
        parent_hunt_id=hunt_id)

    if hunt_obj.client_limit:
      if _GetNumClients(hunt_obj.hunt_id) >= hunt_obj.client_limit:
        PauseHunt(hunt_id)

  elif hunt_obj.args.hunt_type == hunt_obj.args.HuntType.VARIABLE:
    raise NotImplementedError()
  else:
    raise UnknownHuntTypeError("Can't determine hunt type when starting "
                               "hunt %s on client %s." % (client_id, hunt_id))
Example #22
    def Handle(self, args, token=None):
        result = super(ApiCreateClientApprovalHandler,
                       self).Handle(args, token=token)

        if args.keep_client_alive:
            flow.StartFlow(client_id=str(args.client_id),
                           flow_cls=administrative.KeepAlive,
                           creator=token.username,
                           duration=3600)

        return result
Example #23
  def testHandlerRaisesOnArbitraryFlowId(self):
    # Create a mock flow.
    flow_id = flow.StartFlow(
        client_id=self.client_id, flow_cls=discovery.Interrogate)

    args = vfs_plugin.ApiGetVfsFileContentUpdateStateArgs(
        client_id=self.client_id, operation_id=flow_id)

    # Our mock flow is not a MultiGetFile flow, so an error should be raised.
    with self.assertRaises(vfs_plugin.VfsFileContentUpdateNotFoundError):
      self.handler.Handle(args, context=self.context)
Example #24
  def testHandlerThrowsExceptionOnArbitraryFlowId(self):
    # Create a mock flow.
    flow_id = flow.StartFlow(
        client_id=self.client_id, flow_cls=discovery.Interrogate)

    args = vfs_plugin.ApiGetVfsRefreshOperationStateArgs(
        client_id=self.client_id, operation_id=flow_id)

    # Our mock flow is not a RecursiveListFlow, so an error should be raised.
    with self.assertRaises(vfs_plugin.VfsRefreshOperationNotFoundError):
      self.handler.Handle(args, context=self.context)
Example #25
    def testLeavesClientIdEmptyForNonClientBasedFlows(self):
        client_id = self.SetupClient(0)
        flow_urn = flow.StartFlow(client_id=client_id,
                                  flow_name=processes.ListProcesses.__name__,
                                  token=self.token)
        flow_obj = aff4.FACTORY.Open(flow_urn, token=self.token)
        flow_api_obj = flow_plugin.ApiFlow().InitFromAff4Object(
            flow_obj, flow_id=flow_urn.Basename())

        self.assertEquals(flow_api_obj.client_id,
                          client_plugin.ApiClientId(client_id))
Example #26
    def testResolvesSimpleFlowURN(self):
        flow_urn = flow.StartFlow(
            flow_name=flow_test_lib.FlowWithOneNestedFlow.__name__,
            client_id=self.client_urn,
            token=self.token)
        flow_id = flow_plugin.ApiFlowId(flow_urn.Basename())

        self.assertEqual(
            flow_id.ResolveClientFlowURN(client_plugin.ApiClientId(
                self.client_urn),
                                         token=self.token), flow_urn)
Example #27
    def testCheckingArbitraryFlowStateDoesNotWork(self):
        self.InitRouterConfig(self.__class__.FILE_FINDER_ROUTER_CONFIG %
                              self.token.username)
        flow_urn = flow.StartFlow(client_id=self.client_id,
                                  flow_name=file_finder.FileFinder.__name__,
                                  token=self.token)

        flow_ref = self.api.Client(client_id=self.client_id.Basename()).Flow(
            flow_urn.Basename())
        with self.assertRaises(RuntimeError):
            flow_ref.Get()
Example #28
    def testGetFlowRaisesIfFlowWasNotCreatedBySameRouter(self):
        flow_urn = flow.StartFlow(client_id=self.client_id,
                                  flow_name=file_finder.FileFinder.__name__,
                                  token=self.token)

        router = self._CreateRouter(get_flow=rr.RobotRouterGetFlowParams(
            enabled=True))
        with self.assertRaises(access_control.UnauthorizedAccess):
            router.GetFlow(api_flow.ApiGetFlowArgs(
                client_id=self.client_id, flow_id=flow_urn.Basename()),
                           token=self.token)
Example #29
    def testFlowWithoutFlowProgressTypeDoesNotReportProgress(self):
        client_id = self.SetupClient(0)
        flow_id = flow.StartFlow(client_id=client_id,
                                 flow_cls=flow_test_lib.DummyFlow)
        flow_obj = data_store.REL_DB.ReadFlowObject(client_id, flow_id)

        flow_api_obj = flow_plugin.ApiFlow().InitFromFlowObject(flow_obj)
        self.assertIsNone(flow_api_obj.progress)

        flow_api_obj = flow_plugin.ApiFlow().InitFromFlowObject(
            flow_obj, with_progress=True)
        self.assertIsNone(flow_api_obj.progress)
Example #30
  def CallFlow(self,
               flow_name=None,
               next_state=None,
               request_data=None,
               client_id=None,
               base_session_id=None,
               **kwargs):
    """Creates a new flow and send its responses to a state.

    This creates a new flow. The flow may send back many responses which will be
    queued by the framework until the flow terminates. The final status message
    will cause the entire transaction to be committed to the specified state.

    Args:
       flow_name: The name of the flow to invoke.
       next_state: The state in this flow that responses to this message should
         go to.
       request_data: Any dict provided here will be available in the
         RequestState protobuf. The Responses object maintains a reference to
         this protobuf for use in the execution of the state method. (so you can
         access this data by responses.request). There is no format mandated on
         this data but it may be a serialized protobuf.
       client_id: If given, the flow is started for this client.
       base_session_id: A URN used as the base for the child flow's URN.
       **kwargs: Arguments for the child flow.

    Returns:
       The flow_id of the child flow which was created.

    Raises:
      ValueError: The requested next state does not exist.
    """
    if not getattr(self, next_state):
      raise ValueError("Next state %s is invalid." % next_state)

    flow_request = rdf_flow_objects.FlowRequest(
        client_id=self.rdf_flow.client_id,
        flow_id=self.rdf_flow.flow_id,
        request_id=self.GetNextOutboundId(),
        next_state=next_state)

    if request_data is not None:
      flow_request.request_data = rdf_protodict.Dict().FromDict(request_data)

    self.flow_requests.append(flow_request)

    flow_cls = registry.FlowRegistry.FlowClassByName(flow_name)

    flow.StartFlow(
        client_id=self.rdf_flow.client_id,
        flow_cls=flow_cls,
        parent_flow_obj=self,
        **kwargs)
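
For context, here is a minimal hypothetical sketch of how a flow might use the CallFlow API documented above. It is not one of the scraped examples; the class name, state name, request_data payload and the choice of discovery.Interrogate as the child flow are assumptions for illustration only.

# Hypothetical parent flow (assumed names) illustrating CallFlow usage.
from grr_response_server import flow_base
from grr_response_server.flows.general import discovery


class ExampleParentFlow(flow_base.FlowBase):
  """Hypothetical parent flow used only to illustrate CallFlow."""

  def Start(self):
    # The framework queues the child's responses and delivers them, together
    # with the final status, to the state named by next_state.
    self.CallFlow(
        discovery.Interrogate.__name__,
        next_state="ProcessInterrogate",
        request_data={"reason": "example"})

  def ProcessInterrogate(self, responses):
    if not responses.success:
      raise RuntimeError("Child Interrogate flow failed: %s" % responses.status)
    # The dict passed as request_data above is reachable via responses.request,
    # as described in the CallFlow docstring.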