def _WrapContentGeneratorWithMappings(self, generator, mappings, args, context=None):
  """Streams archive chunks for `mappings`, notifying the user afterwards.

  Yields every chunk produced by `generator.Generate(mappings)`. On normal
  completion a success notification is sent to the requesting user; if
  generation raises, a failure notification is sent and the exception is
  re-raised.

  Args:
    generator: Archive generator exposing Generate(), num_archived_files and
      output_size.
    mappings: Mappings describing what to archive.
    args: Request args carrying client_id and flow_id.
    context: API call context; its username receives the notification.
      NOTE(review): defaults to None but is dereferenced unconditionally —
      callers appear to always pass it.

  Yields:
    Archive content chunks.
  """
  reference = rdf_objects.ObjectReference(
      reference_type=rdf_objects.ObjectReference.Type.FLOW,
      flow=rdf_objects.FlowReference(
          client_id=args.client_id, flow_id=args.flow_id))
  try:
    for chunk in generator.Generate(mappings):
      yield chunk
    notification.Notify(
        context.username,
        rdf_objects.UserNotification.Type.TYPE_FILE_ARCHIVE_GENERATED,
        "Downloaded archive of flow %s from client %s (archived %d files, "
        "archive size is %d)" % (args.flow_id, args.client_id,
                                 generator.num_archived_files,
                                 generator.output_size), reference)
  except Exception as e:
    # Notify about the failure before propagating so the user learns why
    # the download broke off mid-stream.
    notification.Notify(
        context.username,
        rdf_objects.UserNotification.Type.TYPE_FILE_ARCHIVE_GENERATION_FAILED,
        "Archive generation failed for flow %s on client %s: %s" %
        (args.flow_id, args.client_id, e), reference)
    raise
def _WrapContentGenerator(self, generator, flow_results, args, token=None):
  """Streams archive chunks for flow results, notifying the user afterwards.

  Yields every chunk produced by `generator.Generate(flow_results)`. On
  normal completion the requesting user gets a success notification; on
  error a failure notification is sent and the exception re-raised.

  Args:
    generator: Archive generator exposing Generate(), archived_files,
      total_files and output_size.
    flow_results: Iterable of flow results to archive.
    args: Request args carrying client_id and flow_id.
    token: Access token; its username receives the notification.
      NOTE(review): defaults to None but is dereferenced unconditionally —
      callers appear to always pass it.

  Yields:
    Archive content chunks.
  """
  reference = rdf_objects.ObjectReference(
      reference_type=rdf_objects.ObjectReference.Type.FLOW,
      flow=rdf_objects.FlowReference(
          client_id=args.client_id, flow_id=args.flow_id))
  try:
    for chunk in generator.Generate(flow_results):
      yield chunk
    notification.Notify(
        token.username,
        rdf_objects.UserNotification.Type.TYPE_FILE_ARCHIVE_GENERATED,
        "Downloaded archive of flow %s from client %s (archived %d "
        "out of %d items, archive size is %d)" %
        (args.flow_id, args.client_id, len(generator.archived_files),
         generator.total_files, generator.output_size), reference)
  except Exception as e:
    # Tell the user why the stream stopped, then propagate.
    notification.Notify(
        token.username,
        rdf_objects.UserNotification.Type.TYPE_FILE_ARCHIVE_GENERATION_FAILED,
        "Archive generation failed for flow %s on client %s: %s" %
        (args.flow_id, args.client_id, e), reference)
    raise
def _SanitizeApiCreateFlowArgs(
    args: ApiCreateFlowArgs
) -> Tuple[Type[flow_base.FlowBase], rdf_flow_runner.FlowRunnerArgs]:
  """Validates and sanitizes args for flow scheduling and starting."""
  if not args.client_id:
    raise ValueError("client_id must be provided")

  # Work on a copy so the caller's args are never mutated.
  runner_args = args.flow.runner_args.Copy()

  # The flow name may be given either directly or via the runner args.
  flow_name = args.flow.name or runner_args.flow_name
  if not flow_name:
    raise RuntimeError("Flow name is not specified.")

  # Clear all fields marked with HIDDEN, except for output_plugins - they are
  # marked HIDDEN, because we have a separate UI for them, not because they
  # shouldn't be shown to the user at all.
  #
  # TODO(user): Refactor the code to remove the HIDDEN label from
  # FlowRunnerArgs.output_plugins.
  runner_args.ClearFieldsWithLabel(
      rdf_structs.SemanticDescriptor.Labels.HIDDEN,
      exceptions="output_plugins")

  if args.original_flow:
    runner_args.original_flow = rdf_objects.FlowReference(
        flow_id=str(args.original_flow.flow_id),
        client_id=str(args.original_flow.client_id))

  return registry.FlowRegistry.FlowClassByName(flow_name), runner_args
def Handle(self, args, token=None):
  """Starts an AFF4 flow from the request args and returns its ApiFlow."""
  if not args.client_id:
    raise ValueError("client_id must be provided")

  # The flow name may come either from args.flow.name or the runner args.
  flow_name = args.flow.name or args.flow.runner_args.flow_name
  if not flow_name:
    raise RuntimeError("Flow name is not specified.")

  # Clear all fields marked with HIDDEN, except for output_plugins - they are
  # marked HIDDEN, because we have a separate UI for them, not because they
  # shouldn't be shown to the user at all.
  #
  # TODO(user): Refactor the code to remove the HIDDEN label from
  # FlowRunnerArgs.output_plugins.
  args.flow.runner_args.ClearFieldsWithLabel(
      rdf_structs.SemanticDescriptor.Labels.HIDDEN,
      exceptions="output_plugins")

  if args.original_flow:
    args.flow.runner_args.original_flow = rdf_objects.FlowReference(
        flow_id=utils.SmartStr(args.original_flow.flow_id),
        client_id=utils.SmartStr(args.original_flow.client_id))

  started_flow_urn = flow.StartFlow(
      client_id=args.client_id.ToClientURN(),
      flow_name=flow_name,
      token=token,
      args=args.flow.args,
      runner_args=args.flow.runner_args)

  flow_fd = aff4.FACTORY.Open(
      started_flow_urn, aff4_type=flow.GRRFlow, token=token)
  return ApiFlow().InitFromAff4Object(
      flow_fd, flow_id=started_flow_urn.Basename())
def Run(self):
  """Regression check for GetHunt with an original-flow reference."""
  original_ref = rdf_hunts.FlowLikeObjectReference(
      object_type="FLOW_REFERENCE",
      flow_reference=rdf_objects.FlowReference(
          flow_id="F:332211", client_id="C.1111111111111111"))

  relational = data_store.RelationalDBReadEnabled("hunts")
  with test_lib.FakeTime(42):
    if relational:
      hunt_id = self.CreateHunt(
          description="the hunt", original_object=original_ref)
      # Give the hunt non-trivial resource stats so they show in the output.
      hunt_obj = data_store.REL_DB.ReadHuntObject(hunt_id)
      hunt_obj.client_resources_stats.user_cpu_stats.sum = 5000
      hunt_obj.client_resources_stats.network_bytes_sent_stats.sum = 1000000
      data_store.REL_DB.WriteHuntObject(hunt_obj)
    else:
      with self.CreateHunt(
          description="the hunt", original_object=original_ref) as hunt_obj:
        hunt_id = hunt_obj.urn.Basename()
        stats = hunt_obj.context.usage_stats
        stats.user_cpu_stats.sum = 5000
        stats.network_bytes_sent_stats.sum = 1000000

  self.Check(
      "GetHunt",
      args=hunt_plugin.ApiGetHuntArgs(hunt_id=hunt_id),
      replace={hunt_id: "H:123456"})
def testFlowFailureNotificationIsParsedCorrectly(self):
  """A TYPE_FLOW_RUN_FAILED notification yields a FLOW reference."""
  flow_ref = rdf_objects.FlowReference(
      client_id=self.client_id.Basename(), flow_id="F:123456")
  parsed = self.InitFromObj_(
      rdf_objects.UserNotification.Type.TYPE_FLOW_RUN_FAILED,
      rdf_objects.ObjectReference(
          reference_type=rdf_objects.ObjectReference.Type.FLOW,
          flow=flow_ref))

  self.assertEqual(parsed.reference.type, "FLOW")
  self.assertEqual(parsed.reference.flow.client_id.ToClientURN(),
                   self.client_id)
  self.assertEqual(parsed.reference.flow.flow_id, "F:123456")
def testFlowSuccessNotificationIsParsedCorrectly(self):
  """A TYPE_FLOW_RUN_COMPLETED notification yields a FLOW reference."""
  flow_ref = rdf_objects.FlowReference(
      client_id=self.client_id, flow_id="F:123456")
  parsed = self.InitFromObj_(
      rdf_objects.UserNotification.Type.TYPE_FLOW_RUN_COMPLETED,
      rdf_objects.ObjectReference(
          reference_type=rdf_objects.ObjectReference.Type.FLOW,
          flow=flow_ref))

  self.assertEqual(parsed.reference.type, "FLOW")
  self.assertEqual(parsed.reference.flow.client_id.ToString(),
                   self.client_id)
  self.assertEqual(parsed.reference.flow.flow_id, "F:123456")
def GenerateNotifications(cls, client_id, token):
  """Generates fake notifications of different notification types."""
  session_id = flow_test_lib.StartFlow(
      discovery.Interrogate, client_id=client_id, creator=token.username)

  # ViewObject: Client (fake interrogate completion).
  notification.Notify(
      token.username,
      rdf_objects.UserNotification.Type.TYPE_CLIENT_INTERROGATED,
      "Fake discovery message",
      rdf_objects.ObjectReference(
          reference_type=rdf_objects.ObjectReference.Type.CLIENT,
          client=rdf_objects.ClientReference(
              client_id=client_id.Basename())))

  # ViewObject: VirtualFileSystem
  notification.Notify(
      token.username,
      rdf_objects.UserNotification.Type.TYPE_VFS_FILE_COLLECTED,
      "File fetch completed",
      rdf_objects.ObjectReference(
          reference_type=rdf_objects.ObjectReference.Type.VFS_FILE,
          vfs_file=rdf_objects.VfsFileReference(
              client_id=client_id.Basename(),
              path_type=rdf_objects.PathInfo.PathType.OS,
              path_components=["proc", "10", "exe"])))
  # Back the VFS notification with an actual (empty) file version.
  gui_test_lib.CreateFileVersion(
      client_id,
      "fs/os/proc/10/exe",
      b"",
      timestamp=gui_test_lib.TIME_0,
      token=token)

  # ViewObject: Flow
  notification.Notify(
      token.username,
      rdf_objects.UserNotification.Type.TYPE_FLOW_RUN_COMPLETED,
      "Fake view flow message",
      rdf_objects.ObjectReference(
          reference_type=rdf_objects.ObjectReference.Type.FLOW,
          flow=rdf_objects.FlowReference(
              client_id=client_id.Basename(), flow_id=session_id)))

  # FlowError: terminate the flow via whichever datastore is active.
  if data_store.RelationalDBEnabled():
    flow_base.TerminateFlow(client_id.Basename(), session_id,
                            "Fake flow error")
  else:
    flow_urn = client_id.Add("flows").Add(session_id)
    with aff4.FACTORY.Open(flow_urn, mode="rw", token=token) as flow_obj:
      flow_obj.GetRunner().Error("Fake flow error")

  return session_id
def NotifyCreatorOfError(self):
  """Notifies the flow's creator that the flow terminated with an error."""
  if not self.ShouldSendNotifications():
    return

  client_id = self.rdf_flow.client_id
  flow_id = self.rdf_flow.flow_id
  notification_lib.Notify(
      self.creator,
      rdf_objects.UserNotification.Type.TYPE_FLOW_RUN_FAILED,
      "Flow %s on %s terminated due to error" % (flow_id, client_id),
      rdf_objects.ObjectReference(
          reference_type=rdf_objects.ObjectReference.Type.FLOW,
          flow=rdf_objects.FlowReference(
              client_id=client_id, flow_id=flow_id)))
def Handle(self, args, token=None):
  """Starts a flow on either the relational or the AFF4 data store."""
  if not args.client_id:
    raise ValueError("client_id must be provided")

  # The flow name may come either from args.flow.name or the runner args.
  flow_name = args.flow.name or args.flow.runner_args.flow_name
  if not flow_name:
    raise RuntimeError("Flow name is not specified.")

  # Clear all fields marked with HIDDEN, except for output_plugins - they are
  # marked HIDDEN, because we have a separate UI for them, not because they
  # shouldn't be shown to the user at all.
  #
  # TODO(user): Refactor the code to remove the HIDDEN label from
  # FlowRunnerArgs.output_plugins.
  args.flow.runner_args.ClearFieldsWithLabel(
      rdf_structs.SemanticDescriptor.Labels.HIDDEN,
      exceptions="output_plugins")

  if args.original_flow:
    args.flow.runner_args.original_flow = rdf_objects.FlowReference(
        flow_id=utils.SmartStr(args.original_flow.flow_id),
        client_id=utils.SmartStr(args.original_flow.client_id))

  if not data_store.RelationalDBFlowsEnabled():
    # Legacy AFF4 path: start the flow and read it back from AFF4.
    flow_urn = flow.StartAFF4Flow(
        client_id=args.client_id.ToClientURN(),
        flow_name=flow_name,
        token=token,
        args=args.flow.args,
        runner_args=args.flow.runner_args)
    flow_fd = aff4.FACTORY.Open(flow_urn, aff4_type=flow.GRRFlow, token=token)
    return ApiFlow().InitFromAff4Object(flow_fd, flow_id=flow_urn.Basename())

  flow_cls = registry.FlowRegistry.FlowClassByName(flow_name)
  flow_id = flow.StartFlow(
      client_id=str(args.client_id),
      cpu_limit=args.flow.runner_args.cpu_limit,
      creator=token.username,
      flow_args=args.flow.args,
      flow_cls=flow_cls,
      network_bytes_limit=args.flow.runner_args.network_bytes_limit,
      original_flow=args.flow.runner_args.original_flow,
      output_plugins=args.flow.runner_args.output_plugins,
      parent_flow_obj=None,
  )

  # TODO(amoser): read back and convert to ApiFlow.
  return ApiFlow(flow_id=flow_id)
def Handle(self, args, token=None):
  """Starts a flow on the relational data store and returns its ApiFlow."""
  if not args.client_id:
    raise ValueError("client_id must be provided")

  runner_args = args.flow.runner_args

  # The flow name may come either from args.flow.name or the runner args.
  flow_name = args.flow.name or runner_args.flow_name
  if not flow_name:
    raise RuntimeError("Flow name is not specified.")

  # Clear all fields marked with HIDDEN, except for output_plugins - they are
  # marked HIDDEN, because we have a separate UI for them, not because they
  # shouldn't be shown to the user at all.
  #
  # TODO(user): Refactor the code to remove the HIDDEN label from
  # FlowRunnerArgs.output_plugins.
  runner_args.ClearFieldsWithLabel(
      rdf_structs.SemanticDescriptor.Labels.HIDDEN,
      exceptions="output_plugins")

  if args.original_flow:
    runner_args.original_flow = rdf_objects.FlowReference(
        flow_id=str(args.original_flow.flow_id),
        client_id=str(args.original_flow.client_id))

  # Only forward the limits when they were explicitly set.
  cpu_limit = runner_args.cpu_limit if runner_args.HasField(
      "cpu_limit") else None
  network_bytes_limit = runner_args.network_bytes_limit if runner_args.HasField(
      "network_bytes_limit") else None

  flow_id = flow.StartFlow(
      client_id=str(args.client_id),
      cpu_limit=cpu_limit,
      creator=token.username,
      flow_args=args.flow.args,
      flow_cls=registry.FlowRegistry.FlowClassByName(flow_name),
      network_bytes_limit=network_bytes_limit,
      original_flow=runner_args.original_flow,
      output_plugins=runner_args.output_plugins,
      parent_flow_obj=None,
  )

  flow_obj = data_store.REL_DB.ReadFlowObject(str(args.client_id), flow_id)
  result = ApiFlow().InitFromFlowObject(flow_obj)
  # The context is internal state and not part of the API response.
  result.context = None
  return result
def Run(self):
  """Regression check for GetHunt with an original-flow reference."""
  original_ref = rdf_hunts.FlowLikeObjectReference(
      object_type="FLOW_REFERENCE",
      flow_reference=rdf_objects.FlowReference(
          flow_id="F:332211", client_id="C.1111111111111111"))

  # TODO(user): make hunt stats non-zero when AFF4 is gone to
  # improve test coverage.
  with test_lib.FakeTime(42):
    hunt_id = self.CreateHunt(
        description="the hunt", original_object=original_ref)

  self.Check(
      "GetHunt",
      args=hunt_plugin.ApiGetHuntArgs(hunt_id=hunt_id),
      replace={hunt_id: "H:123456"})
def NotifyAboutEnd(self):
  """Send out a final notification about the end of this flow.

  Notifies the flow's creator that the flow completed, including the
  number of results collected. The notification carries a FLOW object
  reference when a client id is available, otherwise no reference.
  """
  flow_ref = None
  if self.runner_args.client_id:
    flow_ref = rdf_objects.FlowReference(
        client_id=self.client_id, flow_id=self.urn.Basename())

  num_results = len(self.ResultCollection())
  # Use a conditional expression rather than the legacy `and/or` trick,
  # which silently misbehaves whenever the first alternative is falsy.
  result_noun = "result" if num_results == 1 else "results"
  notification_lib.Notify(
      self.creator,
      rdf_objects.UserNotification.Type.TYPE_FLOW_RUN_COMPLETED,
      "Flow %s completed with %d %s" %
      (self.__class__.__name__, num_results, result_noun),
      rdf_objects.ObjectReference(
          reference_type=rdf_objects.ObjectReference.Type.FLOW,
          flow=flow_ref))
def NotifyAboutEnd(self):
  """Sends a final user notification announcing this flow's completion.

  The reported result count is the number of already-persisted results
  plus the replies still pending to be written.
  """
  # Sum up number of replies to write with the number of already
  # written results.
  num_results = (
      len(self.replies_to_write) + data_store.REL_DB.CountFlowResults(
          self.rdf_flow.client_id, self.rdf_flow.flow_id))

  flow_ref = rdf_objects.FlowReference(
      client_id=self.rdf_flow.client_id, flow_id=self.rdf_flow.flow_id)
  # Use a conditional expression rather than the legacy `and/or` trick,
  # which silently misbehaves whenever the first alternative is falsy.
  result_noun = "result" if num_results == 1 else "results"
  notification_lib.Notify(
      self.creator,
      rdf_objects.UserNotification.Type.TYPE_FLOW_RUN_COMPLETED,
      "Flow %s completed with %d %s" %
      (self.__class__.__name__, num_results, result_noun),
      rdf_objects.ObjectReference(
          reference_type=rdf_objects.ObjectReference.Type.FLOW,
          flow=flow_ref))
def Error(self, backtrace, client_id=None, status_code=None):
  """Terminates this flow with an error."""
  # Best-effort cleanup of pending flow state; more data may still exist.
  try:
    self.queue_manager.DestroyFlowStates(self.session_id)
  except queue_manager.MoreDataException:
    pass

  if not self.IsRunning():
    return

  # Build the error status to send back to the parent flow.
  status = rdf_flows.GrrStatus()
  if status_code is None:
    status.status = rdf_flows.GrrStatus.ReturnedStatus.GENERIC_ERROR
  else:
    status.status = status_code

  client_id = client_id or self.runner_args.client_id

  if backtrace:
    status.error_message = backtrace
    logging.error("Error in flow %s (%s). Trace: %s", self.session_id,
                  client_id, backtrace)
    self.context.backtrace = backtrace
  else:
    logging.error("Error in flow %s (%s).", self.session_id, client_id)

  self._SendTerminationMessage(status)

  self.context.state = rdf_flow_runner.FlowContext.State.ERROR

  if self.ShouldSendNotifications():
    flow_ref = None
    if client_id:
      flow_ref = rdf_objects.FlowReference(
          client_id=client_id.Basename(),
          flow_id=self.session_id.Basename())
    notification_lib.Notify(
        self.token.username,
        rdf_objects.UserNotification.Type.TYPE_FLOW_RUN_FAILED,
        "Flow (%s) terminated due to error" % self.session_id,
        rdf_objects.ObjectReference(
            reference_type=rdf_objects.ObjectReference.Type.FLOW,
            flow=flow_ref))

  self.flow_obj.Flush()
def GenerateNotifications(cls, client_id, username):
  """Generates fake notifications of different notification types."""
  session_id = flow_test_lib.StartFlow(
      discovery.Interrogate, client_id=client_id, creator=username)

  # ViewObject: Client (fake interrogate completion).
  notification.Notify(
      username,
      rdf_objects.UserNotification.Type.TYPE_CLIENT_INTERROGATED,
      "Fake discovery message",
      rdf_objects.ObjectReference(
          reference_type=rdf_objects.ObjectReference.Type.CLIENT,
          client=rdf_objects.ClientReference(client_id=client_id)))

  # ViewObject: VirtualFileSystem
  notification.Notify(
      username,
      rdf_objects.UserNotification.Type.TYPE_VFS_FILE_COLLECTED,
      "File fetch completed",
      rdf_objects.ObjectReference(
          reference_type=rdf_objects.ObjectReference.Type.VFS_FILE,
          vfs_file=rdf_objects.VfsFileReference(
              client_id=client_id,
              path_type=rdf_objects.PathInfo.PathType.OS,
              path_components=["proc", "10", "exe"])))
  # Back the VFS notification with an actual (empty) file version.
  gui_test_lib.CreateFileVersion(
      client_id, "fs/os/proc/10/exe", b"", timestamp=gui_test_lib.TIME_0)

  # ViewObject: Flow
  notification.Notify(
      username,
      rdf_objects.UserNotification.Type.TYPE_FLOW_RUN_COMPLETED,
      "Fake view flow message",
      rdf_objects.ObjectReference(
          reference_type=rdf_objects.ObjectReference.Type.FLOW,
          flow=rdf_objects.FlowReference(
              client_id=client_id, flow_id=session_id)))

  # FlowError
  flow_base.TerminateFlow(client_id, session_id, "Fake flow error")

  return session_id
def Run(self):
  """Regression check for GetHunt with an original-flow reference (AFF4)."""
  with test_lib.FakeTime(42):
    original_ref = rdf_hunts.FlowLikeObjectReference(
        object_type="FLOW_REFERENCE",
        flow_reference=rdf_objects.FlowReference(
            flow_id="F:332211", client_id="C.1111111111111111"))
    with self.CreateHunt(
        description="the hunt", original_object=original_ref) as hunt_obj:
      hunt_urn = hunt_obj.urn
      # Give the hunt non-trivial resource stats so they show in the output.
      stats = hunt_obj.context.usage_stats
      stats.user_cpu_stats.sum = 5000
      stats.network_bytes_sent_stats.sum = 1000000

  self.Check(
      "GetHunt",
      args=hunt_plugin.ApiGetHuntArgs(hunt_id=hunt_urn.Basename()),
      replace={hunt_urn.Basename(): "H:123456"})
def FromFlowIdAndClientId(cls, flow_id, client_id):
  """Builds a FlowLikeObjectReference pointing at the given flow."""
  reference = FlowLikeObjectReference()
  reference.flow_reference = rdf_objects.FlowReference(
      flow_id=flow_id, client_id=client_id)
  reference.object_type = "FLOW_REFERENCE"
  return reference