def collect(self):
    """Explain a single flow: the flow object, each status ticket, and any
    result collections, uploads, errors and backtraces the tickets carry.

    Yields:
      Renderer row dicts (divider rows and Field/Value rows).
    """
    flow_obj = self.get_flow_object(self.plugin_args.flow_id)
    with self._get_collection(flow_obj.client_id) as collection:
        yield dict(divider="Flow Object (%s)" % flow_obj.__class__.__name__)

        for x in self._explain(flow_obj,
                               ignore_fields=set(["ticket", "actions"])):
            yield x

        for row in collection.query(flow_id=self.plugin_args.flow_id):
            ticket = flow.FlowStatus.from_json(row["ticket_data"],
                                               session=self.session)

            yield dict(divider="Flow Status Ticket")
            for x in self._explain(ticket, ignore_fields=set([
                    "location", "client_id", "flow_id", "collections"])):
                yield x

            if ticket.collections:
                yield dict(divider="Collections")
                # NOTE: loop variable renamed from `collection` - it used to
                # shadow the context-managed collection opened above, which
                # is an accident waiting to happen.
                for result_collection in ticket.collections:
                    link = renderers.UILink(
                        "gs",
                        result_collection.location.get_canonical().to_path())
                    yield dict(Field=result_collection.__class__.__name__,
                               Value=link, Description="", nowrap=True)

            if ticket.files:
                yield dict(divider="Uploads")
                for upload in ticket.files:
                    link = renderers.UILink(
                        "gs", upload.get_canonical().to_path())
                    yield dict(Value=link, nowrap=True)

            if ticket.error:
                yield dict(divider="Error")
                yield dict(Field="ticket.error", Value=ticket.error)

            if ticket.backtrace:
                yield dict(divider="Backtrace")
                yield dict(Field="ticket.backtrace", Value=ticket.backtrace)
def collect(self):
    """Yield flow rows for the current client, with linkified fields.

    Raises:
      plugin.PluginError: if no client id was provided.
    """
    if not self.client_id:
        raise plugin.PluginError("Client ID must be specified.")

    db_location = self._config.server.flow_db_for_server(self.client_id)
    with flow.FlowStatsCollection.load_from_location(
            db_location, session=self.session) as collection:
        rows = list(self.collect_db(collection))

        # Refresh pending flows in parallel before rendering.
        common.THREADPOOL.map(self._check_pending_flow, rows)

        for entry in rows:
            entry["collections"] = [
                renderers.UILink("gs", location)
                for location in entry["collections"]]
            entry["flow_id"] = renderers.UILink("f", entry["flow_id"])
            yield entry
def collect(self):
    """Yield flow rows for the configured queue, linking each flow id."""
    db_location = self._config.server.flow_db_for_server(
        queue=self.plugin_args.queue)

    with flow.FlowStatsCollection.load_from_location(
            db_location, session=self.session) as collection:
        for entry in self.collect_db(collection):
            entry["flow_id"] = renderers.UILink("h", entry["flow_id"])
            yield entry
def collect(self):
    """List files under the requested server path as linked rows."""
    location = self._config.server.location_from_path_for_server(
        self.plugin_args.path)

    for file_stat in location.list_files(
            max_results=self.plugin_args.limit):
        yield dict(
            Name=renderers.UILink("gs", file_stat.location.to_path()),
            Size=file_stat.size,
            Created=file_stat.created)
def collect(self):
    """Launch a FileFinderFlow to download *path*, then yield upload links."""
    path = self.plugin_args.path

    # Allow path to be a vfs link.
    if path.startswith("vfs:"):
        path = path[len("vfs:"):]

    launcher = self.session.plugins.launch_flow(
        flow="FileFinderFlow",
        args=dict(globs=[path], download=True))
    flow_obj = launcher.make_flow_object()

    yield dict(Message="Launching flow to fetch %s" % path)

    for ticket in self.launch_and_wait(flow_obj):
        for upload in ticket.files:
            yield dict(
                Message=renderers.UILink(
                    "gs", upload.get_canonical().to_path()),
                nowrap=True)
def collect(self):
    """List a directory in the client's VFS, optionally refreshing it first.

    Yields:
      Row dicts with the "Path" field converted to a vfs UILink.

    Raises:
      plugin.PluginError: if no client id was provided.
    """
    path = utils.normpath(self.plugin_args.path)
    self.collections = {}
    if not self.client_id:
        raise plugin.PluginError("Client ID expected.")

    # If the user asks for fresh data then launch the flow and wait for it
    # to finish.
    if self.plugin_args.refresh:
        flow_obj = self.session.plugins.launch_flow(
            flow="ListDirectory",
            args=dict(
                path=path,
                recursive=self.plugin_args.recursive,
            )).make_flow_object()

        # Wait until the list directory is completed.
        self.launch_and_wait(flow_obj)

    # First get the VFS index.
    vfs_index = find.VFSIndex.load_from_location(
        self._config.server.vfs_index_for_server(self.client_id),
        session=self.session)

    # We use the index to get the best StatEntryCollection() which covers
    # the requested path. There are three possible cases:
    # 1) All the existing StatEntryCollection()s start at a directory deeper
    #    than path. In this case we emulate the directories of all existing
    #    collections' starting paths.
    # 2) The requested path begins with the starting path of one or more
    #    StatEntryCollection()s. This means these collections contain it.
    # 3) path is longer than all StatEntryCollection()'s starting paths
    #    plus their depth.
    #
    # BUG FIX: this was filter(None, ...), which on Python 3 returns a
    # one-shot iterator; materialize a real list so the components can be
    # indexed and iterated more than once (identical result on Python 2).
    path_components = [part for part in path.split("/") if part]

    for row in self._collect_one_dir(vfs_index, path_components):
        row["Path"] = renderers.UILink("vfs", row["Path"])
        yield row
def collect(self):
    """Explain a hunt: flow object, client summary counts, then up to
    `limit` result rows, upload rows and error rows.

    Yields:
      Renderer row dicts (divider rows and Field/Time/Value rows).
    """
    collection = self._get_collection()
    flow_obj = self.get_flow_object(self.plugin_args.flow_id)
    if self.plugin_args.graph_clients:
        self.graph_clients(collection)

    yield dict(divider="Flow Object (%s)" % flow_obj.__class__.__name__)
    for x in self._explain(flow_obj, ignore_fields=set([
            "ticket", "actions"])):
        yield x

    yield dict(divider="Summary")
    yield dict(Field="Total Clients",
               Value=list(collection.query(
                   "select count(*) as c from tbl_default"))[0]["c"])
    yield dict(Field="Successful Clients",
               Value=list(collection.query(
                   "select count(*) as c from tbl_default "
                   "where status = 'Done'"))[0]["c"])
    yield dict(Field="Errors Clients",
               Value=list(collection.query(
                   "select count(*) as c from tbl_default "
                   "where status = 'Error'"))[0]["c"])

    total = 0
    yield dict(divider="Results")
    for row in collection.query(
            status="Done", limit=self.plugin_args.limit):
        ticket = flow.FlowStatus.from_json(row["ticket_data"],
                                           session=self.session)
        for result in ticket.collections:
            # BUG FIX: was `total > limit`, which emitted limit + 1 rows.
            if total >= self.plugin_args.limit:
                break

            yield dict(Field=ticket.client_id,
                       Time=ticket.timestamp,
                       Value=renderers.UILink(
                           "gs", result.location.to_path()),
                       nowrap=True)
            total += 1

    yield dict(divider="Uploads")
    total = 0
    for row in collection.query(
            status="Done", limit=self.plugin_args.limit):
        ticket = flow.FlowStatus.from_json(row["ticket_data"],
                                           session=self.session)
        for result in ticket.files:
            # BUG FIX: was `total > limit`, which emitted limit + 1 rows.
            if total >= self.plugin_args.limit:
                break

            yield dict(Field=ticket.client_id,
                       Time=ticket.timestamp,
                       Value=renderers.UILink(
                           "gs", result.to_path()),
                       nowrap=True)
            total += 1

    for row in collection.query(
            status="Error", limit=self.plugin_args.limit):
        ticket = flow.FlowStatus.from_json(row["ticket_data"],
                                           session=self.session)
        yield dict(Field=ticket.client_id,
                   Time=ticket.timestamp,
                   Value=ticket.error,
                   nowrap=True)