def GetReportData(self, get_report_args, token):
  """Builds a stack chart of flows, ordered by total run count.

  Each series is one flow type; its label names the top three users who
  ran it, and its single point carries the flow's total run count.
  """
  result = rdf_report_plugins.ApiReportData(
      representation_type=RepresentationType.STACK_CHART,
      stack_chart=rdf_report_plugins.ApiStackChartReportData(x_ticks=[]))

  # Mapping: flow name -> per-user Counter of runs (computed elsewhere).
  per_flow_user_counts = self._GetFlows(get_report_args, token)

  # Total runs per flow, regardless of which user started them.
  flow_totals = collections.Counter()
  for flow_name, user_counts in iteritems(per_flow_user_counts):
    flow_totals[flow_name] = sum(user_counts.values())

  # Emit one series per flow, most-run flows first.
  for index, (flow_name, run_total) in enumerate(flow_totals.most_common()):
    top_three = per_flow_user_counts[flow_name].most_common(3)
    user_summary = ", ".join(
        "{} ({})".format(user, count) for user, count in top_three)
    series = rdf_report_plugins.ApiReportDataSeries2D(
        # \u2003 is an emspace, a long whitespace character.
        label="{}\u2003Run By: {}".format(flow_name, user_summary),
        points=[
            rdf_report_plugins.ApiReportDataPoint2D(x=index, y=run_total)
        ])
    result.stack_chart.data.append(series)

  return result
def testUserFlowsReportPlugin(self):
  """UserFlowsReportPlugin counts flows per creator inside the time range."""
  client_id = self.SetupClient(1)

  # One flow written on 2012/12/14 — before the report's start time below,
  # so it must NOT be counted.
  with test_lib.FakeTime(
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/14")):
    AddFakeAuditLog(user="******")
    data_store.REL_DB.WriteFlowObject(
        rdf_flow_objects.Flow(
            flow_class_name="GetClientStats",
            creator="User123",
            client_id=client_id,
            flow_id="E0000000",
            create_time=rdfvalue.RDFDatetime.Now()))

  # Inside the report window: 10 GetClientStats flows by User123 and one
  # ArtifactCollectorFlow by User456.
  with test_lib.FakeTime(
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/22")):
    for i in range(10):
      data_store.REL_DB.WriteFlowObject(
          rdf_flow_objects.Flow(
              flow_class_name="GetClientStats",
              creator="User123",
              client_id=client_id,
              # Unique, zero-padded hex flow ids (00000000..00000009).
              flow_id="{:08X}".format(i),
              create_time=rdfvalue.RDFDatetime.Now()))
      AddFakeAuditLog(user="******")

    data_store.REL_DB.WriteFlowObject(
        rdf_flow_objects.Flow(
            flow_class_name="ArtifactCollectorFlow",
            creator="User456",
            client_id=client_id,
            flow_id="F0000000",
            create_time=rdfvalue.RDFDatetime.Now()))
    AddFakeAuditLog(user="******")

  report = report_plugins.GetReportByName(
      server_report_plugins.UserFlowsReportPlugin.__name__)

  # Report covers 30 days starting 2012/12/15, so only the 12/22 flows count.
  start = rdfvalue.RDFDatetime.FromHumanReadable("2012/12/15")
  month_duration = rdfvalue.Duration.From(30, rdfvalue.DAYS)

  api_report_data = report.GetReportData(
      stats_api.ApiGetReportArgs(
          name=report.__class__.__name__,
          start_time=start,
          duration=month_duration),
      token=self.token)

  # Series are ordered by run count: GetClientStats (10) before
  # ArtifactCollectorFlow (1); x is the series index, y the run total.
  self.assertEqual(
      api_report_data,
      rdf_report_plugins.ApiReportData(
          representation_type=RepresentationType.STACK_CHART,
          stack_chart=rdf_report_plugins.ApiStackChartReportData(
              x_ticks=[],
              data=[
                  rdf_report_plugins.ApiReportDataSeries2D(
                      label=u"GetClientStats\u2003Run By: User123 (10)",
                      points=[ApiReportDataPoint2D(x=0, y=10)]),
                  rdf_report_plugins.ApiReportDataSeries2D(
                      label=u"ArtifactCollectorFlow\u2003Run By: User456 (1)",
                      points=[ApiReportDataPoint2D(x=1, y=1)])
              ])))
def testUserActivityReportPlugin(self):
  """UserActivityReportPlugin buckets audit events per user per week."""
  # One event by User123 on 2012/12/14.
  with test_lib.FakeTime(
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/14")):
    AddFakeAuditLog(user="******", token=self.token)

  # Ten events by User123 and one by User456 on 2012/12/22.
  with test_lib.FakeTime(
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/22")):
    for _ in range(10):
      AddFakeAuditLog(user="******", token=self.token)
    AddFakeAuditLog(user="******", token=self.token)

  report = report_plugins.GetReportByName(
      server_report_plugins.UserActivityReportPlugin.__name__)

  # "Now" is frozen at 2012/12/31, so x values are whole weeks relative to
  # that date (x=-2 is the week containing 12/22, x=-3 the one with 12/14).
  with test_lib.FakeTime(
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/31")):
    api_report_data = report.GetReportData(
        stats_api.ApiGetReportArgs(name=report.__class__.__name__),
        token=self.token)

  self.assertEqual(
      api_report_data,
      rdf_report_plugins.ApiReportData(
          representation_type=RepresentationType.STACK_CHART,
          stack_chart=rdf_report_plugins.ApiStackChartReportData(
              data=[
                  rdf_report_plugins.ApiReportDataSeries2D(
                      label=u"User123",
                      points=[
                          ApiReportDataPoint2D(x=-10, y=0),
                          ApiReportDataPoint2D(x=-9, y=0),
                          ApiReportDataPoint2D(x=-8, y=0),
                          ApiReportDataPoint2D(x=-7, y=0),
                          ApiReportDataPoint2D(x=-6, y=0),
                          ApiReportDataPoint2D(x=-5, y=0),
                          ApiReportDataPoint2D(x=-4, y=0),
                          ApiReportDataPoint2D(x=-3, y=1),
                          ApiReportDataPoint2D(x=-2, y=10),
                          ApiReportDataPoint2D(x=-1, y=0)
                      ]),
                  rdf_report_plugins.ApiReportDataSeries2D(
                      label=u"User456",
                      points=[
                          ApiReportDataPoint2D(x=-10, y=0),
                          ApiReportDataPoint2D(x=-9, y=0),
                          ApiReportDataPoint2D(x=-8, y=0),
                          ApiReportDataPoint2D(x=-7, y=0),
                          ApiReportDataPoint2D(x=-6, y=0),
                          ApiReportDataPoint2D(x=-5, y=0),
                          ApiReportDataPoint2D(x=-4, y=0),
                          ApiReportDataPoint2D(x=-3, y=0),
                          ApiReportDataPoint2D(x=-2, y=1),
                          ApiReportDataPoint2D(x=-1, y=0)
                      ])
              ])))
def testSystemFlowsReportPlugin(self):
  """SystemFlowsReportPlugin counts RUN_FLOW audit events per flow name."""
  # One Flow123 run before the report window — must be excluded.
  with test_lib.FakeTime(
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/14")):
    AddFakeAuditLog(
        action=rdf_events.AuditEvent.Action.RUN_FLOW,
        user="******",
        flow_name="Flow123",
        token=self.token)

  # Inside the window: ten Flow123 runs and one Flow456 run.
  with test_lib.FakeTime(
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/22")):
    for _ in range(10):
      AddFakeAuditLog(
          action=rdf_events.AuditEvent.Action.RUN_FLOW,
          user="******",
          flow_name="Flow123",
          token=self.token)

    AddFakeAuditLog(
        action=rdf_events.AuditEvent.Action.RUN_FLOW,
        user="******",
        flow_name="Flow456",
        token=self.token)

  report = report_plugins.GetReportByName(
      server_report_plugins.SystemFlowsReportPlugin.__name__)

  start = rdfvalue.RDFDatetime.FromHumanReadable("2012/12/15")
  # NOTE(review): other tests in this file build durations via
  # rdfvalue.Duration.From(30, rdfvalue.DAYS); confirm whether the
  # string-based constructor here is still supported or should be migrated.
  month_duration = rdfvalue.Duration("30d")

  api_report_data = report.GetReportData(
      stats_api.ApiGetReportArgs(
          name=report.__class__.__name__,
          start_time=start,
          duration=month_duration),
      token=self.token)

  # Series ordered by run count; labels attribute the runs to "GRR".
  self.assertEqual(
      api_report_data,
      rdf_report_plugins.ApiReportData(
          representation_type=rdf_report_plugins.ApiReportData.
          RepresentationType.STACK_CHART,
          stack_chart=rdf_report_plugins.ApiStackChartReportData(
              x_ticks=[],
              data=[
                  rdf_report_plugins.ApiReportDataSeries2D(
                      label=u"Flow123\u2003Run By: GRR (10)",
                      points=[
                          rdf_report_plugins.ApiReportDataPoint2D(x=0, y=10)
                      ]),
                  rdf_report_plugins.ApiReportDataSeries2D(
                      label=u"Flow456\u2003Run By: GRR (1)",
                      points=[
                          rdf_report_plugins.ApiReportDataPoint2D(x=1, y=1)
                      ])
              ])))
def GetReportData(self, get_report_args, token):
  """Builds a stack chart of flow runs found in the audit log.

  Scans RUN_FLOW audit events within [start_time, start_time + duration),
  keeps those whose user passes self.UserFilter, and emits one series per
  flow name (most-run first) labeled with its top three users.

  Fix: the original called the Python-2-only ``dict.iteritems()`` method;
  this now uses the ``iteritems(...)`` compat helper already used by the
  sibling GetReportData implementation, which works on both Python 2 and 3.
  """
  ret = rdf_report_plugins.ApiReportData(
      representation_type=rdf_report_plugins.ApiReportData.
      RepresentationType.STACK_CHART,
      stack_chart=rdf_report_plugins.ApiStackChartReportData(x_ticks=[]))

  # TODO(user): move the calculation to a cronjob and store results in
  # AFF4.
  try:
    timerange_offset = get_report_args.duration
    timerange_end = get_report_args.start_time + timerange_offset

    # counts: flow name -> {"total": n, user1: n1, user2: n2, ...}.
    counts = {}
    try:
      for event in report_utils.GetAuditLogEntries(timerange_offset,
                                                   timerange_end, token):
        if (event.action == rdf_events.AuditEvent.Action.RUN_FLOW and
            self.UserFilter(event.user)):
          counts.setdefault(event.flow_name, {"total": 0, event.user: 0})
          counts[event.flow_name]["total"] += 1
          counts[event.flow_name].setdefault(event.user, 0)
          counts[event.flow_name][event.user] += 1
    except ValueError:
      # Couldn't find any logs.
      pass

    # Most-run flows first; x is the rank, y the total run count.
    for i, (flow, countdict) in enumerate(
        sorted(iteritems(counts), key=lambda x: x[1]["total"], reverse=True)):
      total_count = countdict["total"]
      countdict.pop("total")
      # Top three users by run count for this flow.
      topusercounts = sorted(
          iteritems(countdict), key=operator.itemgetter(1), reverse=True)[:3]
      topusers = ", ".join(
          "%s (%s)" % (user, count) for user, count in topusercounts)

      ret.stack_chart.data.append(
          rdf_report_plugins.ApiReportDataSeries2D(
              # \u2003 is an emspace, a long whitespace character.
              label=u"%s\u2003Run By: %s" % (flow, topusers),
              points=[
                  rdf_report_plugins.ApiReportDataPoint2D(
                      x=i, y=total_count)
              ]))
  except IOError:
    # Audit log unavailable — return the (empty) chart built so far.
    pass

  return ret
def testUserActivityReportPluginWithNoActivityToReport(self):
  """With no audit activity at all, the chart must contain no series."""
  plugin_name = server_report_plugins.UserActivityReportPlugin.__name__
  report = report_plugins.GetReportByName(plugin_name)

  report_args = stats_api.ApiGetReportArgs(name=report.__class__.__name__)
  api_report_data = report.GetReportData(report_args, token=self.token)

  expected = rdf_report_plugins.ApiReportData(
      representation_type=RepresentationType.STACK_CHART,
      stack_chart=rdf_report_plugins.ApiStackChartReportData(data=[]))
  self.assertEqual(api_report_data, expected)
def testUserActivityReportPlugin(self):
  """User activity is bucketed into full weeks within the report window."""
  # Audit events per day: 12/02 falls before the window, 12/28 after the
  # rounded-up window end.
  activity_by_day = {
      "2012/12/02": ["User123"],
      "2012/12/07": ["User123"],
      "2012/12/15": ["User123"] * 2 + ["User456"],
      "2012/12/23": ["User123"] * 10,
      "2012/12/28": ["User123"],
  }
  for day, users in activity_by_day.items():
    fake_now = rdfvalue.RDFDatetime.FromHumanReadable(day)
    with test_lib.FakeTime(fake_now):
      for user in users:
        AddFakeAuditLog(user=user)

  report = report_plugins.GetReportByName(
      server_report_plugins.UserActivityReportPlugin.__name__)

  # Use 15 days which will be rounded up to 3 full weeks.
  duration = rdfvalue.DurationSeconds.FromDays(15)
  start_time = rdfvalue.RDFDatetime.FromHumanReadable("2012/12/07")
  report_args = stats_api.ApiGetReportArgs(
      name=report.__class__.__name__,
      start_time=start_time,
      duration=duration)
  api_report_data = report.GetReportData(report_args, token=self.token)

  # x is the week index inside the window; y the event count for that user.
  user123_series = rdf_report_plugins.ApiReportDataSeries2D(
      label=u"User123",
      points=[
          ApiReportDataPoint2D(x=0, y=1),
          ApiReportDataPoint2D(x=1, y=2),
          ApiReportDataPoint2D(x=2, y=10),
      ])
  user456_series = rdf_report_plugins.ApiReportDataSeries2D(
      label=u"User456",
      points=[
          ApiReportDataPoint2D(x=0, y=0),
          ApiReportDataPoint2D(x=1, y=1),
          ApiReportDataPoint2D(x=2, y=0),
      ])
  expected = rdf_report_plugins.ApiReportData(
      representation_type=RepresentationType.STACK_CHART,
      stack_chart=rdf_report_plugins.ApiStackChartReportData(
          data=[user123_series, user456_series]))
  self.assertEqual(api_report_data, expected)
def GetReportData(self, get_report_args, token):
  """Report file frequency by client count.

  Fixes:
  - The original dereferenced ``data[-1]`` unconditionally, raising
    IndexError whenever the histogram was missing or empty (the except
    branch leaves ``data = ()``); the infinity bucket is now only added
    when data exists.
  - ``itertools.izip`` is Python-2-only (removed in Python 3); the
    builtin ``zip`` is used instead, which behaves identically here.
  """
  # Tick marks at powers of 32 bytes, positioned on a log scale and
  # labeled in human-readable byte units.
  x_ticks = []
  for e in range(15):
    x = 32**e
    x_ticks.append(
        rdf_report_plugins.ApiReportTickSpecifier(
            x=self._Log(x), label=self._BytesToHumanReadable(x)))

  ret = rdf_report_plugins.ApiReportData(
      representation_type=rdf_report_plugins.ApiReportData.
      RepresentationType.STACK_CHART,
      stack_chart=rdf_report_plugins.ApiStackChartReportData(
          x_ticks=x_ticks, bar_width=.2))

  data = ()
  try:
    fd = aff4.FACTORY.Open("aff4:/stats/FileStoreStats", token=token)
    graph = fd.Get(
        aff4_stats.FilestoreStats.SchemaCls.FILESTORE_FILESIZE_HISTOGRAM)
    if graph:
      data = graph.data
  except (IOError, TypeError):
    # Stats not collected yet; fall through with an empty histogram.
    pass

  xs = [point.x_value for point in data]
  ys = [point.y_value for point in data]

  # One "<low> - <high>" label per bucket boundary pair.
  labels = [
      "%s - %s" % (self._BytesToHumanReadable(int(x0)),
                   self._BytesToHumanReadable(int(x1)))
      for x0, x1 in zip(xs[:-1], xs[1:])
  ]
  if data:
    last_x = data[-1].x_value
    labels.append(
        # \u221E is the infinity sign.
        u"%s - \u221E" % self._BytesToHumanReadable(int(last_x)))

  # One single-point series per bucket, at the bucket's log-scaled position.
  ret.stack_chart.data = [
      rdf_report_plugins.ApiReportDataSeries2D(
          label=label,
          points=[
              rdf_report_plugins.ApiReportDataPoint2D(x=self._Log(x), y=y)
          ]) for label, x, y in zip(labels, xs, ys)
  ]
  return ret
def testUserFlowsReportPluginWithNoActivityToReport(self):
  """With no flows in the last month, the chart must have no series."""
  report = report_plugins.GetReportByName(
      server_report_plugins.UserFlowsReportPlugin.__name__)

  month_duration = rdfvalue.Duration.From(30, rdfvalue.DAYS)
  now = rdfvalue.RDFDatetime().Now()
  report_args = stats_api.ApiGetReportArgs(
      name=report.__class__.__name__,
      start_time=now - month_duration,
      duration=month_duration)
  api_report_data = report.GetReportData(report_args)

  expected = rdf_report_plugins.ApiReportData(
      representation_type=RepresentationType.STACK_CHART,
      stack_chart=rdf_report_plugins.ApiStackChartReportData(x_ticks=[]))
  self.assertEqual(api_report_data, expected)
def testUserActivityReportPluginWithNoActivityToReport(self):
  """With no audit activity in the window, the chart must be empty."""
  report = report_plugins.GetReportByName(
      server_report_plugins.UserActivityReportPlugin.__name__)

  duration = rdfvalue.Duration.From(14, rdfvalue.DAYS)
  start_time = rdfvalue.RDFDatetime.Now() - duration
  report_args = stats_api.ApiGetReportArgs(
      name=report.__class__.__name__,
      start_time=start_time,
      duration=duration)
  api_report_data = report.GetReportData(report_args)

  expected = rdf_report_plugins.ApiReportData(
      representation_type=RepresentationType.STACK_CHART,
      stack_chart=rdf_report_plugins.ApiStackChartReportData(data=[]))
  self.assertEqual(api_report_data, expected)
def testSystemFlowsReportPlugin(self):
  """SystemFlowsReportPlugin counts GRR-initiated flows in the window."""
  client_id = self.SetupClient(1).Basename()

  # One GetClientStats run before the report window — must be excluded.
  # Flows are written both to REL_DB (when enabled) and to the audit log
  # so the test covers both data-store paths.
  with test_lib.FakeTime(
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/14")):
    if data_store.RelationalDBWriteEnabled():
      data_store.REL_DB.WriteFlowObject(
          rdf_flow_objects.Flow(
              flow_class_name="GetClientStats",
              creator="GRR",
              client_id=client_id,
              flow_id="0000000B",
              create_time=rdfvalue.RDFDatetime.Now()))
    AddFakeAuditLog(
        action=Action.RUN_FLOW,
        user="******",
        flow_name="GetClientStats",
        token=self.token)

  # Inside the window: ten GetClientStats runs and one
  # ArtifactCollectorFlow run, all created by "GRR".
  with test_lib.FakeTime(
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/22")):
    for i in range(10):
      if data_store.RelationalDBWriteEnabled():
        data_store.REL_DB.WriteFlowObject(
            rdf_flow_objects.Flow(
                flow_class_name="GetClientStats",
                creator="GRR",
                client_id=client_id,
                # Unique, zero-padded hex flow ids (00000000..00000009).
                flow_id="{:08X}".format(i),
                create_time=rdfvalue.RDFDatetime.Now()))
      AddFakeAuditLog(
          action=Action.RUN_FLOW,
          user="******",
          flow_name="GetClientStats",
          token=self.token)

    if data_store.RelationalDBWriteEnabled():
      data_store.REL_DB.WriteFlowObject(
          rdf_flow_objects.Flow(
              flow_class_name="ArtifactCollectorFlow",
              creator="GRR",
              client_id=client_id,
              flow_id="0000000A",
              create_time=rdfvalue.RDFDatetime.Now()))
    AddFakeAuditLog(
        action=Action.RUN_FLOW,
        user="******",
        flow_name="ArtifactCollectorFlow",
        token=self.token)

  report = report_plugins.GetReportByName(
      server_report_plugins.SystemFlowsReportPlugin.__name__)

  start = rdfvalue.RDFDatetime.FromHumanReadable("2012/12/15")
  # NOTE(review): other tests in this file build durations via
  # rdfvalue.Duration.From(30, rdfvalue.DAYS); confirm whether the
  # string-based constructor here is still supported or should be migrated.
  month_duration = rdfvalue.Duration("30d")

  api_report_data = report.GetReportData(
      stats_api.ApiGetReportArgs(
          name=report.__class__.__name__,
          start_time=start,
          duration=month_duration),
      token=self.token)

  # Series ordered by run count: GetClientStats (10) before
  # ArtifactCollectorFlow (1); x is the series index, y the run total.
  self.assertEqual(
      api_report_data,
      rdf_report_plugins.ApiReportData(
          representation_type=RepresentationType.STACK_CHART,
          stack_chart=rdf_report_plugins.ApiStackChartReportData(
              x_ticks=[],
              data=[
                  rdf_report_plugins.ApiReportDataSeries2D(
                      label=u"GetClientStats\u2003Run By: GRR (10)",
                      points=[ApiReportDataPoint2D(x=0, y=10)]),
                  rdf_report_plugins.ApiReportDataSeries2D(
                      label=u"ArtifactCollectorFlow\u2003Run By: GRR (1)",
                      points=[ApiReportDataPoint2D(x=1, y=1)])
              ])))