def testUserFlowsReportPlugin(self):
  """Flow runs inside the window are grouped per flow and labeled per user."""
  # One run before the reporting window opens (start is 2012/12/15); it
  # must not show up in the report.
  with test_lib.FakeTime(
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/14")):
    AddFakeAuditLog(
        action=events.AuditEvent.Action.RUN_FLOW,
        user="User123",
        flow_name="Flow123",
        token=self.token)

  with test_lib.FakeTime(
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/22")):
    # Ten runs of Flow123 by User123 and a single run of Flow456 by
    # User456, all inside the reporting window.
    for _ in xrange(10):
      AddFakeAuditLog(
          action=events.AuditEvent.Action.RUN_FLOW,
          user="User123",
          flow_name="Flow123",
          token=self.token)

    AddFakeAuditLog(
        action=events.AuditEvent.Action.RUN_FLOW,
        user="User456",
        flow_name="Flow456",
        token=self.token)

  report = report_plugins.GetReportByName(
      server_report_plugins.UserFlowsReportPlugin.__name__)

  start = rdfvalue.RDFDatetime.FromHumanReadable("2012/12/15")
  month_duration = rdfvalue.Duration("30d")

  api_report_data = report.GetReportData(
      stats_api.ApiGetReportArgs(
          name=report.__class__.__name__,
          start_time=start,
          duration=month_duration),
      token=self.token)

  # Series are ordered by total run count, most-run flow first.
  self.assertEqual(
      api_report_data,
      rdf_report_plugins.ApiReportData(
          representation_type=rdf_report_plugins.ApiReportData.
          RepresentationType.STACK_CHART,
          stack_chart=rdf_report_plugins.ApiStackChartReportData(
              x_ticks=[],
              data=[
                  rdf_report_plugins.ApiReportDataSeries2D(
                      label=u"Flow123\u2003Run By: User123 (10)",
                      points=[
                          rdf_report_plugins.ApiReportDataPoint2D(x=0, y=10)
                      ]),
                  rdf_report_plugins.ApiReportDataSeries2D(
                      label=u"Flow456\u2003Run By: User456 (1)",
                      points=[
                          rdf_report_plugins.ApiReportDataPoint2D(x=1, y=1)
                      ])
              ])))
def GetReportData(self, get_report_args, token):
  """Fake report: emits canned points clipped to the requested time window."""
  result = rdf_report_plugins.ApiReportData(
      representation_type=rdf_report_plugins.ApiReportData.
      RepresentationType.STACK_CHART)

  # Fixture data: timestamp -> (x, y) point.
  database = {
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/11"): (1, 0),
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/12"): (2, 1),
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/13"): (3, 2),
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/14"): (5, 3),
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/15"): (8, 4),
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/16"): (13, 5),
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/17"): (21, 6),
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/18"): (34, 7)
  }

  window_start = get_report_args.start_time
  window_end = window_start + get_report_args.duration

  # Keep only the points whose timestamp falls inside [start, start+duration),
  # in chronological order.
  points = [
      rdf_report_plugins.ApiReportDataPoint2D(x=x, y=y)
      for timestamp, (x, y) in sorted(database.iteritems())
      if window_start <= timestamp < window_end
  ]
  result.stack_chart.data = [
      rdf_report_plugins.ApiReportDataSeries2D(label="Bar", points=points)
  ]
  return result
def testFileClientCountReportPlugin(self):
  """One stored file yields a count of 1 in the single-client bucket."""
  filename = "winexec_img.dd"
  client_id, = self.SetupClients(1)

  # Add a file to be reported.
  filestore_test.HashFileStoreTest.AddFileToFileStore(
      rdf_paths.PathSpec(
          pathtype=rdf_paths.PathSpec.PathType.OS,
          path=os.path.join(self.base_path, filename)),
      client_id=client_id,
      token=self.token)

  # Scan for files to be reported (the one we just added).
  for _ in test_lib.TestFlowHelper(
      filestore_stats.FilestoreStatsCronFlow.__name__, token=self.token):
    pass

  report = report_plugins.GetReportByName(
      filestore_report_plugins.FileClientCountReportPlugin.__name__)

  api_report_data = report.GetReportData(
      stats_api.ApiGetReportArgs(name=report.__class__.__name__),
      token=self.token)

  # Expected histogram: one single-point series per client-count bucket.
  # Only the "1 client" bucket contains the file we just added.
  expected_series = [
      rdf_report_plugins.ApiReportDataSeries2D(
          label=unicode(bucket),
          points=[
              rdf_report_plugins.ApiReportDataPoint2D(x=bucket, y=count)
          ]) for bucket, count in [(0, 0), (1, 1), (5, 0), (10, 0), (20, 0),
                                   (50, 0), (100, 0)]
  ]

  self.assertEqual(
      api_report_data,
      rdf_report_plugins.ApiReportData(
          representation_type=rdf_report_plugins.ApiReportData.
          RepresentationType.STACK_CHART,
          stack_chart=rdf_report_plugins.ApiStackChartReportData(
              data=expected_series)))
def GetReportData(self, get_report_args, token):
  """Breaks down flow runs per flow, labeled with each flow's top users.

  Builds one single-point stack-chart series per flow, ordered by total
  run count (descending); a series label names the flow and its three
  most frequent users.
  """
  ret = rdf_report_plugins.ApiReportData(
      representation_type=rdf_report_plugins.ApiReportData.
      RepresentationType.STACK_CHART,
      stack_chart=rdf_report_plugins.ApiStackChartReportData(x_ticks=[]))

  # TODO(user): move the calculation to a cronjob and store results in
  # AFF4.
  try:
    timerange_offset = get_report_args.duration
    timerange_end = get_report_args.start_time + timerange_offset

    # Per-flow counters: key "total" plus one entry per user.
    counts = {}
    try:
      for event in report_utils.GetAuditLogEntries(timerange_offset,
                                                   timerange_end, token):
        if (event.action == events.AuditEvent.Action.RUN_FLOW and
            self.UserFilter(event.user)):
          # Single lookup per event instead of repeated
          # counts[event.flow_name] indexing.
          flow_counts = counts.setdefault(event.flow_name, {"total": 0})
          flow_counts["total"] += 1
          flow_counts[event.user] = flow_counts.get(event.user, 0) + 1
    except ValueError:  # Couldn't find any logs..
      pass

    for i, (flow, countdict) in enumerate(
        sorted(
            counts.iteritems(), key=lambda x: x[1]["total"], reverse=True)):
      # Remove the aggregate so only per-user entries remain.
      total_count = countdict.pop("total")
      topusercounts = sorted(
          countdict.iteritems(), key=operator.itemgetter(1),
          reverse=True)[:3]
      topusers = ", ".join(
          "%s (%s)" % (user, count) for user, count in topusercounts)

      ret.stack_chart.data.append(
          rdf_report_plugins.ApiReportDataSeries2D(
              # \u2003 is an emspace, a long whitespace character.
              label=u"%s\u2003Run By: %s" % (flow, topusers),
              points=[
                  rdf_report_plugins.ApiReportDataPoint2D(x=i, y=total_count)
              ]))
  except IOError:
    pass

  return ret
def GetReportData(self, get_report_args, token):
  """Reports the filestore size distribution (file count per size bucket)."""
  # Logarithmic x ticks labeled with human-readable byte sizes.
  x_ticks = []
  for e in xrange(15):
    x = 32**e
    x_ticks.append(
        rdf_report_plugins.ApiReportTickSpecifier(
            x=self._Log(x), label=self._BytesToHumanReadable(x)))

  ret = rdf_report_plugins.ApiReportData(
      representation_type=rdf_report_plugins.ApiReportData.
      RepresentationType.STACK_CHART,
      stack_chart=rdf_report_plugins.ApiStackChartReportData(
          x_ticks=x_ticks, bar_width=.2))

  data = ()
  try:
    fd = aff4.FACTORY.Open("aff4:/stats/FileStoreStats", token=token)
    graph = fd.Get(
        aff4_stats.FilestoreStats.SchemaCls.FILESTORE_FILESIZE_HISTOGRAM)
    if graph:
      data = graph.data
  except (IOError, TypeError):
    pass

  # No histogram available (e.g. the stats cron has never run): return the
  # empty chart instead of crashing on data[-1] below.
  if not data:
    return ret

  xs = [point.x_value for point in data]
  ys = [point.y_value for point in data]

  # Bucket labels: "lower - upper" for adjacent bucket boundaries, and
  # "last - infinity" for the open-ended final bucket.
  labels = [
      "%s - %s" % (self._BytesToHumanReadable(int(x0)),
                   self._BytesToHumanReadable(int(x1)))
      for x0, x1 in itertools.izip(xs[:-1], xs[1:])
  ]
  last_x = data[-1].x_value
  labels.append(
      # \u221E is the infinity sign.
      u"%s - \u221E" % self._BytesToHumanReadable(int(last_x)))

  ret.stack_chart.data = (rdf_report_plugins.ApiReportDataSeries2D(
      label=label,
      points=[
          rdf_report_plugins.ApiReportDataPoint2D(x=self._Log(x), y=y)
      ]) for label, x, y in itertools.izip(labels, xs, ys))
  return ret
def GetReportData(self, get_report_args, token):
  """Show how the last active breakdown evolved over time."""
  ret = rdf_report_plugins.ApiReportData(
      representation_type=rdf_report_plugins.ApiReportData.
      RepresentationType.LINE_CHART)

  try:
    # now
    end_time = int(time.time() * 1e6)
    # half a year ago
    start_time = end_time - (60 * 60 * 24 * 1000000 * 180)
    fd = aff4.FACTORY.Open(
        rdfvalue.RDFURN("aff4:/stats/ClientFleetStats").Add(
            get_report_args.client_label),
        token=token,
        age=(start_time, end_time))

    # label -> [(x, y), ...], accumulated across all stored graph series.
    categories = {}
    for graph_series in fd.GetValuesForAttribute(
        aff4_stats.ClientFleetStats.SchemaCls.LAST_CONTACTED_HISTOGRAM):
      self._ProcessGraphSeries(graph_series, categories)

    # Sort by the leading number in each label (e.g. "30 day active"),
    # largest window first.  (A dead `graphs` list that duplicated
    # `categories` and was never read has been removed.)
    ret.line_chart.data = sorted(
        (rdf_report_plugins.ApiReportDataSeries2D(
            label=label,
            points=(rdf_report_plugins.ApiReportDataPoint2D(x=x, y=y)
                    for x, y in points))
         for label, points in categories.iteritems()),
        key=lambda series: int(series.label.split()[0]),
        reverse=True)
  except IOError:
    pass

  return ret
def GetReportData(self, get_report_args, token):
  """Filter the last week of user actions."""
  ret = rdf_report_plugins.ApiReportData(
      representation_type=rdf_report_plugins.ApiReportData.
      RepresentationType.STACK_CHART)

  try:
    # user -> [[week_offset, count], ...] where week_offset runs from
    # -WEEKS to -1 and -1 is the most recent full week before `now`.
    user_activity = {}
    week_duration = rdfvalue.Duration("7d")
    offset = rdfvalue.Duration("%dw" % self.WEEKS)
    now = rdfvalue.RDFDatetime.Now()

    # TODO(user): Why is the rollover not a duration?
    start_time = now - offset - rdfvalue.Duration(
        config.CONFIG["Logging.aff4_audit_log_rollover"])

    try:
      for fd in audit.AuditLogsForTimespan(start_time, now, token):
        for event in fd.GenerateItems():
          # Week w covers [now - w*7d, now - (w-1)*7d) and maps to list
          # index -w.  Starting at 1 (not 0) fixes an off-by-one: week 0
          # described a *future* span yet indexed bucket -0 == 0 (the
          # oldest), while the true oldest week (w == WEEKS) was never
          # counted at all.
          for week in xrange(1, self.__class__.WEEKS + 1):
            start = now - week * week_duration
            if start < event.timestamp < (start + week_duration):
              weekly_activity = user_activity.setdefault(
                  event.user,
                  [[x, 0] for x in xrange(-self.__class__.WEEKS, 0, 1)])
              weekly_activity[-week][1] += 1
    except ValueError:  # Couldn't find any logs..
      pass

    # One series per human user (system users excluded), sorted by name.
    ret.stack_chart.data = sorted(
        (rdf_report_plugins.ApiReportDataSeries2D(
            label=user,
            points=(rdf_report_plugins.ApiReportDataPoint2D(x=x, y=y)
                    for x, y in data))
         for user, data in user_activity.iteritems()
         if user not in aff4_users.GRRUser.SYSTEM_USERS),
        key=lambda series: series.label)
  except IOError:
    pass

  return ret
def GetReportData(self, get_report_args, token):
  """Report file frequency by client count."""
  result = rdf_report_plugins.ApiReportData(
      representation_type=rdf_report_plugins.ApiReportData.
      RepresentationType.STACK_CHART)

  try:
    fd = aff4.FACTORY.Open("aff4:/stats/FileStoreStats", token=token)
    histogram = fd.Get(aff4_stats.FilestoreStats.SchemaCls.
                       FILESTORE_CLIENTCOUNT_HISTOGRAM)
    buckets = histogram.data if histogram else ()

    # One single-point series per histogram bucket; the label is the
    # bucket's client count.
    result.stack_chart.data = (
        rdf_report_plugins.ApiReportDataSeries2D(
            label=str(bucket.x_value),
            points=(rdf_report_plugins.ApiReportDataPoint2D(
                x=bucket.x_value, y=bucket.y_value),)  # 1-elem tuple
        ) for bucket in buckets)
  except (IOError, TypeError):
    pass

  return result
def testUserActivityReportPlugin(self):
  """Audit events are counted into the correct per-user week buckets."""
  with test_lib.FakeTime(
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/14")):
    AddFakeAuditLog(
        "Fake audit description 14 Dec.", "C.123", "User123",
        token=self.token)

  with test_lib.FakeTime(
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/22")):
    for _ in xrange(10):
      AddFakeAuditLog(
          "Fake audit description 22 Dec.", "C.123", "User123",
          token=self.token)

    AddFakeAuditLog(
        "Fake audit description 22 Dec.", "C.456", "User456",
        token=self.token)

  report = report_plugins.GetReportByName(
      server_report_plugins.UserActivityReportPlugin.__name__)

  with test_lib.FakeTime(
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/31")):
    api_report_data = report.GetReportData(
        stats_api.ApiGetReportArgs(name=report.__class__.__name__),
        token=self.token)

  def MakeSeries(label, nonzero):
    """Builds a 10-week series of zero counts with the given overrides."""
    return rdf_report_plugins.ApiReportDataSeries2D(
        label=label,
        points=[
            rdf_report_plugins.ApiReportDataPoint2D(
                x=week, y=nonzero.get(week, 0)) for week in xrange(-10, 0)
        ])

  # Relative to the fake "now" (31 Dec): 14 Dec falls in week -3 and
  # 22 Dec in week -2.
  self.assertEqual(
      api_report_data,
      rdf_report_plugins.ApiReportData(
          representation_type=rdf_report_plugins.ApiReportData.
          RepresentationType.STACK_CHART,
          stack_chart=rdf_report_plugins.ApiStackChartReportData(data=[
              MakeSeries(u"User123", {-3: 1, -2: 10}),
              MakeSeries(u"User456", {-2: 1}),
          ])))