def testSystemFlowsReportPlugin(self):
  """Checks the stack chart produced by SystemFlowsReportPlugin.

  Writes one RUN_FLOW audit entry before the reporting window and eleven
  inside it, then verifies that only the in-window entries are counted and
  that flows are ordered by descending run count.
  """

  def _AddRunFlowEntry(flow_name):
    # All fake entries share the same user so the "Run By" suffix is stable.
    AddFakeAuditLog(
        action=rdf_events.AuditEvent.Action.RUN_FLOW,
        user="******",
        flow_name=flow_name,
        token=self.token)

  # This entry predates the reporting window and must be ignored.
  with test_lib.FakeTime(
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/14")):
    _AddRunFlowEntry("Flow123")

  # These entries fall inside the window: 10x Flow123, 1x Flow456.
  with test_lib.FakeTime(
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/22")):
    for _ in range(10):
      _AddRunFlowEntry("Flow123")
    _AddRunFlowEntry("Flow456")

  report = report_plugins.GetReportByName(
      server_report_plugins.SystemFlowsReportPlugin.__name__)

  start = rdfvalue.RDFDatetime.FromHumanReadable("2012/12/15")
  month_duration = rdfvalue.Duration("30d")

  api_report_data = report.GetReportData(
      stats_api.ApiGetReportArgs(
          name=report.__class__.__name__,
          start_time=start,
          duration=month_duration),
      token=self.token)

  expected = rdf_report_plugins.ApiReportData(
      representation_type=rdf_report_plugins.ApiReportData.
      RepresentationType.STACK_CHART,
      stack_chart=rdf_report_plugins.ApiStackChartReportData(
          x_ticks=[],
          data=[
              rdf_report_plugins.ApiReportDataSeries2D(
                  label=u"Flow123\u2003Run By: GRR (10)",
                  points=[
                      rdf_report_plugins.ApiReportDataPoint2D(x=0, y=10)
                  ]),
              rdf_report_plugins.ApiReportDataSeries2D(
                  label=u"Flow456\u2003Run By: GRR (1)",
                  points=[
                      rdf_report_plugins.ApiReportDataPoint2D(x=1, y=1)
                  ])
          ]))
  self.assertEqual(api_report_data, expected)
def GetReportData(self, get_report_args, token):
  """Show how the GRR-version breakdown evolved over time.

  Fetches up to 180 days of GRR_VERSION graph series for the requested
  client label and renders one line-chart series per version category,
  sorted alphabetically by label.

  Args:
    get_report_args: ApiGetReportArgs; only client_label is used here.
    token: Access token passed through to the data fetch.

  Returns:
    An ApiReportData with representation_type LINE_CHART.
  """
  report = rdf_report_plugins.ApiReportData(
      representation_type=rdf_report_plugins.ApiReportData.
      RepresentationType.LINE_CHART)

  series_with_timestamps = client_report_utils.FetchAllGraphSeries(
      get_report_args.client_label,
      rdf_stats.ClientGraphSeries.ReportType.GRR_VERSION,
      period=rdfvalue.DurationSeconds("180d"))

  # Process series in timestamp order so each category's points are
  # chronological.
  categories = {}
  for timestamp, graph_series in sorted(iteritems(series_with_timestamps)):
    self._ProcessGraphSeries(graph_series, timestamp, categories)

  # BUGFIX: removed a dead loop that built a `graphs` list of dicts from
  # `categories` but never used it.
  report.line_chart.data = sorted(
      (rdf_report_plugins.ApiReportDataSeries2D(
          label=label,
          points=(rdf_report_plugins.ApiReportDataPoint2D(x=x, y=y)
                  for x, y in points))
       for label, points in iteritems(categories)),
      key=lambda series: series.label)

  return report
def GetReportData(self, get_report_args, token):
  """Builds a stack chart of flow run counts with top-runner labels.

  Flows are ordered by total run count (descending); each series label
  names the flow and its top three users by run count.
  """
  result = rdf_report_plugins.ApiReportData(
      representation_type=RepresentationType.STACK_CHART,
      stack_chart=rdf_report_plugins.ApiStackChartReportData(x_ticks=[]))

  flow_counts = self._GetFlows(get_report_args, token)
  # Total runs per flow, across all users.
  totals = collections.Counter(
      {name: sum(per_user.values())
       for name, per_user in iteritems(flow_counts)})

  for index, (name, total) in enumerate(totals.most_common()):
    top_users = ", ".join(
        "{} ({})".format(user, count)
        for user, count in flow_counts[name].most_common(3))
    result.stack_chart.data.append(
        rdf_report_plugins.ApiReportDataSeries2D(
            # \u2003 is an emspace, a long whitespace character.
            label="{}\u2003Run By: {}".format(name, top_users),
            points=[
                rdf_report_plugins.ApiReportDataPoint2D(x=index, y=total)
            ]))

  return result
def GetReportData(self, get_report_args, token):
  """Returns canned sample data filtered to the requested time window.

  The hard-coded table maps dates to (x, y) points; only points whose
  timestamp falls inside [start_time, start_time + duration) are emitted,
  as a single "Bar" series.
  """
  result = rdf_report_plugins.ApiReportData(
      representation_type=rdf_report_plugins.ApiReportData.RepresentationType.
      STACK_CHART)

  database = {
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/11"): (1, 0),
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/12"): (2, 1),
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/13"): (3, 2),
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/14"): (5, 3),
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/15"): (8, 4),
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/16"): (13, 5),
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/17"): (21, 6),
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/18"): (34, 7)
  }

  window_start = get_report_args.start_time
  window_end = window_start + get_report_args.duration

  points = []
  for timestamp, (x, y) in sorted(iteritems(database)):
    # Half-open interval: start inclusive, end exclusive.
    if window_start <= timestamp < window_end:
      points.append(rdf_report_plugins.ApiReportDataPoint2D(x=x, y=y))

  result.stack_chart.data = [
      rdf_report_plugins.ApiReportDataSeries2D(label="Bar", points=points)
  ]
  return result
def GetReportData(self, get_report_args, token):
  """Show how the last active breakdown evolved over time."""
  report = rdf_report_plugins.ApiReportData(
      representation_type=rdf_report_plugins.ApiReportData.
      RepresentationType.LINE_CHART)

  series_with_timestamps = client_report_utils.FetchAllGraphSeries(
      get_report_args.client_label,
      rdf_stats.ClientGraphSeries.ReportType.N_DAY_ACTIVE,
      period=rdfvalue.Duration.From(180, rdfvalue.DAYS))

  # Feed series to the accumulator in chronological order so points stay
  # sorted within each category.
  categories = {}
  for timestamp, graph_series in sorted(iteritems(series_with_timestamps)):
    self._ProcessGraphSeries(graph_series, timestamp, categories)

  # Labels start with the day count (e.g. "30 day active"), so sorting on
  # the leading integer puts the longest window first.
  report.line_chart.data = sorted(
      (rdf_report_plugins.ApiReportDataSeries2D(
          label=label,
          points=(rdf_report_plugins.ApiReportDataPoint2D(x=x, y=y)
                  for x, y in points))
       for label, points in iteritems(categories)),
      key=lambda series: int(series.label.split()[0]),
      reverse=True)
  return report
def GetReportData(self, get_report_args, token):
  """Filter the last week of user actions."""
  result = rdf_report_plugins.ApiReportData(
      representation_type=RepresentationType.STACK_CHART)

  now = rdfvalue.RDFDatetime.Now()
  week_duration = rdfvalue.Duration("7d")
  # Buckets are negative week offsets relative to now: -WEEKS .. -1.
  week_offsets = range(-self.WEEKS, 0, 1)

  per_user = collections.defaultdict(
      lambda: {week: 0 for week in week_offsets})
  for username, timestamp in self._LoadUserActivity(token):
    # Past timestamps map to negative week indices; events outside the
    # tracked window fall into buckets that don't exist and are dropped.
    week = (timestamp - now).seconds // week_duration.seconds
    if week in per_user[username]:
      per_user[username][week] += 1

  # Alphabetical order, with internal system accounts excluded.
  visible_users = [(user, data)
                   for user, data in sorted(iteritems(per_user))
                   if user not in aff4_users.GRRUser.SYSTEM_USERS]

  result.stack_chart.data = [
      rdf_report_plugins.ApiReportDataSeries2D(
          label=user,
          points=(rdf_report_plugins.ApiReportDataPoint2D(x=x, y=y)
                  for x, y in sorted(data.items())))
      for user, data in visible_users
  ]
  return result
def GetReportData(self, get_report_args, token):
  """Builds a stack chart of flow run counts from the audit log.

  Scans RUN_FLOW audit events in the requested time range, counts runs per
  flow (total and per user), and emits one series per flow ordered by
  descending total, labeled with the top three runners.

  Args:
    get_report_args: ApiGetReportArgs with start_time and duration.
    token: Access token for reading the audit log.

  Returns:
    An ApiReportData with representation_type STACK_CHART.
  """
  ret = rdf_report_plugins.ApiReportData(
      representation_type=rdf_report_plugins.ApiReportData.
      RepresentationType.STACK_CHART,
      stack_chart=rdf_report_plugins.ApiStackChartReportData(x_ticks=[]))

  # TODO(user): move the calculation to a cronjob and store results in
  # AFF4.
  try:
    timerange_offset = get_report_args.duration
    timerange_end = get_report_args.start_time + timerange_offset

    # Store run count total and per-user. Each flow maps to a dict with a
    # special "total" key plus one key per user.
    counts = {}
    try:
      for event in report_utils.GetAuditLogEntries(timerange_offset,
                                                   timerange_end, token):
        if (event.action == rdf_events.AuditEvent.Action.RUN_FLOW and
            self.UserFilter(event.user)):
          counts.setdefault(event.flow_name, {"total": 0, event.user: 0})
          counts[event.flow_name]["total"] += 1
          counts[event.flow_name].setdefault(event.user, 0)
          counts[event.flow_name][event.user] += 1
    except ValueError:  # Couldn't find any logs..
      pass

    # BUGFIX: dict.iteritems() is Python-2-only; use the six-style
    # iteritems() helper like the rest of this module.
    for i, (flow, countdict) in enumerate(
        sorted(iteritems(counts), key=lambda x: x[1]["total"],
               reverse=True)):
      total_count = countdict["total"]
      countdict.pop("total")
      topusercounts = sorted(
          iteritems(countdict), key=operator.itemgetter(1),
          reverse=True)[:3]
      topusers = ", ".join(
          "%s (%s)" % (user, count) for user, count in topusercounts)

      ret.stack_chart.data.append(
          rdf_report_plugins.ApiReportDataSeries2D(
              # \u2003 is an emspace, a long whitespace character.
              label=u"%s\u2003Run By: %s" % (flow, topusers),
              points=[
                  rdf_report_plugins.ApiReportDataPoint2D(
                      x=i, y=total_count)
              ]))
  except IOError:
    # Best-effort: missing audit data yields an empty chart.
    pass

  return ret
def GetReportData(self, get_report_args, token):
  """Report file frequency by client count.

  Renders the filestore file-size histogram as a stack chart with
  logarithmic x ticks labeled in human-readable byte sizes.

  Args:
    get_report_args: ApiGetReportArgs (unused beyond the standard contract).
    token: Access token for the AFF4 read.

  Returns:
    An ApiReportData with representation_type STACK_CHART; empty data when
    no histogram is available.
  """
  # Ticks at powers of 32 bytes, placed on a log scale.
  x_ticks = []
  for e in range(15):
    x = 32**e
    x_ticks.append(
        rdf_report_plugins.ApiReportTickSpecifier(
            x=self._Log(x), label=self._BytesToHumanReadable(x)))

  ret = rdf_report_plugins.ApiReportData(
      representation_type=rdf_report_plugins.ApiReportData.
      RepresentationType.STACK_CHART,
      stack_chart=rdf_report_plugins.ApiStackChartReportData(
          x_ticks=x_ticks, bar_width=.2))

  data = ()
  try:
    fd = aff4.FACTORY.Open("aff4:/stats/FileStoreStats", token=token)
    graph = fd.Get(
        aff4_stats.FilestoreStats.SchemaCls.FILESTORE_FILESIZE_HISTOGRAM)
    if graph:
      data = graph.data
  except (IOError, TypeError):
    pass

  # BUGFIX: the original unconditionally read data[-1], raising IndexError
  # whenever the histogram was missing or empty. Return an empty chart
  # instead.
  if not data:
    ret.stack_chart.data = []
    return ret

  xs = [point.x_value for point in data]
  ys = [point.y_value for point in data]

  # BUGFIX: itertools.izip is Python-2-only; the builtin zip is equivalent
  # here.
  labels = [
      "%s - %s" % (self._BytesToHumanReadable(int(x0)),
                   self._BytesToHumanReadable(int(x1)))
      for x0, x1 in zip(xs[:-1], xs[1:])
  ]
  labels.append(
      # \u221E is the infinity sign.
      u"%s - \u221E" % self._BytesToHumanReadable(int(xs[-1])))

  ret.stack_chart.data = [
      rdf_report_plugins.ApiReportDataSeries2D(
          label=label,
          points=[
              rdf_report_plugins.ApiReportDataPoint2D(x=self._Log(x), y=y)
          ]) for label, x, y in zip(labels, xs, ys)
  ]

  return ret
def GetReportData(self, get_report_args, token):
  """Show how the last active breakdown evolved over time.

  Reads ~180 days of last-contacted histograms for the client label from
  AFF4 fleet stats and renders one line-chart series per activity bucket,
  sorted by the leading day count in the label (longest window first).
  """
  ret = rdf_report_plugins.ApiReportData(
      representation_type=rdf_report_plugins.ApiReportData.
      RepresentationType.LINE_CHART)

  try:
    # now
    end_time = int(time.time() * 1e6)
    # half a year ago
    start_time = end_time - (60 * 60 * 24 * 1000000 * 180)

    fd = aff4.FACTORY.Open(
        rdfvalue.RDFURN("aff4:/stats/ClientFleetStats").Add(
            get_report_args.client_label),
        token=token,
        age=(start_time, end_time))
    categories = {}
    for graph_series in fd.GetValuesForAttribute(
        aff4_stats.ClientFleetStats.SchemaCls.LAST_CONTACTED_HISTOGRAM):
      self._ProcessGraphSeries(graph_series, categories)

    # BUGFIX: categories.iteritems() is Python-2-only; use items(). Also
    # removed a dead loop that built an unused `graphs` list.
    ret.line_chart.data = sorted(
        (rdf_report_plugins.ApiReportDataSeries2D(
            label=label,
            points=(rdf_report_plugins.ApiReportDataPoint2D(x=x, y=y)
                    for x, y in points))
         for label, points in categories.items()),
        key=lambda series: int(series.label.split()[0]),
        reverse=True)
  except IOError:
    # Best-effort: missing fleet stats yields an empty chart.
    pass

  return ret
def GetReportData(self, get_report_args, token):
  """Filter the last week of user actions."""
  ret = rdf_report_plugins.ApiReportData(
      representation_type=RepresentationType.STACK_CHART)

  week_duration = rdfvalue.Duration.From(7, rdfvalue.DAYS)
  week_seconds = week_duration.ToInt(rdfvalue.SECONDS)
  # Round the requested duration up to whole weeks.
  num_weeks = int(
      math.ceil(
          rdfvalue.Duration(get_report_args.duration).ToFractional(
              rdfvalue.SECONDS) /
          week_duration.ToFractional(rdfvalue.SECONDS)))
  weeks = range(0, num_weeks)
  start_time = get_report_args.start_time
  end_time = start_time + num_weeks * week_duration

  # Per-user histogram, one zeroed bucket per week in the window.
  user_activity = collections.defaultdict(
      lambda: {week: 0 for week in weeks})
  for username, timestamp, count in self._LoadUserActivity(
      start_time=get_report_args.start_time, end_time=end_time,
      token=token):
    week = (timestamp - start_time).ToInt(
        rdfvalue.SECONDS) // week_seconds
    # Entries that round outside the bucketed window are dropped.
    if week in user_activity[username]:
      user_activity[username][week] += count

  # Alphabetical order, with internal system accounts excluded.
  visible_users = [(user, data)
                   for user, data in sorted(iteritems(user_activity))
                   if user not in access_control.SYSTEM_USERS]

  ret.stack_chart.data = [
      rdf_report_plugins.ApiReportDataSeries2D(
          label=user,
          points=(rdf_report_plugins.ApiReportDataPoint2D(x=x, y=y)
                  for x, y in sorted(data.items())))
      for user, data in visible_users
  ]
  return ret
def GetReportData(self, get_report_args, token):
  """Filter the last week of user actions."""
  ret = rdf_report_plugins.ApiReportData(
      representation_type=rdf_report_plugins.ApiReportData.
      RepresentationType.STACK_CHART)

  try:
    # Maps username -> list of [week_offset, count] pairs covering the
    # last self.WEEKS weeks (offsets -WEEKS .. -1).
    user_activity = {}
    week_duration = rdfvalue.Duration("7d")
    offset = rdfvalue.Duration("%dw" % self.WEEKS)
    now = rdfvalue.RDFDatetime.Now()
    # Start slightly earlier than the window so the first (possibly
    # still-rolling) log file is included.
    start_time = now - offset - audit.AUDIT_ROLLOVER_TIME
    try:
      for fd in audit.LegacyAuditLogsForTimespan(start_time, now, token):
        for event in fd.GenerateItems():
          # Bucket each event by scanning all candidate weeks; week=1 is
          # the most recent completed week, week=WEEKS-1 the oldest.
          for week in range(self.__class__.WEEKS):
            start = now - week * week_duration
            if start < event.timestamp < (start + week_duration):
              weekly_activity = user_activity.setdefault(
                  event.user,
                  [[x, 0] for x in range(-self.__class__.WEEKS, 0, 1)])
              # NOTE(review): for week == 0, -week indexes element 0 (the
              # oldest bucket) rather than the newest; only events with
              # timestamp > now would hit that branch — confirm intended.
              weekly_activity[-week][1] += 1
    except ValueError:  # Couldn't find any logs..
      pass

    # One series per non-system user, sorted alphabetically by username.
    ret.stack_chart.data = sorted(
        (rdf_report_plugins.ApiReportDataSeries2D(
            label=user,
            points=(rdf_report_plugins.ApiReportDataPoint2D(x=x, y=y)
                    for x, y in data))
         for user, data in iteritems(user_activity)
         if user not in aff4_users.GRRUser.SYSTEM_USERS),
        key=lambda series: series.label)
  except IOError:
    # Best-effort: missing audit logs yield an empty chart.
    pass

  return ret
def testUserActivityReportPlugin(self):
  """Checks the weekly per-user activity stack chart.

  Logs one event for User123 three weeks before the report date, ten more
  (plus one for User456) two weeks before, and verifies the ten weekly
  buckets (x = -10 .. -1) per user.
  """
  with test_lib.FakeTime(
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/14")):
    AddFakeAuditLog(
        "Fake audit description 14 Dec.", "C.123", "User123",
        token=self.token)

  with test_lib.FakeTime(
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/22")):
    # BUGFIX: xrange is Python-2-only; range is equivalent here.
    for _ in range(10):
      AddFakeAuditLog(
          "Fake audit description 22 Dec.", "C.123", "User123",
          token=self.token)
    AddFakeAuditLog(
        "Fake audit description 22 Dec.", "C.456", "User456",
        token=self.token)

  report = report_plugins.GetReportByName(
      server_report_plugins.UserActivityReportPlugin.__name__)

  with test_lib.FakeTime(
      rdfvalue.RDFDatetime.FromHumanReadable("2012/12/31")):
    api_report_data = report.GetReportData(
        stats_api.ApiGetReportArgs(name=report.__class__.__name__),
        token=self.token)

  def _ExpectedSeries(label, counts_by_week):
    # Ten weekly buckets, x = -10 .. -1; weeks without activity are zero.
    return rdf_report_plugins.ApiReportDataSeries2D(
        label=label,
        points=[
            rdf_report_plugins.ApiReportDataPoint2D(
                x=week, y=counts_by_week.get(week, 0))
            for week in range(-10, 0)
        ])

  self.assertEqual(
      api_report_data,
      rdf_report_plugins.ApiReportData(
          representation_type=rdf_report_plugins.ApiReportData.
          RepresentationType.STACK_CHART,
          stack_chart=rdf_report_plugins.ApiStackChartReportData(data=[
              _ExpectedSeries(u"User123", {-3: 1, -2: 10}),
              _ExpectedSeries(u"User456", {-2: 1}),
          ])))