def testPurgeClientStats(self):
  """Entries older than MAX_AGE are removed by the PurgeClientStats flow."""
  max_age = system.PurgeClientStats.MAX_AGE

  # Write three stats entries, timestamped at 1x, 1.5x and 2x the maximum
  # age.  RSS_size doubles as a marker so we can tell the entries apart.
  for timestamp in [max_age, 1.5 * max_age, 2 * max_age]:
    with test_lib.FakeTime(timestamp):
      urn = self.client_id.Add("stats")
      fd = aff4.FACTORY.Create(urn, "ClientStats", token=self.token,
                               mode="rw")
      fd.AddAttribute(fd.Schema.STATS(
          rdfvalue.ClientStats(RSS_size=int(timestamp))))
      fd.Close()

  # All three versions should be present before the purge.
  fd = aff4.FACTORY.Open(urn, age=aff4.ALL_TIMES, token=self.token,
                         ignore_cache=True)
  entries = list(fd.GetValuesForAttribute(fd.Schema.STATS))
  self.assertEqual(len(entries), 3)
  self.assertTrue(max_age in [entry.RSS_size for entry in entries])

  # Run the purge flow at 2.5x max_age: only the newest entry survives.
  with test_lib.FakeTime(2.5 * max_age):
    for _ in test_lib.TestFlowHelper(
        "PurgeClientStats", None, client_id=self.client_id,
        token=self.token):
      pass

  fd = aff4.FACTORY.Open(urn, age=aff4.ALL_TIMES, token=self.token,
                         ignore_cache=True)
  entries = list(fd.GetValuesForAttribute(fd.Schema.STATS))
  self.assertEqual(len(entries), 1)
  self.assertTrue(max_age not in [entry.RSS_size for entry in entries])
def Run(self, arg):
  """Returns the client stats.

  Builds a ClientStats response from the current process (memory, network
  byte counters, create/boot times) plus any CPU and IO samples recorded by
  the worker's stats collector that fall inside the requested time window,
  then sends it back to the server.
  """
  if arg is None:
    arg = rdfvalue.GetClientStatsRequest()

  process = psutil.Process(os.getpid())
  memory = process.memory_info()
  response = rdfvalue.ClientStats(
      RSS_size=memory[0],
      VMS_size=memory[1],
      memory_percent=process.memory_percent(),
      bytes_received=stats.STATS.GetMetricValue(
          "grr_client_received_bytes"),
      bytes_sent=stats.STATS.GetMetricValue("grr_client_sent_bytes"),
      # Timestamps are stored as microseconds since the epoch.
      create_time=long(process.create_time() * 1e6),
      boot_time=long(psutil.boot_time() * 1e6))

  # Only include samples that fall strictly inside the requested window.
  for timestamp, user, system, percent in (
      self.grr_worker.stats_collector.cpu_samples):
    if arg.start_time < timestamp < arg.end_time:
      response.cpu_samples.Append(rdfvalue.CpuSample(
          timestamp=timestamp,
          user_cpu_time=user,
          system_cpu_time=system,
          cpu_percent=percent))

  for timestamp, read_bytes, write_bytes in (
      self.grr_worker.stats_collector.io_samples):
    if arg.start_time < timestamp < arg.end_time:
      response.io_samples.Append(rdfvalue.IOSample(
          timestamp=timestamp,
          read_bytes=read_bytes,
          write_bytes=write_bytes))

  self.Send(response)
def DownSample(self, sampling_interval=60):
  """Downsamples the data to save space.

  Args:
    sampling_interval: The sampling interval in seconds.

  Returns:
    New ClientStats object with cpu and IO samples downsampled.
  """
  # Copy-construct so all non-sample fields are carried over unchanged.
  downsampled = rdfvalue.ClientStats(self)
  downsampled.cpu_samples = self.DownsampleList(self.cpu_samples,
                                                sampling_interval)
  downsampled.io_samples = self.DownsampleList(self.io_samples,
                                               sampling_interval)
  return downsampled
def GetClientStats(self, _):
  """Fake get client stats method.

  Returns:
    A single-element list holding a ClientStats with 12 deterministic
    CPU and IO samples, 10 simulated seconds apart.
  """
  response = rdfvalue.ClientStats()
  for idx in range(12):
    timestamp = int(idx * 10 * 1e6)  # Microseconds since the epoch.
    response.cpu_samples.Append(rdfvalue.CpuSample(
        timestamp=timestamp,
        user_cpu_time=10 + idx,
        system_cpu_time=20 + idx,
        cpu_percent=10 + idx))
    response.io_samples.Append(rdfvalue.IOSample(
        timestamp=timestamp,
        read_bytes=10 + idx,
        write_bytes=10 + idx))
  return [response]
def FillClientStats(
    client_id=rdfvalue.ClientURN("C.0000000000000001"), token=None):
  """Feeds six one-minute batches of synthetic stats through the Stats flow.

  Args:
    client_id: The client the stats messages appear to come from.
    token: Access token used to look up the well-known Stats flow.
  """
  for minute in range(6):
    batch = rdfvalue.ClientStats()
    # 60 samples per batch, 10 simulated seconds apart.
    for tick in range(minute * 60, (minute + 1) * 60):
      batch.cpu_samples.Append(rdfvalue.CpuSample(
          timestamp=int(tick * 10 * 1e6),
          user_cpu_time=10 + tick,
          system_cpu_time=20 + tick,
          cpu_percent=10 + tick))
      batch.io_samples.Append(rdfvalue.IOSample(
          timestamp=int(tick * 10 * 1e6),
          read_bytes=10 + tick,
          write_bytes=10 + tick * 2))

    message = rdfvalue.GrrMessage(source=client_id,
                                  args=batch.SerializeToString())
    # Deliver each batch through the well-known Stats flow, exactly as a
    # real client message would arrive.
    flow.WellKnownFlow.GetAllWellKnownFlows(
        token=token)["Stats"].ProcessMessage(message)
def testProcessMessagesWellKnown(self):
  """A well-known flow message is handled without creating notifications."""
  test_worker = worker.GRRWorker(worker.DEFAULT_WORKER_QUEUE,
                                 token=self.token)

  # Send a message to a WellKnownFlow - ClientStatsAuto.
  client_id = rdfvalue.ClientURN("C.1100110011001100")
  self.SendResponse(rdfvalue.SessionID("aff4:/flows/W:Stats"),
                    data=rdfvalue.ClientStats(RSS_size=1234),
                    client_id=client_id,
                    well_known=True)

  # Drain the queue.
  test_worker.RunOnce()
  test_worker.thread_pool.Join()

  # The stats must have been written under the client's stats object.
  client = aff4.FACTORY.Open(client_id.Add("stats"), token=self.token)
  recorded = client.Get(client.Schema.STATS)
  self.assertEqual(recorded.RSS_size, 1234)

  # Well-known flows must not generate user notifications.
  user_fd = aff4.FACTORY.Open("aff4:/users/%s" % self.token.username,
                              token=self.token)
  self.assertIsNone(user_fd.Get(user_fd.Schema.PENDING_NOTIFICATIONS))
def ProcessMessage(self, message):
  """Processes a stats response from the client."""
  # Deserialize the ClientStats payload and hand it off together with the
  # originating client URN.
  parsed_stats = rdfvalue.ClientStats(message.args)
  self.ProcessResponse(message.source, parsed_stats)
def Layout(self, request, response):
  """Renders the client load view: leased tasks plus recent CPU/IO samples.

  Args:
    request: The HTTP request; "client_id" is read from request.REQ.
    response: The response object to render into.

  Returns:
    The response from the parent Layout, extended with a JS call carrying
    the last hour's CPU and IO time series.
  """
  self.client_id = rdfvalue.ClientURN(request.REQ.get("client_id"))
  self.client_actions = []

  # Collect tasks currently leased to the client (eta still in the future).
  current_time = rdfvalue.RDFDatetime().Now()
  leased_tasks = []
  with queue_manager.QueueManager(token=request.token) as manager:
    tasks = manager.Query(self.client_id.Queue(), limit=1000)
    for task in tasks:
      if task.eta > current_time:
        leased_tasks.append(task)

  # Batch-open the parent flows of all leased tasks, keyed by URN.
  flows_map = {}
  for flow_obj in aff4.FACTORY.MultiOpen(
      set(task.session_id for task in leased_tasks),
      mode="r", token=request.token):
    flows_map[flow_obj.urn] = flow_obj

  for task in leased_tasks:
    flow_obj = flows_map.get(task.session_id, None)
    if flow_obj:
      self.client_actions.append(dict(
          name=task.name,
          priority=str(task.priority),
          lease_time_left=str(task.eta - current_time),
          parent_flow=dict(name=flow_obj.Name(),
                           urn=flow_obj.urn)))

  # Fetch all ClientStats versions recorded within the last hour.
  now = rdfvalue.RDFDatetime().Now()
  hour_before_now = now - rdfvalue.Duration("1h")

  stats_urn = self.client_id.Add("stats")
  stats_obj = aff4.FACTORY.Create(
      stats_urn, "ClientStats", mode="r",
      age=(hour_before_now.AsMicroSecondsFromEpoch(),
           now.AsMicroSecondsFromEpoch()),
      token=request.token)
  client_stats_list = list(
      stats_obj.GetValuesForAttribute(stats_obj.Schema.STATS))

  # Flatten the samples from every version and sort them chronologically.
  cpu_samples = []
  io_samples = []
  for client_stats in client_stats_list:
    cpu_samples.extend(client_stats.cpu_samples)
    io_samples.extend(client_stats.io_samples)

  cpu_samples = sorted(cpu_samples, key=lambda x: x.timestamp)
  io_samples = sorted(io_samples, key=lambda x: x.timestamp)

  # Base the merged record on the most recent version (copied so the
  # original is not mutated), or a fresh ClientStats when none exist.
  if client_stats_list:
    client_stats = client_stats_list[-1].Copy()
  else:
    client_stats = rdfvalue.ClientStats()

  client_stats.cpu_samples = cpu_samples
  client_stats.io_samples = io_samples

  # Timestamp of the newest sample, preferring CPU over IO; None if no
  # samples are available at all.
  if client_stats.cpu_samples:
    self.stats_timestamp = client_stats.cpu_samples[-1].timestamp
  elif client_stats.io_samples:
    self.stats_timestamp = client_stats.io_samples[-1].timestamp
  else:
    self.stats_timestamp = None

  # Build (epoch-milliseconds, value) series for the charts, restricted to
  # samples strictly inside the last hour.
  user_cpu_data = []
  system_cpu_data = []
  for sample in client_stats.cpu_samples:
    if sample.timestamp > hour_before_now and sample.timestamp < now:
      user_cpu_data.append((sample.timestamp.AsSecondsFromEpoch() * 1000,
                            sample.user_cpu_time))
      system_cpu_data.append((sample.timestamp.AsSecondsFromEpoch() * 1000,
                              sample.system_cpu_time))

  read_bytes_data = []
  write_bytes_data = []
  read_count_data = []
  write_count_data = []
  for sample in client_stats.io_samples:
    if sample.timestamp > hour_before_now and sample.timestamp < now:
      read_bytes_data.append((sample.timestamp.AsSecondsFromEpoch() * 1000,
                              sample.read_bytes))
      write_bytes_data.append((sample.timestamp.AsSecondsFromEpoch() * 1000,
                               sample.write_bytes))
      read_count_data.append((sample.timestamp.AsSecondsFromEpoch() * 1000,
                              sample.read_count))
      write_count_data.append((sample.timestamp.AsSecondsFromEpoch() * 1000,
                               sample.write_count))

  response = super(ClientLoadView, self).Layout(request, response)
  return self.CallJavascript(response, "ClientLoadView.Layout",
                             user_cpu_data=user_cpu_data,
                             system_cpu_data=system_cpu_data,
                             read_bytes_data=read_bytes_data,
                             write_bytes_data=write_bytes_data,
                             read_count_data=read_count_data,
                             write_count_data=write_count_data)