def _testProcessMessagesWellKnown(self): worker_obj = self._TestWorker() # Send a message to a WellKnownFlow - ClientStatsAuto. session_id = administrative.GetClientStatsAuto.well_known_session_id client_id = self.SetupClient(100) if data_store.RelationalDBReadEnabled(): done = threading.Event() def handle(l): worker_obj._ProcessMessageHandlerRequests(l) done.set() data_store.REL_DB.RegisterMessageHandler( handle, worker_obj.well_known_flow_lease_time, limit=1000) data_store.REL_DB.WriteMessageHandlerRequests([ rdf_objects.MessageHandlerRequest( client_id=client_id.Basename(), handler_name="StatsHandler", request_id=12345, request=rdf_client_stats.ClientStats(RSS_size=1234)) ]) self.assertTrue(done.wait(10)) else: self.SendResponse(session_id, data=rdf_client_stats.ClientStats(RSS_size=1234), client_id=client_id, well_known=True) # Process all messages worker_obj.RunOnce() worker_obj.thread_pool.Join() if data_store.RelationalDBReadEnabled(): results = data_store.REL_DB.ReadClientStats( client_id=client_id.Basename(), min_timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(0), max_timestamp=rdfvalue.RDFDatetime.Now()) self.assertLen(results, 1) stats = results[0] else: client = aff4.FACTORY.Open(client_id.Add("stats"), token=self.token) stats = client.Get(client.Schema.STATS) self.assertEqual(stats.RSS_size, 1234) # Make sure no notifications have been sent. user = aff4.FACTORY.Open("aff4:/users/%s" % self.token.username, token=self.token) notifications = user.Get(user.Schema.PENDING_NOTIFICATIONS) self.assertIsNone(notifications) if data_store.RelationalDBReadEnabled(): data_store.REL_DB.UnregisterMessageHandler(timeout=60)
def testPurgeClientStats(self):
  """Tests that PurgeClientStats removes samples older than MAX_AGE.

  Writes three stats samples at 1x, 1.5x and 2x MAX_AGE, then runs the
  purge at 2.5x MAX_AGE: only the newest sample (still within MAX_AGE of
  "now") must survive.
  """
  client_id = test_lib.TEST_CLIENT_ID
  max_age = system.PurgeClientStats.MAX_AGE

  for t in [1 * max_age, 1.5 * max_age, 2 * max_age]:
    with test_lib.FakeTime(t):
      urn = client_id.Add("stats")

      stats_fd = aff4.FACTORY.Create(
          urn, aff4_stats.ClientStats, token=self.token, mode="rw")
      # RSS_size doubles as a marker so each sample's write time is
      # recoverable from its content.
      st = rdf_client_stats.ClientStats(RSS_size=int(t))
      stats_fd.AddAttribute(stats_fd.Schema.STATS(st))
      stats_fd.Close()

  stat_obj = aff4.FACTORY.Open(urn, age=aff4.ALL_TIMES, token=self.token)
  stat_entries = list(stat_obj.GetValuesForAttribute(stat_obj.Schema.STATS))
  # Use assertLen/assertIn for consistency with the sibling tests and
  # better failure messages.
  self.assertLen(stat_entries, 3)
  self.assertIn(max_age, [e.RSS_size for e in stat_entries])

  with test_lib.FakeTime(2.5 * max_age):
    self._RunPurgeClientStats()

  stat_obj = aff4.FACTORY.Open(urn, age=aff4.ALL_TIMES, token=self.token)
  stat_entries = list(stat_obj.GetValuesForAttribute(stat_obj.Schema.STATS))
  self.assertLen(stat_entries, 1)
  # The oldest sample (written at 1 * max_age) must have been purged.
  self.assertNotIn(max_age, [e.RSS_size for e in stat_entries])
def testPurgeClientStats(self):
  """Checks that old ClientStats rows are removed by the purge cronjob.

  Three samples are written at 1x, 1.5x and 2x the retention period; after
  purging at 2.51x, only the newest sample may remain.
  """
  client_id = test_lib.TEST_CLIENT_ID
  max_age = db.CLIENT_STATS_RETENTION.ToInt(rdfvalue.SECONDS)
  sample_ages = [1 * max_age, 1.5 * max_age, 2 * max_age]

  for sample_age in sample_ages:
    with test_lib.FakeTime(sample_age):
      # RSS_size encodes the (fake) write time so samples are identifiable.
      sample = rdf_client_stats.ClientStats(RSS_size=int(sample_age))
      data_store.REL_DB.WriteClientStats(client_id, sample)

  def _AllStats():
    return data_store.REL_DB.ReadClientStats(
        client_id=client_id,
        min_timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(0))

  self.assertCountEqual(sample_ages,
                        [entry.RSS_size for entry in _AllStats()])

  with test_lib.FakeTime(2.51 * max_age):
    self._RunPurgeClientStats()

  remaining = _AllStats()
  self.assertLen(remaining, 1)
  # The sample written at 1 * max_age is the one that must be gone.
  self.assertNotIn(max_age, [entry.RSS_size for entry in remaining])
def ReadClientStats(self, client_id, min_timestamp, max_timestamp):
  """Reads ClientStats for a given client and time range.

  Returns copies of the stored entries whose timestamp falls inside the
  inclusive [min_timestamp, max_timestamp] interval.
  """
  return [
      rdf_client_stats.ClientStats(stats)
      for timestamp, stats in iteritems(self.client_stats[client_id])
      if min_timestamp <= timestamp <= max_timestamp
  ]
def Run(self, arg):
  """Collects current resource usage and sends it as a ClientStats reply.

  Args:
    arg: Optional GetClientStatsRequest bounding the sample window; a
      default (empty) request is used when None.
  """
  if arg is None:
    arg = rdf_client_action.GetClientStatsRequest()

  process = psutil.Process(os.getpid())
  memory = process.memory_info()

  response = rdf_client_stats.ClientStats(
      RSS_size=memory.rss,
      VMS_size=memory.vms,
      memory_percent=process.memory_percent(),
      bytes_received=communicator.GRR_CLIENT_RECEIVED_BYTES.GetValue(),
      bytes_sent=communicator.GRR_CLIENT_SENT_BYTES.GetValue(),
      create_time=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(
          process.create_time()),
      boot_time=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(
          psutil.boot_time()))

  # Attach the CPU/IO samples recorded by the worker's stats collector
  # within the requested window.
  collector = self.grr_worker.stats_collector
  response.cpu_samples = collector.CpuSamplesBetween(
      start_time=arg.start_time, end_time=arg.end_time)
  response.io_samples = collector.IOSamplesBetween(
      start_time=arg.start_time, end_time=arg.end_time)

  self.Send(response)
def testWellKnownFlowResponsesAreProcessedOnlyOnce(self): worker_obj = self._TestWorker() # Send a message to a WellKnownFlow - ClientStatsAuto. client_id = rdf_client.ClientURN("C.1100110011001100") self.SendResponse(rdfvalue.SessionID(queue=queues.STATS, flow_name="Stats"), data=rdf_client_stats.ClientStats(RSS_size=1234), client_id=client_id, well_known=True) # Process all messages worker_obj.RunOnce() worker_obj.thread_pool.Join() client = aff4.FACTORY.Open(client_id.Add("stats"), token=self.token) stats = client.Get(client.Schema.STATS) self.assertEqual(stats.RSS_size, 1234) aff4.FACTORY.Delete(client_id.Add("stats"), token=self.token) # Process all messages once again - there should be no actual processing # done, as all the responses were processed last time. worker_obj.RunOnce() worker_obj.thread_pool.Join() # Check that stats haven't changed as no new responses were processed. client = aff4.FACTORY.Open(client_id.Add("stats"), token=self.token) self.assertIsNone(client.Get(client.Schema.STATS))
def FillClientStats(self, client_id):
  """Writes six synthetic ClientStats samples for the given client.

  Sample i is written at fake time (i + 1) * 10 seconds with one CPU and
  one IO sample each; when the relational write path is enabled the same
  entry is mirrored into REL_DB.
  """
  with aff4.FACTORY.Create(
      client_id.Add("stats"),
      aff4_type=aff4_stats.ClientStats,
      token=self.token,
      mode="rw") as stats_fd:
    for index in range(6):
      with test_lib.FakeTime((index + 1) * 10):
        # Sample timestamps are microseconds matching the fake clock.
        timestamp = int((index + 1) * 10 * 1e6)
        entry = rdf_client_stats.ClientStats()
        entry.cpu_samples.Append(
            rdf_client_stats.CpuSample(
                timestamp=timestamp,
                user_cpu_time=10 + index,
                system_cpu_time=20 + index,
                cpu_percent=10 + index))
        entry.io_samples.Append(
            rdf_client_stats.IOSample(
                timestamp=timestamp,
                read_bytes=10 + index,
                write_bytes=10 + index * 2))
        stats_fd.AddAttribute(stats_fd.Schema.STATS(entry))

        if data_store.RelationalDBWriteEnabled():
          data_store.REL_DB.WriteClientStats(
              client_id=client_id.Basename(), stats=entry)
def _testProcessMessagesWellKnown(self):
  """Verifies processing of a ClientStatsAuto well-known flow message.

  Depending on the enabled data store, the stats response is either routed
  through the relational message-handler machinery or the legacy AFF4
  well-known-flow path; either way the stats must be stored on the
  client's AFF4 "stats" object and no user notification may be generated.
  """
  worker_obj = self._TestWorker()

  # Send a message to a WellKnownFlow - ClientStatsAuto.
  session_id = administrative.GetClientStatsAuto.well_known_session_id
  client_id = rdf_client.ClientURN("C.1100110011001100")

  if data_store.RelationalDBReadEnabled(category="message_handlers"):
    # Relational path: the handler runs on a background thread; the event
    # signals that it processed the request.
    done = threading.Event()

    def handle(l):
      worker_obj._ProcessMessageHandlerRequests(l)
      done.set()

    # Registration must precede sending so the dispatcher sees the
    # request.
    data_store.REL_DB.RegisterMessageHandler(
        handle, worker_obj.well_known_flow_lease_time, limit=1000)

    self.SendResponse(
        session_id,
        data=rdf_client_stats.ClientStats(RSS_size=1234),
        client_id=client_id,
        well_known=True)
    self.assertTrue(done.wait(10))
  else:
    self.SendResponse(
        session_id,
        data=rdf_client_stats.ClientStats(RSS_size=1234),
        client_id=client_id,
        well_known=True)

  # Process all messages
  worker_obj.RunOnce()
  worker_obj.thread_pool.Join()

  client = aff4.FACTORY.Open(client_id.Add("stats"), token=self.token)
  stats = client.Get(client.Schema.STATS)
  self.assertEqual(stats.RSS_size, 1234)

  # Make sure no notifications have been sent.
  user = aff4.FACTORY.Open(
      "aff4:/users/%s" % self.token.username, token=self.token)
  notifications = user.Get(user.Schema.PENDING_NOTIFICATIONS)
  self.assertIsNone(notifications)

  if data_store.RelationalDBReadEnabled(category="message_handlers"):
    # Pass a timeout (consistent with the sibling tests) so teardown
    # blocks until the handler thread has actually stopped instead of
    # racing with subsequent tests.
    data_store.REL_DB.UnregisterMessageHandler(timeout=60)
def WriteClientStats(self, client_id, stats):
  """Stores a ClientStats instance.

  Args:
    stats: The ClientStats to store. A defensive copy is stored; the
      caller's object is never modified.

  Raises:
    db.UnknownClientError: If client_id is not a known client.
  """
  if client_id not in collection.Flatten(self.ReadAllClientIDs()):
    raise db.UnknownClientError(client_id)

  # Copy first and default the timestamp on the copy only, so the caller's
  # argument is not mutated as a side effect of storing it.
  copy = rdf_client_stats.ClientStats(stats)
  if copy.timestamp is None:
    copy.timestamp = rdfvalue.RDFDatetime.Now()
  self.client_stats[client_id][copy.timestamp] = copy
def ReadClientStats(
    self,
    client_id: Text,
    min_timestamp: rdfvalue.RDFDatetime,
    max_timestamp: rdfvalue.RDFDatetime
) -> List[rdf_client_stats.ClientStats]:
  """Reads ClientStats for a given client and time range.

  Returns copies of stored entries whose timestamp lies within the
  inclusive [min_timestamp, max_timestamp] interval.
  """
  entries = self.client_stats[client_id].items()
  return [
      rdf_client_stats.ClientStats(stats)
      for ts, stats in entries
      if min_timestamp <= ts <= max_timestamp
  ]
def testPurgeClientStats(self):
  """Tests stats purging past the retention period on both datastores.

  Writes samples at 1x, 1.5x and 2x the retention age (to AFF4 and, if
  enabled, to the relational db), then purges at 2.51x: only the newest
  sample must survive, on whichever read path is enabled.
  """
  client_id = test_lib.TEST_CLIENT_ID
  max_age = db.CLIENT_STATS_RETENTION.seconds

  for t in [1 * max_age, 1.5 * max_age, 2 * max_age]:
    with test_lib.FakeTime(t):
      urn = client_id.Add("stats")

      stats_fd = aff4.FACTORY.Create(
          urn, aff4_stats.ClientStats, token=self.token, mode="rw")
      # RSS_size marks the fake write time so samples are identifiable.
      st = rdf_client_stats.ClientStats(RSS_size=int(t))
      stats_fd.AddAttribute(stats_fd.Schema.STATS(st))
      stats_fd.Close()

      # Mirror the sample into the relational db when dual-writing.
      if data_store.RelationalDBWriteEnabled():
        data_store.REL_DB.WriteClientStats(client_id.Basename(), st)

  if data_store.RelationalDBReadEnabled("client_stats"):
    stat_entries = data_store.REL_DB.ReadClientStats(
        client_id=client_id.Basename(),
        min_timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(0))
  else:
    stat_obj = aff4.FACTORY.Open(urn, age=aff4.ALL_TIMES, token=self.token)
    stat_entries = list(stat_obj.GetValuesForAttribute(stat_obj.Schema.STATS))

  self.assertCountEqual([1 * max_age, 1.5 * max_age, 2 * max_age],
                        [e.RSS_size for e in stat_entries])

  with test_lib.FakeTime(2.51 * max_age):
    self._RunPurgeClientStats()

  if data_store.RelationalDBReadEnabled("client_stats"):
    stat_entries = data_store.REL_DB.ReadClientStats(
        client_id=client_id.Basename(),
        min_timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(0))
  else:
    stat_obj = aff4.FACTORY.Open(urn, age=aff4.ALL_TIMES, token=self.token)
    stat_entries = list(stat_obj.GetValuesForAttribute(stat_obj.Schema.STATS))

  self.assertLen(stat_entries, 1)
  # The sample written at 1 * max_age must have been purged.
  self.assertNotIn(max_age, [e.RSS_size for e in stat_entries])
def GetClientStats(self, _):
  """Fake get client stats method.

  Returns a single ClientStats with twelve deterministic CPU and IO
  samples spaced 10 (fake) seconds apart.
  """
  response = rdf_client_stats.ClientStats()
  for index in range(12):
    # Timestamps are microseconds: 0, 10s, 20s, ...
    sample_time = int(index * 10 * 1e6)
    response.cpu_samples.Append(
        rdf_client_stats.CpuSample(
            timestamp=sample_time,
            user_cpu_time=10 + index,
            system_cpu_time=20 + index,
            cpu_percent=10 + index))
    response.io_samples.Append(
        rdf_client_stats.IOSample(
            timestamp=sample_time,
            read_bytes=10 + index,
            write_bytes=10 + index))
  return [response]
def FillClientStats(self, client_id):
  """Writes six synthetic ClientStats entries to the relational db.

  Entry i carries one CPU and one IO sample stamped at (i + 1) * 10
  seconds (in microseconds) and is written under a fake clock derived from
  that same timestamp.
  """

  def _BuildEntry(index):
    # One CPU and one IO sample per entry, timestamps in microseconds.
    sample_time = int((index + 1) * 10 * 1e6)
    entry = rdf_client_stats.ClientStats()
    entry.cpu_samples.Append(
        rdf_client_stats.CpuSample(
            timestamp=sample_time,
            user_cpu_time=10 + index,
            system_cpu_time=20 + index,
            cpu_percent=10 + index))
    entry.io_samples.Append(
        rdf_client_stats.IOSample(
            timestamp=sample_time,
            read_bytes=10 + index,
            write_bytes=10 + index * 2))
    return entry

  # Build all entries first, then persist each under its own fake clock.
  for entry in [_BuildEntry(i) for i in range(6)]:
    with test_lib.FakeTime(entry.cpu_samples[0].timestamp):
      data_store.REL_DB.WriteClientStats(client_id=client_id, stats=entry)
def testMessageHandlers(self):
  """Tests that a written message-handler request reaches its handler.

  Registers a handler, writes a StatsHandler request, waits for it to be
  processed, and verifies the resulting ClientStats row exists and the
  request queue is drained afterwards.
  """
  client_id = self.SetupClient(100)

  # Signals that the background handler thread processed the request.
  done = threading.Event()

  def handle(l):
    worker_lib.ProcessMessageHandlerRequests(l)
    done.set()

  # Registration must precede writing the request so it gets dispatched.
  data_store.REL_DB.RegisterMessageHandler(
      handle, worker_lib.GRRWorker.message_handler_lease_time, limit=1000)

  data_store.REL_DB.WriteMessageHandlerRequests([
      rdf_objects.MessageHandlerRequest(
          client_id=client_id,
          handler_name="StatsHandler",
          request_id=12345,
          request=rdf_client_stats.ClientStats(RSS_size=1234))
  ])

  self.assertTrue(done.wait(10))

  results = data_store.REL_DB.ReadClientStats(
      client_id=client_id,
      min_timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(0),
      max_timestamp=rdfvalue.RDFDatetime.Now())
  self.assertLen(results, 1)
  stats = results[0]

  self.assertEqual(stats.RSS_size, 1234)

  # Blocks until the handler thread has stopped, avoiding leakage into
  # other tests.
  data_store.REL_DB.UnregisterMessageHandler(timeout=60)

  # Make sure there are no leftover requests.
  self.assertEqual(data_store.REL_DB.ReadMessageHandlerRequests(), [])
def testDownsampled(self):
  """Tests ClientStats.Downsampled with a 10-minute sampling interval.

  The input has CPU samples at 00:00, 00:05, 00:10, 00:12 and 00:21 and IO
  samples at 00:00, 00:02 and 00:12. With a 10-minute interval, each
  bucket keeps the last sample's cumulative values; cpu_percent is
  averaged over the bucket (hence 0.55 and 0.5 below).
  """
  timestamp = rdfvalue.RDFDatetime.FromHumanReadable

  stats = rdf_client_stats.ClientStats(
      cpu_samples=[
          rdf_client_stats.CpuSample(
              timestamp=timestamp("2001-01-01 00:00"),
              user_cpu_time=2.5,
              system_cpu_time=3.2,
              cpu_percent=0.5),
          rdf_client_stats.CpuSample(
              timestamp=timestamp("2001-01-01 00:05"),
              user_cpu_time=2.6,
              system_cpu_time=4.7,
              cpu_percent=0.6),
          rdf_client_stats.CpuSample(
              timestamp=timestamp("2001-01-01 00:10"),
              user_cpu_time=10.0,
              system_cpu_time=14.2,
              cpu_percent=0.9),
          rdf_client_stats.CpuSample(
              timestamp=timestamp("2001-01-01 00:12"),
              user_cpu_time=12.3,
              system_cpu_time=14.9,
              cpu_percent=0.1),
          rdf_client_stats.CpuSample(
              timestamp=timestamp("2001-01-01 00:21"),
              user_cpu_time=16.1,
              system_cpu_time=22.3,
              cpu_percent=0.4)
      ],
      io_samples=[
          rdf_client_stats.IOSample(
              timestamp=timestamp("2001-01-01 00:00"),
              read_count=0,
              write_count=0),
          rdf_client_stats.IOSample(
              timestamp=timestamp("2001-01-01 00:02"),
              read_count=3,
              write_count=5),
          rdf_client_stats.IOSample(
              timestamp=timestamp("2001-01-01 00:12"),
              read_count=6,
              write_count=8),
      ])

  # One sample per 10-minute bucket; cpu_percent 0.55 = mean(0.5, 0.6)
  # and 0.5 = mean(0.9, 0.1) — TODO confirm averaging semantics against
  # the Downsampled implementation.
  expected = rdf_client_stats.ClientStats(
      cpu_samples=[
          rdf_client_stats.CpuSample(
              timestamp=timestamp("2001-01-01 00:05"),
              user_cpu_time=2.6,
              system_cpu_time=4.7,
              cpu_percent=0.55),
          rdf_client_stats.CpuSample(
              timestamp=timestamp("2001-01-01 00:12"),
              user_cpu_time=12.3,
              system_cpu_time=14.9,
              cpu_percent=0.5),
          rdf_client_stats.CpuSample(
              timestamp=timestamp("2001-01-01 00:21"),
              user_cpu_time=16.1,
              system_cpu_time=22.3,
              cpu_percent=0.4),
      ],
      io_samples=[
          rdf_client_stats.IOSample(
              timestamp=timestamp("2001-01-01 00:02"),
              read_count=3,
              write_count=5),
          rdf_client_stats.IOSample(
              timestamp=timestamp("2001-01-01 00:12"),
              read_count=6,
              write_count=8),
      ])

  actual = rdf_client_stats.ClientStats.Downsampled(
      stats, interval=rdfvalue.Duration.From(10, rdfvalue.MINUTES))

  self.assertEqual(actual, expected)
def ProcessMessage(self, message):
  """Processes a stats response from the client.

  Wraps the message payload in a ClientStats and forwards it, together
  with the originating client, to ProcessResponse.
  """
  self.ProcessResponse(message.source,
                       rdf_client_stats.ClientStats(message.payload))