def ProcessClients(self, responses):
    """Deletes client stats older than the configured retention period.

    Purges expired stats rows from the AFF4 data store in batches of
    10000 clients, and — when the relational data store is enabled —
    from the relational store as well, heart-beating between batches so
    the cron job is not considered stuck.

    Args:
        responses: Flow responses; unused by this job.
    """
    del responses  # Unused.
    end = rdfvalue.RDFDatetime.Now() - db.CLIENT_STATS_RETENTION
    # The cutoff is identical for every client, so convert it once
    # instead of once per client inside the innermost loop.
    end_micros = end.AsMicrosecondsSinceEpoch()

    client_urns = export_utils.GetAllClients(token=self.token)
    for batch in collection.Batch(client_urns, 10000):
        with data_store.DB.GetMutationPool() as mutation_pool:
            for client_urn in batch:
                mutation_pool.DeleteAttributes(
                    client_urn.Add("stats"), [u"aff4:stats"],
                    start=0,
                    end=end_micros)
        self.HeartBeat()

    if data_store.RelationalDBEnabled():
        total_deleted_count = 0
        # DeleteOldClientStats yields after each deletion batch so we
        # can heart-beat while the purge is in progress.
        for deleted_count in data_store.REL_DB.DeleteOldClientStats(
                yield_after_count=_STATS_DELETION_BATCH_SIZE,
                retention_time=end):
            self.HeartBeat()
            total_deleted_count += deleted_count
        self.Log("Deleted %d ClientStats that expired before %s",
                 total_deleted_count, end)
def Run(self):
    """Removes client stats entries older than MAX_AGE seconds."""
    # Deletion window: from the epoch up to MAX_AGE seconds ago,
    # expressed in microseconds as the data store expects.
    self.start = 0
    self.end = int(1e6 * (time.time() - self.MAX_AGE))

    all_clients = export_utils.GetAllClients(token=self.token)
    for urn_batch in collection.Batch(all_clients, 10000):
        with data_store.DB.GetMutationPool() as pool:
            for urn in urn_batch:
                pool.DeleteAttributes(
                    urn.Add("stats"), [u"aff4:stats"],
                    start=self.start,
                    end=self.end)
        self.HeartBeat()
def ProcessClients(self, unused_responses):
    """Does the work."""
    # Purge everything from the epoch up to MAX_AGE seconds ago; the
    # data store takes the bounds in microseconds.
    self.start = 0
    self.end = int(1e6 * (time.time() - self.MAX_AGE))

    all_clients = export_utils.GetAllClients(token=self.token)
    for urn_group in utils.Grouper(all_clients, 10000):
        with data_store.DB.GetMutationPool() as pool:
            for urn in urn_group:
                pool.DeleteAttributes(
                    urn.Add("stats"), [u"aff4:stats"],
                    start=self.start,
                    end=self.end)
        self.HeartBeat()