def ProcessValues(self, value_type, values_generator_fn):
  """Converts and exports all values of the given type.

  Runs every registered converter for `value_type` over the values produced
  by `values_generator_fn`, repeating the iteration until the single-type
  iteration step reports no further types to process.

  Args:
    value_type: The type of the values to be converted and exported.
    values_generator_fn: Zero-argument callable returning a fresh iterable of
      values each time it is invoked (one invocation per iteration).

  Yields:
    Chunks produced by ProcessSingleTypeExportedValues.
  """
  converter_classes = export.ExportConverter.GetConvertersByClass(value_type)
  if not converter_classes:
    return

  options = self.GetExportOptions()
  converters = [converter_cls(options) for converter_cls in converter_classes]

  next_types = set()
  processed_types = set()
  while True:
    converted_responses = collection.Flatten(
        self._GenerateConvertedValues(converter, values_generator_fn())
        for converter in converters)

    # next_types/processed_types are mutated by the iteration generator;
    # the loop terminates once no new types remain.
    generator = self._GenerateSingleTypeIteration(next_types, processed_types,
                                                  converted_responses)
    for chunk in self.ProcessSingleTypeExportedValues(value_type, generator):
      yield chunk

    if not next_types:
      break
def WriteClientStats(self, client_id, stats):
  """Stores a ClientStats instance."""
  known_client_ids = collection.Flatten(self.ReadAllClientIDs())
  if client_id not in known_client_ids:
    raise db.UnknownClientError(client_id)
  # Key the stats entry by the current wall-clock time.
  now = rdfvalue.RDFDatetime.Now()
  self.client_stats[client_id][now] = stats
def AllParserTypes(self) -> Iterator[Type[Parser[_RDFValue]]]:
  """Returns all known parser types applicable for the artifact."""
  parser_type_groups = [
      self.SingleResponseParserTypes(),
      self.MultiResponseParserTypes(),
      self.SingleFileParserTypes(),
      self.MultiFileParserTypes(),
  ]
  return collection.Flatten(parser_type_groups)
def _GetMostRequestedUsernames(context):
  """Returns usernames ordered by how often `context.username` notified them.

  Reads all (including expired) client approval requests made by the current
  user and ranks the notified users by frequency, most frequent first.
  """
  requests = data_store.REL_DB.ReadApprovalRequests(
      context.username,
      rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT,
      include_expired=True)
  notified = collection.Flatten(request.notified_users for request in requests)
  frequency = collections.Counter(notified)
  return [name for name, _ in frequency.most_common()]
def WriteClientStats(self, client_id, stats):
  """Stores a ClientStats instance."""
  known_client_ids = collection.Flatten(self.ReadAllClientIDs())
  if client_id not in known_client_ids:
    raise db.UnknownClientError(client_id)

  # NOTE: intentionally mutates the caller's `stats` when it carries no
  # timestamp yet, mirroring the original behavior.
  if stats.timestamp is None:
    stats.timestamp = rdfvalue.RDFDatetime.Now()

  # Store a defensive copy, keyed by its (possibly freshly assigned) timestamp.
  stats_copy = rdf_client_stats.ClientStats(stats)
  self.client_stats[client_id][stats_copy.timestamp] = stats_copy
def AllParsers(self):
  """Retrieves all known parser applicable for the artifact.

  Returns:
    An iterator over parser instances.
  """
  parser_groups = [
      self.SingleResponseParsers(),
      self.MultiResponseParsers(),
      self.SingleFileParsers(),
      self.MultiFileParsers(),
  ]
  return collection.Flatten(parser_groups)
def testGenerator(self):
  # Flatten must consume nested generators depth-first, left to right.
  def FirstPair():
    yield "foo"
    yield "bar"

  def SecondPair():
    yield "baz"
    yield "quux"

  def Nested():
    yield FirstPair()
    yield SecondPair()

  flattened = collection.Flatten(Nested())
  self.assertListEqual(list(flattened), ["foo", "bar", "baz", "quux"])
def AddClientKeywords(self, client_id, keywords, cursor=None):
  """Associates the provided keywords with the client.

  Args:
    client_id: A GRR client id string.
    keywords: An iterable of keyword strings to attach to the client.
    cursor: MySQL cursor used to execute the query.

  Raises:
    UnknownClientError: If no client with the given id exists.
  """
  keywords = set(keywords)
  # Guard against an empty keyword set: formatting zero value tuples would
  # otherwise produce a malformed "VALUES " clause and a SQL syntax error.
  if not keywords:
    return

  cid = db_utils.ClientIDToInt(client_id)
  args = [(cid, mysql_utils.Hash(kw), kw) for kw in keywords]
  args = list(collection.Flatten(args))
  query = """
      INSERT INTO client_keywords (client_id, keyword_hash, keyword)
      VALUES {}
      ON DUPLICATE KEY UPDATE timestamp = NOW(6)
  """.format(", ".join(["(%s, %s, %s)"] * len(keywords)))
  try:
    cursor.execute(query, args)
  except MySQLdb.IntegrityError as e:
    # Foreign-key violation: the client row does not exist.
    raise db.UnknownClientError(client_id, cause=e)
def AddClientLabels(self, client_id, owner, labels, cursor=None):
  """Attaches a list of user labels to a client.

  Args:
    client_id: A GRR client id string.
    owner: Username of the user attaching the labels.
    labels: An iterable of label strings to attach.
    cursor: MySQL cursor used to execute the query.

  Raises:
    UnknownClientError: If no client with the given id exists.
  """
  labels = set(labels)
  # Guard against an empty label set: formatting zero value tuples would
  # otherwise produce a malformed "VALUES " clause and a SQL syntax error.
  if not labels:
    return

  cid = db_utils.ClientIDToInt(client_id)
  args = [(cid, mysql_utils.Hash(owner), owner, label) for label in labels]
  args = list(collection.Flatten(args))
  query = """
      INSERT IGNORE INTO client_labels
        (client_id, owner_username_hash, owner_username, label)
      VALUES {}
  """.format(", ".join(["(%s, %s, %s, %s)"] * len(labels)))
  try:
    cursor.execute(query, args)
  except MySQLdb.IntegrityError as e:
    # Foreign-key violation: the client row does not exist.
    raise db.UnknownClientError(client_id, cause=e)
def testList(self):
  nested = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
  result = collection.Flatten(nested)
  self.assertListEqual(list(result), [1, 2, 3, 4, 5, 6, 7, 8, 9])
def testTuple(self):
  nested = ((4, 8, 15), (16, 23, 42))
  result = collection.Flatten(nested)
  self.assertListEqual(list(result), [4, 8, 15, 16, 23, 42])
def BuildFlowList(root_urn, count, offset, with_state_and_context=False, token=None):
  """Builds a paginated, recursively nested list of flows under `root_urn`.

  Args:
    root_urn: AFF4 URN whose direct children are the top-level flows.
    count: Maximum number of top-level flows to return; falsy means no limit.
    offset: Number of top-level child URNs to skip (applied before opening).
    with_state_and_context: Forwarded to ApiFlow.InitFromAff4Object for every
      flow in the result.
    token: ACL token passed through to all AFF4 operations.

  Returns:
    An ApiListFlowsResult whose items are the top-level ApiFlow objects, each
    carrying its descendants in `nested_flows`.
  """
  # Paging window [offset:stop] over the newest-first (by URN age) child list.
  if not count:
    stop = None
  else:
    stop = offset + count
  root_children_urns = aff4.FACTORY.Open(root_urn, token=token).ListChildren()
  root_children_urns = sorted(
      root_children_urns, key=lambda x: x.age, reverse=True)
  root_children_urns = root_children_urns[offset:stop]
  root_children = aff4.FACTORY.MultiOpen(
      root_children_urns, aff4_type=flow.GRRFlow, token=token)
  root_children = sorted(
      root_children, key=ApiListFlowsHandler._GetCreationTime, reverse=True)
  # Prefetch every descendant of the selected flows in one recursive listing
  # and open them all together, so BuildList below needs no further round
  # trips to the datastore.
  nested_children_urns = dict(
      aff4.FACTORY.RecursiveMultiListChildren(
          [fd.urn for fd in root_children]))
  nested_children = aff4.FACTORY.MultiOpen(
      set(collection.Flatten(itervalues(nested_children_urns))),
      aff4_type=flow.GRRFlow, token=token)
  nested_children_map = dict((x.urn, x) for x in nested_children)

  def BuildList(fds, parent_id=None):
    """Builds list of flows recursively."""
    result = []
    for fd in fds:
      try:
        # Prefer the symlink URN when present so flow ids reflect the path
        # the caller listed.
        urn = fd.symlink_urn or fd.urn
        if parent_id:
          flow_id = "%s/%s" % (parent_id, urn.Basename())
        else:
          flow_id = urn.Basename()
        api_flow = ApiFlow().InitFromAff4Object(
            fd, flow_id=flow_id,
            with_state_and_context=with_state_and_context)
      except AttributeError:
        # If this doesn't work there's no way to recover.
        continue
      # Children come from the prefetched map; a missing key means the flow
      # has no children.
      try:
        children_urns = nested_children_urns[fd.urn]
      except KeyError:
        children_urns = []
      children = []
      for urn in children_urns:
        try:
          children.append(nested_children_map[urn])
        except KeyError:
          # Child URN listed but not openable as a GRRFlow; skip it.
          pass
      children = sorted(
          children, key=ApiListFlowsHandler._GetCreationTime, reverse=True)
      try:
        api_flow.nested_flows = BuildList(children, parent_id=flow_id)
      except KeyError:
        pass
      result.append(api_flow)
    return result

  return ApiListFlowsResult(items=BuildList(root_children))