def ParseResponses(
    self,
    knowledge_base: rdf_client.KnowledgeBase,
    responses: Iterable[rdfvalue.RDFValue],
) -> Iterator[rdf_client.WindowsServiceInformation]:
    """Parse Service registry keys and return WindowsServiceInformation.

    Each response is a StatEntry for one registry value under a service's
    key. Values belonging to the same service are folded into a single
    WindowsServiceInformation, keyed by the service name derived from the
    registry path.

    Args:
        knowledge_base: Client knowledge base (unused).
        responses: StatEntry responses for registry values.

    Returns:
        The collected WindowsServiceInformation objects, one per service.
    """
    del knowledge_base  # Unused.
    precondition.AssertIterableType(responses, rdf_client_fs.StatEntry)
    services = {}
    # Maps a registry value name to the WindowsServiceInformation field it
    # populates.
    field_map = {
        "Description": "description",
        "DisplayName": "display_name",
        "Group": "group_name",
        "DriverPackageId": "driver_package_id",
        "ErrorControl": "error_control",
        "ImagePath": "image_path",
        "ObjectName": "object_name",
        "Start": "startup_type",
        "Type": "service_type",
        "Parameters/ServiceDLL": "service_dll"
    }

    # Field map key should be converted to lowercase because key acquired
    # through self._GetKeyName could have some characters in different
    # case than the field map, e.g. ServiceDLL and ServiceDll.
    field_map = {k.lower(): v for k, v in field_map.items()}
    for stat in responses:
        # Ignore subkeys
        if not stat.HasField("registry_data"):
            continue

        service_name = self._GetServiceName(stat.pathspec.path)
        reg_key = os.path.dirname(stat.pathspec.path)
        service_info = rdf_client.WindowsServiceInformation(
            name=service_name, registry_key=reg_key)
        # The first value seen for a service creates its entry; subsequent
        # values only fill in fields on the existing object.
        services.setdefault(service_name, service_info)

        key = self._GetKeyName(stat.pathspec.path)

        if key in field_map:
            try:
                services[service_name].Set(field_map[key],
                                           stat.registry_data.GetValue())
            except type_info.TypeValueError:
                # Flatten multi strings into a simple string
                if (stat.registry_type ==
                        rdf_client_fs.StatEntry.RegistryType.REG_MULTI_SZ):
                    services[service_name].Set(
                        field_map[key],
                        utils.SmartUnicode(stat.registry_data.GetValue()))
                else:
                    # Log failures for everything else
                    # TODO(user): change this to yield a ParserAnomaly object.
                    dest_type = type(services[service_name].Get(
                        field_map[key]))
                    logging.debug(
                        "Wrong type set for %s:%s, expected %s, got %s",
                        stat.pathspec.path, stat.registry_data.GetValue(),
                        dest_type, type(stat.registry_data.GetValue()))

    return services.values()
def CheckBlobsExist(self, blob_ids):
    """Forwards a blob-existence query to the wrapped delegate store."""
    precondition.AssertIterableType(blob_ids, rdf_objects.BlobID)
    existence = self.delegate.CheckBlobsExist(blob_ids)
    return existence
def ApplyParsersToResponses(parser_factory, responses, flow_obj):
    """Parse responses with applicable parsers.

    Runs every applicable parser kind in turn (single-response,
    multi-response, single-file, multi-file) and accumulates their output.
    Parser failures are logged on the flow rather than aborting collection.

    Args:
        parser_factory: A parser factory for specific artifact.
        responses: A list of responses from the client.
        flow_obj: An artifact collection flow.

    Returns:
        A list of (possibly parsed) responses.
    """
    if not parser_factory.HasParsers():
        # If we don't have any parsers, we expect to use the unparsed
        # responses.
        return responses

    # We have some processors to run.
    knowledge_base = flow_obj.state.knowledge_base

    @contextlib.contextmanager
    def ParseErrorHandler():
        # Converts a parser failure into a flow log message so one broken
        # parser does not abort the whole artifact collection.
        try:
            yield
        except parsers.ParseError as error:
            flow_obj.Log("Error encountered when parsing responses: %s",
                         error)

    parsed_responses = []

    if parser_factory.HasSingleResponseParsers():
        for response in responses:
            for parser in parser_factory.SingleResponseParsers():
                with ParseErrorHandler():
                    parsed_responses.extend(
                        parser.ParseResponse(knowledge_base, response,
                                             flow_obj.args.path_type))

    for parser in parser_factory.MultiResponseParsers():
        with ParseErrorHandler():
            parsed_responses.extend(
                parser.ParseResponses(knowledge_base, responses))

    has_single_file_parsers = parser_factory.HasSingleFileParsers()
    has_multi_file_parsers = parser_factory.HasMultiFileParsers()

    if has_single_file_parsers or has_multi_file_parsers:
        # File parsers only make sense for filesystem stat entries.
        precondition.AssertIterableType(responses, rdf_client_fs.StatEntry)
        pathspecs = [response.pathspec for response in responses]
        # TODO(amoser): This is not super efficient, AFF4 provided an api to
        # open all pathspecs at the same time, investigate if optimizing this
        # is worth it.
        filedescs = []
        for pathspec in pathspecs:
            client_path = db.ClientPath.FromPathSpec(flow_obj.client_id,
                                                     pathspec)
            filedescs.append(file_store.OpenFile(client_path))

    if has_single_file_parsers:
        # filedescs is aligned with responses, so zip pairs each stat entry
        # with its open file.
        for response, filedesc in zip(responses, filedescs):
            for parser in parser_factory.SingleFileParsers():
                with ParseErrorHandler():
                    parsed_responses.extend(
                        parser.ParseFile(knowledge_base, response.pathspec,
                                         filedesc))

    if has_multi_file_parsers:
        for parser in parser_factory.MultiFileParsers():
            with ParseErrorHandler():
                parsed_responses.extend(
                    parser.ParseFiles(knowledge_base, pathspecs, filedescs))

    return parsed_responses
def WriteBlobsWithUnknownHashes(self, blobs_data):
    """Forwards raw blob payloads to the wrapped delegate store."""
    precondition.AssertIterableType(blobs_data, bytes)
    written_ids = self.delegate.WriteBlobsWithUnknownHashes(blobs_data)
    return written_ids
def ReadBlobs(self, blob_ids):
    """Reads the requested blobs from the wrapped delegate store."""
    precondition.AssertIterableType(blob_ids, rdf_objects.BlobID)
    contents = self.delegate.ReadBlobs(blob_ids)
    return contents
def ReadBlobs(
    self, blob_ids: Iterable[rdf_objects.BlobID]
) -> Dict[rdf_objects.BlobID, Optional[bytes]]:
    """Reads the requested blobs from the wrapped delegate store."""
    precondition.AssertIterableType(blob_ids, rdf_objects.BlobID)
    contents = self.delegate.ReadBlobs(blob_ids)
    return contents
def __init__(self, columns: List[Text], delimiter: Text = ","):
    """Initializes the writer with a fixed column set and field delimiter."""
    precondition.AssertIterableType(columns, Text)
    precondition.AssertType(delimiter, Text)
    self._columns = columns
    self._writer = Writer(delimiter=delimiter)
def testStringSetCorrect(self):
    """A homogeneous set of strings passes the iterable type check."""
    del self  # Unused.
    strings = {"foo", "bar", "baz"}
    precondition.AssertIterableType(strings, str)
def testNonHomogeneousIntList(self):
    """A list containing a float among ints must be rejected."""
    mixed_values = [4, 8, 15, 16.0, 23, 42]
    with self.assertRaises(TypeError):
        precondition.AssertIterableType(mixed_values, int)
def ParseResponses(self, knowledge_base, responses):
    """Validates Dict responses and parses them in bulk via ParseMultiple."""
    precondition.AssertIterableType(responses, rdf_protodict.Dict)
    del knowledge_base  # Unused.
    return self.ParseMultiple(responses)
def testAssertEmptyCorrect(self):
    """Empty iterables pass regardless of the expected element type."""
    del self  # Unused.
    for empty_iterable, element_type in (([], int), ({}, str)):
        precondition.AssertIterableType(empty_iterable, element_type)
def __init__(self, columns, delimiter=","):
    """Initializes the writer with a fixed column set and field delimiter."""
    # NOTE(review): `text` looks like a py2/py3 compat string alias defined
    # at module level — confirm against the file's imports.
    precondition.AssertIterableType(columns, text)
    precondition.AssertType(delimiter, text)
    self._columns = columns
    self._writer = Writer(delimiter=delimiter)
def AddFileWithUnknownHash(client_path, blob_ids):
    """Add a new file consisting of given blob IDs."""
    precondition.AssertType(client_path, db.ClientPath)
    precondition.AssertIterableType(blob_ids, rdf_objects.BlobID)
    # Delegate to the batch variant with a single-entry mapping and pull
    # out this path's result.
    results_by_path = AddFilesWithUnknownHashes({client_path: blob_ids})
    return results_by_path[client_path]
def CheckBlobsExist(
    self, blob_ids: Iterable[rdf_objects.BlobID]
) -> Dict[rdf_objects.BlobID, bool]:
    """Forwards a blob-existence query to the wrapped delegate store."""
    precondition.AssertIterableType(blob_ids, rdf_objects.BlobID)
    existence = self.delegate.CheckBlobsExist(blob_ids)
    return existence
def RegexListDisjunction(regex_list: Iterable[bytes]):
    """Combines byte regexes into a single pattern matching any of them."""
    precondition.AssertIterableType(regex_list, bytes)
    # Each input pattern becomes its own parenthesized alternative:
    # [a, b] -> b"(a)|(b)".
    alternation = b")|(".join(regex_list)
    return b"(" + alternation + b")"
def testIteratorIsNotIterable(self):
    """An iterator (as opposed to a re-iterable container) is rejected."""
    iterator = iter(["foo", "bar", "baz"])
    with self.assertRaises(TypeError):
        precondition.AssertIterableType(iterator, str)
def ParseResponses(self, knowledge_base, responses):
    """Validates stat-entry responses and parses them via ParseMultiple."""
    precondition.AssertIterableType(responses, rdf_client_fs.StatEntry)
    parsed = self.ParseMultiple(responses, knowledge_base)
    return parsed
def WriteBlobsWithUnknownHashes(
    self, blobs_data: Iterable[bytes]) -> List[rdf_objects.BlobID]:
    """Forwards raw blob payloads to the wrapped delegate store."""
    precondition.AssertIterableType(blobs_data, bytes)
    written_ids = self.delegate.WriteBlobsWithUnknownHashes(blobs_data)
    return written_ids