def _ParseResponses(self, processor_obj, responses, responses_obj,
                    artifact_name, source, output_collection_map):
    """Run a parser over responses and route its results to the right sinks.

    Args:
        processor_obj: A Processor object that inherits from Parser.
        responses: A list of, or single response depending on the processor's
            process_together setting.
        responses_obj: The responses object itself (unused here).
        artifact_name: Name of the artifact that generated the responses.
        source: The source responsible for producing the responses.
        output_collection_map: dict of collections when splitting by artifact.

    Raises:
        RuntimeError: On bad parser.
    """
    _ = responses_obj  # Intentionally unused.

    parsed_results = artifact.ApplyParserToResponses(
        processor_obj, responses, source, self, self.token)
    allowed_types = self._GetArtifactReturnTypes(source)

    if not parsed_results:
        return

    with data_store.DB.GetMutationPool() as pool:
        # A parser produced results; reply with them and persist the
        # type-filtered ones to the per-artifact output collections.
        for item in parsed_results:
            type_name = item.__class__.__name__
            if type_name == "Anomaly":
                # Anomalies are replied to the caller but never written to
                # the split output collections.
                self.SendReply(item)
                continue
            # Skip results whose type is outside the artifact's declared
            # return types (an empty declaration allows everything).
            if allowed_types and type_name not in allowed_types:
                continue
            self.state.response_count += 1
            self.SendReply(item)
            self._WriteResultToSplitCollection(
                item, artifact_name, output_collection_map, pool)
def _ProcessData(self, processor, responses, artifact_name):
    """Run parsers over the raw data and sort results into artifact_data.

    Parsed results are appended to the artifact's "PARSER" bucket, while
    Anomaly results go to the "ANOMALY" bucket.

    Args:
        processor: A processor method to use.
        responses: One or more response items, depending on whether the
            processor uses Parse or ParseMultiple.
        artifact_name: The name of the artifact.
    """
    # Parse the collected data and record the outcome in flow state.
    artifact_data = self.state.host_data.get(artifact_name)
    for parsed in artifact.ApplyParserToResponses(processor, responses, self):
        # Anomalies and ordinary parser output are kept in separate buckets.
        bucket = "ANOMALY" if isinstance(parsed, rdf_anomaly.Anomaly) else "PARSER"
        artifact_data[bucket].append(parsed)