def _parse_log_record(self, result):
    """
    Construct a log record from data within the scan result.

    Arguments:
    result -- The scan result.

    Returns:
    A log record representing the scan result.
    """
    # Build the results portion of the log record. This will be a list of
    # dictionaries, where each dictionary is the result of a single buffer's
    # scan. The list will contain all of the buffers that were exploded from
    # a root buffer's scan in no particular order.
    buffer_results = []
    # .values() iterates identically on Python 2 and 3; the Python 2-only
    # itervalues() was replaced for forward compatibility.
    for scan_object in result.files.values():
        # Do not damage the original result -> clone
        buffer_result = clone_object(scan_object.__dict__)
        # Don't log buffers here, just metadata
        if "buffer" in buffer_result:
            del buffer_result["buffer"]
        buffer_results.append(buffer_result)

    # Construct the log record with fields useful for log processing and
    # routing
    log_record = {
        'source': result.source,
        'scan_result': self._log_record_strainer(buffer_results)
    }

    return log_record
def getJSON(result):
    """
    Take the result of a scan and return the JSON output.

    Arguments:
    result -- a fully populated scan result set.

    Returns:
    A string representation of the json formatted output.
    """
    # Build the results portion of the log record. This will be a list of
    # dictionaries, where each dictionary is the result of a single buffer's
    # scan. The list will contain all of the buffers that were exploded from
    # a root buffer's scan in the order they were processed.
    # Pre-size the list so each result can be placed at its "order" index.
    buffer_results = [None] * len(result.files)
    # .values() iterates identically on Python 2 and 3; the Python 2-only
    # itervalues() was replaced for forward compatibility.
    for scan_object in result.files.values():
        # Do not damage the original result -> clone
        buffer_result = clone_object(scan_object.__dict__)
        # Don't log buffers here, just metadata
        if "buffer" in buffer_result:
            del buffer_result["buffer"]
        buffer_results[buffer_result["order"]] = buffer_result

    # Construct the log record with fields useful for log processing and
    # routing
    log_record = {
        'source': result.source,
        'scan_result': buffer_results
    }

    return json.dumps(log_record)
def _parse_log_record(self, result):
    """
    Construct a log record from data within the scan result.

    Arguments:
    result -- The scan result.

    Returns:
    A log record representing the scan result.
    """
    # Build the results portion of the log record. This will be a list of
    # dictionaries, where each dictionary is the result of a single buffer's
    # scan. The list will contain all of the buffers that were exploded from
    # a root buffer's scan in no particular order.
    buffer_results = []
    # .values() iterates identically on Python 2 and 3; the Python 2-only
    # itervalues() was replaced for forward compatibility.
    for scan_object in result.files.values():
        # Do not damage the original result -> clone
        buffer_result = clone_object(scan_object.__dict__)
        # Don't log buffers here, just metadata
        if "buffer" in buffer_result:
            del buffer_result["buffer"]
        buffer_results.append(buffer_result)

    # Construct the log record with fields useful for log processing and
    # routing
    log_record = {
        'source': result.source,
        'scan_result': self._log_record_strainer(buffer_results)
    }

    return log_record
def getJSON(result):
    """
    Take the result of a scan and return the JSON output.

    Arguments:
    result -- a fully populated scan result set.

    Returns:
    A string representation of the json formatted output.
    """
    # Build the results portion of the log record. This will be a list of
    # dictionaries, where each dictionary is the result of a single buffer's
    # scan. The list will contain all of the buffers that were exploded from
    # a root buffer's scan in the order they were processed.
    # Pre-size the list so each result can be placed at its "order" index.
    buffer_results = [None] * len(result.files)
    # .values() iterates identically on Python 2 and 3; the Python 2-only
    # itervalues() was replaced for forward compatibility.
    for scan_object in result.files.values():
        # Do not damage the original result -> clone
        buffer_result = clone_object(scan_object.__dict__)
        # Don't log buffers here, just metadata
        if "buffer" in buffer_result:
            del buffer_result["buffer"]
        buffer_results[buffer_result["order"]] = buffer_result

    # Construct the log record with fields useful for log processing and
    # routing
    log_record = {'source': result.source, 'scan_result': buffer_results}

    return json.dumps(log_record)