def collect(self):
    """Render output.

    Scans the physical address space between plugin_args.start and
    plugin_args.start + plugin_args.limit with the compiled yara rules,
    yielding one result dict per string hit, up to plugin_args.hits hits.

    Yields:
      dict with keys Rule (matching rule name), Offset (physical address
      of the hit) and hexdump (context bytes around the hit).
    """
    count = 0
    address_space = self.session.physical_address_space
    for buffer_as in scan.BufferASGenerator(
            self.session, address_space, self.plugin_args.start,
            self.plugin_args.start + self.plugin_args.limit):
        self.session.report_progress(
            "Scanning buffer %#x->%#x (%#x)",
            buffer_as.base_offset, buffer_as.end(),
            buffer_as.end() - buffer_as.base_offset)

        for match in self.rules.match(data=buffer_as.data):
            for buffer_offset, _, _ in match.strings:
                # Enforce the hit limit BEFORE yielding and stop the whole
                # scan. The previous form (increment, then break on
                # count >= hits before yielding) silently dropped the final
                # hit — with hits=1 nothing was ever reported — and `break`
                # only exited the innermost loop, so later buffers were
                # still scanned after the limit was reached.
                if count >= self.plugin_args.hits:
                    return

                # Translate the match offset inside this buffer back to a
                # physical address.
                hit_offset = buffer_offset + buffer_as.base_offset
                count += 1

                yield dict(
                    Rule=match.rule,
                    Offset=hit_offset,
                    hexdump=utils.HexDumpedString(
                        self.session.physical_address_space.read(
                            hit_offset - self.plugin_args.pre_context,
                            self.plugin_args.context +
                            self.plugin_args.pre_context)))
def generate_hits(self, run):
    """Scan a single memory run, yielding (rule_name, physical_offset) hits."""
    buffers = scan.BufferASGenerator(
        self.session, run.address_space, run.start, run.end)

    for buffer_as in buffers:
        base = buffer_as.base_offset
        self.session.logging.debug(
            "Scanning buffer %#x->%#x (length %#x)",
            base, buffer_as.end(), buffer_as.end() - base)

        for match in self.rules.match(data=buffer_as.data):
            for relative_offset, _name, _value in match.strings:
                # Convert buffer-relative offsets back to physical addresses.
                yield match.rule, relative_offset + base
def collect(self):
    """Render output.

    Phase 1: scan physical memory with the unified rule and record every
    string hit (with its process context) in self.context_buffer.
    Phase 2: re-run the original rules over each context's combined pseudo
    buffer and yield one result per (rule, context) pair.
    """
    address_space = self.session.physical_address_space
    scan_end = self.plugin_args.start + self.plugin_args.limit

    # Phase 1: collect raw string hits into the context buffer.
    for buffer_as in scan.BufferASGenerator(
            self.session, address_space, self.plugin_args.start, scan_end):
        self.session.report_progress(
            "Scanning buffer %#x->%#x (%#x)",
            buffer_as.base_offset, buffer_as.end(),
            buffer_as.end() - buffer_as.base_offset)

        for match in self.unified_rule.match(data=buffer_as.data):
            for relative_offset, string_name, value in sorted(match.strings):
                self.context_buffer.add_hit(
                    string_name,
                    relative_offset + buffer_as.base_offset,
                    value)

    # Phase 2: re-run the original expression on all unique contexts.
    buffers = self.context_buffer.get_combined_context_buffers()
    for context, original_offset_map, pseudo_data in buffers:
        reported = set()
        self.session.report_progress(
            "Scanning pseudo buffer of length %d" % len(pseudo_data))

        for match in self.rules.match(data=pseudo_data):
            self.session.report_progress()

            # Only report a single hit of the same rule on the same context.
            dedup_key = (match.rule, context)
            if dedup_key in reported:
                continue
            reported.add(dedup_key)

            for pseudo_offset, _, _value in match.strings:
                hit_offset = original_offset_map.get(pseudo_offset)
                if hit_offset is None:
                    # Hit landed on padding or an unmapped pseudo offset.
                    continue

                if isinstance(context, int):
                    owner = self.session.profile._EPROCESS(context)
                else:
                    owner = context

                yield dict(
                    Owner=owner,
                    Rule=match.rule,
                    Offset=hit_offset,
                    HexDump=utils.HexDumpedString(
                        address_space.read(
                            hit_offset - self.plugin_args.pre_context,
                            self.plugin_args.context +
                            self.plugin_args.pre_context)),
                    Context=pfn.PhysicalAddressContext(
                        self.session, hit_offset))
def collect(self):
    """Render output.

    Two-phase scan: first collect unified-rule string hits and group them
    by physical page (PFN) with their process contexts; then rebuild a
    padded pseudo buffer per context, re-run the original rules on it and
    yield one result per (rule, context) pair.

    NOTE(review): uses dict.iteritems() — this code is Python 2 only.
    """
    # Maps pfn_id -> set of (hit_offset, matched_bytes).
    pfn_hits = {}
    # Maps context -> {hit_offset: matched_bytes}.
    hits = {}
    # Cache: pfn_id -> contexts, so get_contexts() runs once per page.
    pfn_context = {}

    address_space = self.session.physical_address_space
    for buffer_as in scan.BufferASGenerator(
            self.session, address_space, self.plugin_args.start,
            self.plugin_args.start + self.plugin_args.limit):
        self.session.report_progress(
            "Scanning buffer %#x->%#x (%#x)",
            buffer_as.base_offset, buffer_as.end(),
            buffer_as.end() - buffer_as.base_offset)

        for match in self.unified_rule.match(data=buffer_as.data):
            for buffer_offset, _, value in sorted(match.strings):
                # Convert buffer-relative offset to a physical address and
                # derive its page frame number (assumes 4KiB pages).
                hit_offset = buffer_offset + buffer_as.base_offset
                pfn_id = hit_offset >> 12
                if pfn_id not in pfn_context:
                    context_strings = self.get_contexts(
                        address_space, pfn_id << 12)
                    if context_strings:
                        pfn_context[pfn_id] = context_strings
                    else:
                        self.session.logging.debug(
                            "No process context for %#x", hit_offset)

                # Record the hit even when no context was found; it is
                # filtered out in the grouping pass below.
                pfn_hits.setdefault(pfn_id, set()).add((hit_offset, value))

    # Regroup the per-page hits by process context.
    for pfn_hit, hit_offsets in pfn_hits.iteritems():
        if pfn_hit in pfn_context:
            contexts = pfn_context[pfn_hit]
            for hit_offset, value in hit_offsets:
                for context in contexts:
                    if context:
                        hits.setdefault(context, {})[hit_offset] = value
                    else:
                        self.session.logging.debug(
                            "Context for %#x invalid", hit_offset)

    # Now re-run the original expression on all unique contexts.
    pad = "\xFF" * 10
    for context, context_data in hits.iteritems():
        data = []
        data_len = 0
        # Map the original offset to the dummy buffer offset.
        omap = {}
        for hit_offset, value in context_data.iteritems():
            omap[data_len] = hit_offset
            # Some padding separates out the sigs.
            data.append(value)
            data.append(pad)
            data_len += len(value) + len(pad)

        pseudo_data = "".join(data)
        seen = set()
        # Report any hits of the original sig on this context.
        for match in self.rules.match(data=pseudo_data):
            # Only report a single hit of the same rule on the same context.
            dedup_key = (match.rule, context)
            if dedup_key in seen:
                continue
            seen.add(dedup_key)

            for buffer_offset, _, value in match.strings:
                # Translate the pseudo-buffer offset back to the original
                # physical address; misses (e.g. hits on padding) are skipped.
                hit_offset = omap.get(buffer_offset)
                if hit_offset is not None:
                    # An int context is taken to be an _EPROCESS address;
                    # otherwise the context object itself is the owner.
                    if isinstance(context, int):
                        owner = self.session.profile._EPROCESS(context)
                    else:
                        owner = context

                    yield dict(
                        Owner=owner,
                        Rule=match.rule,
                        Offset=hit_offset,
                        HexDump=utils.HexDumpedString(
                            address_space.read(
                                hit_offset - self.plugin_args.pre_context,
                                self.plugin_args.context +
                                self.plugin_args.pre_context)),
                        Context=pfn.PhysicalAddressContext(
                            self.session, hit_offset))