def render_full(self, target, **_):
    """Render the symbolic name of the target's value as a text Cell."""
    raw_value = target.v()
    # Choices dict keys are byte strings, so normalize before lookup.
    label = target.choices.get(utils.SmartStr(raw_value), target.default)
    if not label:
        label = u"UNKNOWN (%s)" % utils.SmartUnicode(raw_value)
    return text.Cell(label)
def render(self, renderer):
    """Render recovered Windows Event Log records as a table.

    Scans the memory-mapped .evt files found in process address spaces
    and emits one table row per recovered event record.
    """
    # SID precaching is only worth the cost at high verbosity levels.
    if self.verbosity > 5:
        self.PrecacheSids()

    renderer.table_header([("TimeWritten", "timestamp", ""),
                           ("Filename", "filename", ""),
                           ("Computer", "computer", ""),
                           ("Sid", "sid", ""),
                           ("Source", "source", ""),
                           ("Event Id", "event_id", ""),
                           ("Event Type", "event_type", ""),
                           ("Message", "message", "")])

    for task, vad in self.FindEVTFiles():
        # Derive the short log file name from the VAD's backing file.
        filename = ntpath.basename(
            utils.SmartUnicode(vad.ControlArea.FilePointer.FileName))

        for event in self.ScanEvents(vad, task.get_process_address_space()):
            # Collapse the event's data members into one printable field.
            args = ";".join(
                repr(utils.SmartStr(x)) for x in event.Data)

            renderer.table_row(
                event.TimeWritten, filename, event.Computer, event.Sid,
                event.Source, event.EventID, event.EventType, args)
def StoreData(self, name, data, raw=False, **options):
    """Stores the data in the named container member.

    This serializes the data and stores it in the named member. Not all
    types of data are serializable, so this may raise. For example, when
    using JSON to store the data, arbitrary python objects may not be
    used.

    Args:
      name: The name under which the data will be stored.
      data: The data to store.
      raw: If true we write the data directly without encoding to json.
        In this case data should be a string.
    """
    with self.Create(name) as fd:
        # Raw mode bypasses the encoder and writes bytes as-is.
        if raw:
            fd.write(utils.SmartStr(data))
        else:
            fd.write(self.Encoder(data, **options))

    # Update the inventory.
    if name != "inventory":
        self.inventory.setdefault(
            "$INVENTORY", {})[name] = dict(LastModified=time.time())
        self.FlushInventory()
def _copy_file_to_image(self, renderer, resolver, volume, filename):
    """Copy a file from the live filesystem into the AFF4 volume.

    Falls back to raw NTFS parsing when the file can not be opened
    normally (e.g. locked system files).
    """
    image_urn = volume.urn.Append(utils.SmartStr(filename))
    out_fd = None
    try:
        with open(filename, "rb") as in_fd:
            with aff4_image.AFF4Image.NewAFF4Image(resolver, image_urn,
                                                   volume.urn) as out_fd:
                renderer.format("Adding file {0}\n", filename)
                # Record the original path so the image can be mapped back.
                resolver.Set(image_urn, lexicon.AFF4_STREAM_ORIGINAL_FILENAME,
                             rdfvalue.XSDString(filename))
                # Stream the file in fixed-size chunks.
                while 1:
                    data = in_fd.read(self.BUFFERSIZE)
                    if not data:
                        break
                    out_fd.write(data)
    except IOError:
        try:
            self.session.logging.debug(
                "Unable to read %s. Attempting raw access.", filename)
            # We can not just read this file, parse it from the NTFS.
            # NOTE(review): this passes `renderer` as the first argument —
            # confirm the fallback helper's signature expects it.
            self._copy_raw_file_to_image(renderer, resolver, volume, filename)
        except IOError:
            self.session.logging.warn("Unable to read %s. Skipping.",
                                      filename)
    finally:
        # out_fd is only set if the AFF4 image was created.
        if out_fd:
            resolver.Close(out_fd)
def collect(self):
    """Yield guessed member rows for the offset, with pool context if found."""
    pool_offset = None
    pool_header = self.SearchForPoolHeader(self.plugin_args.offset,
                                           search=self.plugin_args.search)

    if pool_header:
        # Prefer the billed process name; fall back to the escaped pool tag.
        name = (pool_header.m("ProcessBilled").name or
                str(pool_header.Tag).encode("string-escape"))
        yield dict(divider=("{0:#x} is inside pool allocation with "
                            "tag '{1}' ({2:#x}) and size {3:#x}".format(
                                self.plugin_args.offset, name, pool_header,
                                pool_header.size)))

    for relative_offset, info in self.GuessMembers(
            self.plugin_args.offset, size=self.plugin_args.size,
            search=self.plugin_args.search):
        if pool_header:
            # Offset relative to the start of the pool allocation.
            pool_offset = (self.plugin_args.offset + relative_offset -
                           pool_header.obj_offset)

        content = " ".join(utils.SmartStr(x).encode("string-escape")
                           for x in info)
        yield dict(offset=relative_offset, pool_offset=pool_offset,
                   content=content)
def verify_hit(self, hit, address_space):
    """Classify a scanner hit as an X509 certificate or RSA private key.

    Returns:
      A (type, data, description) tuple, or (None, None, None) when the
      hit matches neither DER signature.
    """
    # Bytes at hit+4 discriminate the DER structure type.
    signature = address_space.read(hit + 4, 3)
    # The DER length field is a big-endian short at offset 2.
    size = self.profile.Object(
        "unsigned be short", offset=hit+2, vm=address_space)
    description = None

    if signature.startswith("\x30\x82"):
        # DER SEQUENCE header: looks like an X509 certificate.
        data = address_space.read(hit, size + 4)
        if X509:
            try:
                cert = X509.load_cert_der_string(data)
                description = utils.SmartStr(cert.get_subject())
            except X509.X509Error:
                # Not a parseable cert - return it undescribed.
                pass
        return "X509", data, description

    elif signature.startswith("\x02\x01\x00"):
        # INTEGER 0 version field: looks like an RSA private key. Wrap it
        # in PEM armor so the RSA loader can parse it.
        data = address_space.read(hit, size + 4)
        if RSA:
            try:
                pem = ("-----BEGIN RSA PRIVATE KEY-----\n" +
                       data.encode("base64") +
                       "-----END RSA PRIVATE KEY-----")
                key = RSA.load_key_string(pem)
                description = "Verified: %s" % key.check_key()
            except Exception:
                # Best effort - an unverified key is still reported.
                pass
        return "RSA", data, description

    return None, None, None
def scan(self, offset=0, maxlen=None):
    """Scan for DER-encoded certificates and RSA keys.

    Yields:
      (hit, type, data, description) tuples for each X509 certificate
      or RSA private key candidate found by the underlying scanner.
    """
    for hit in super(CertScanner, self).scan(offset=offset, maxlen=maxlen):
        # Bytes at hit+4 discriminate the DER structure type.
        signature = self.address_space.read(hit + 4, 3)
        # The DER length field is a big-endian short at offset 2.
        size = self.profile.Object(
            "unsigned be short", offset=hit+2, vm=self.address_space)
        description = None

        if signature.startswith("\x30\x82"):
            # DER SEQUENCE header: looks like an X509 certificate.
            data = self.address_space.read(hit, size + 4)
            if X509:
                try:
                    cert = X509.load_cert_der_string(data)
                    description = utils.SmartStr(cert.get_subject())
                except X509.X509Error:
                    # Not a parseable cert - yield it undescribed.
                    pass
            yield hit, "X509", data, description

        elif signature.startswith("\x02\x01\x00"):
            # INTEGER 0 version field: looks like an RSA private key.
            # Wrap it in PEM armor so the RSA loader can parse it.
            data = self.address_space.read(hit, size + 4)
            if RSA:
                try:
                    pem = ("-----BEGIN RSA PRIVATE KEY-----\n" +
                           data.encode("base64") +
                           "-----END RSA PRIVATE KEY-----")
                    key = RSA.load_key_string(pem)
                    description = "Verified: %s" % key.check_key()
                except Exception:
                    # Best effort - an unverified key is still reported.
                    pass
            yield hit, "RSA", data, description
def Test(self, address_space):
    """Return True if every (offset, expected) probe matches the space."""
    for offset, expected in self.test:
        expected = utils.SmartStr(expected)
        # A falsy offset (0/None) means "no check" for this probe.
        if not offset:
            continue
        if address_space.read(offset, len(expected)) != expected:
            return False
    return True
def from_primitive(cls, pem_string, session=None):
    """Build an instance from a PEM-encoded X509 certificate string.

    Args:
      pem_string: The PEM text of the certificate.
      session: Optional session passed to the constructor.

    Returns:
      The populated instance.

    Raises:
      CipherError: If the PEM data can not be parsed as an X509 cert.
    """
    result = cls(session=session)
    try:
        # Mutate the new instance with the parsed key, then return it -
        # mirrors the public-key from_primitive variant. (The previous
        # version returned from_raw_key()'s value and left a dead
        # "return result" after the try block.)
        result.from_raw_key(
            x509.load_pem_x509_certificate(utils.SmartStr(pem_string),
                                           backend=openssl.backend))
    except (TypeError, ValueError, exceptions.UnsupportedAlgorithm) as e:
        raise CipherError("X509 Certificate invalid: %s" % e)

    return result
def from_primitive(cls, pem_string, session=None):
    """Build an instance from a PEM-encoded public key string.

    Raises:
      CipherError: If the PEM data can not be parsed as a public key.
    """
    result = cls(session=session)
    try:
        parsed = serialization.load_pem_public_key(
            utils.SmartStr(pem_string), backend=openssl.backend)
        result.from_raw_key(parsed)
    except (TypeError, ValueError, exceptions.UnsupportedAlgorithm) as e:
        raise CipherError("Public Key invalid: %s" % e)

    return result
def _read_value(self, path_components):
    """Open a registry value and cache its data and stat metadata."""
    self.key_name = "\\".join(path_components[1:-1])
    self.value_name = path_components[-1]

    with OpenKey(self._hive_handle, self.key_name) as key:
        # We are a value - we can be read but we can not be listed.
        data, reg_type = QueryValueEx(key, self.value_name)
        self.value = data
        self.value_type = self.registry_map[reg_type]
        self.st_mode = stat.S_IFREG
        self.st_size = len(utils.SmartStr(data))
def Encoder(self, data, **options):
    """Serialize data for storage.

    Precedence: raw bytes, then yaml if requested, else pretty-printed.
    """
    if options.get("raw"):
        return utils.SmartStr(data)

    # If the user specifically wants to encode in yaml, then do so.
    if options.get("yaml"):
        return yaml.safe_dump(data, default_flow_style=False)

    return utils.PPrint(data)
def __repr__(self):
    """Append a truncated preview of the string to the base repr."""
    preview = utils.SmartStr(self)
    suffix = ""
    if len(preview) > 50:
        # Keep the repr short for long strings.
        preview, suffix = preview[:50], "..."

    return "%s (%s%s)" % (super(UnicodeString, self).__repr__(),
                          preview, suffix)
def __repr__(self):
    """Append a truncated preview of the string to the base repr."""
    preview = utils.SmartStr(self)
    suffix = ""
    if len(preview) > 50:
        # Keep the repr short for long strings.
        preview, suffix = preview[:50], "..."

    return "%s (%s%s)" % (super(_UNICODE_STRING, self).__repr__(),
                          preview, suffix)
def _copy_file_to_image(self, renderer, resolver, volume, filename,
                        stat_entry=None):
    """Copy a file from the live filesystem into the AFF4 volume.

    Small files are stored as plain zip segments; larger ones as AFF4
    image streams. Falls back to raw NTFS parsing on read failure.

    Args:
      stat_entry: Optional pre-computed os.stat result for filename;
        looked up here if not provided.
    """
    if stat_entry is None:
        try:
            stat_entry = os.stat(filename)
        except (OSError, IOError):
            # Unstat-able files are silently skipped.
            return

    image_urn = volume.urn.Append(utils.SmartStr(filename))
    out_fd = None
    try:
        with open(filename, "rb") as in_fd:
            renderer.format("Adding file {0}\n", filename)
            # Record the original path so the image can be mapped back.
            resolver.Set(image_urn, lexicon.AFF4_STREAM_ORIGINAL_FILENAME,
                         rdfvalue.XSDString(os.path.abspath(filename)))

            progress = aff4.ProgressContext(length=stat_entry.st_size)

            if stat_entry.st_size < self.MAX_SIZE_FOR_SEGMENT:
                # Small file: store as a single zip member.
                with volume.CreateMember(image_urn) as out_fd:
                    # Only enable compression if we are using it.
                    if (self.compression !=
                            lexicon.AFF4_IMAGE_COMPRESSION_STORED):
                        out_fd.compression_method = zip.ZIP_DEFLATE
                    out_fd.WriteStream(in_fd, progress=progress)
            else:
                # Large file: store as a chunked AFF4 image stream.
                resolver.Set(image_urn, lexicon.AFF4_IMAGE_COMPRESSION,
                             rdfvalue.URN(self.compression))
                with aff4_image.AFF4Image.NewAFF4Image(
                        resolver, image_urn, volume.urn) as out_fd:
                    out_fd.WriteStream(in_fd, progress=progress)

    except IOError:
        try:
            # Currently we can only access NTFS filesystems.
            if self.profile.metadata("os") == "windows":
                self.session.logging.debug(
                    "Unable to read %s. Attempting raw access.", filename)

                # We can not just read this file, parse it from the NTFS.
                self._copy_raw_file_to_image(
                    renderer, resolver, volume, filename)
        except IOError:
            self.session.logging.warn(
                "Unable to read %s. Skipping.", filename)

    finally:
        # out_fd is only set if one of the output streams was created.
        if out_fd:
            resolver.Close(out_fd)
def Summary(self, item, formatstring=None, header=False, **options):
    """Returns a short summary of the object.

    The summary is a short human readable string, describing the object.
    """
    # Addresses are rendered zero-padded for column alignment; headers
    # and non-integer items fall through to the generic path.
    if formatstring == "[addrpad]" and not header:
        try:
            return "%#014x" % item
        except TypeError:
            pass

    # Since we are the default renderer we must ensure this works.
    return utils.SmartStr(item)
def _read_key(self, path_components):
    """Open a registry key and cache its default value, if any."""
    # The path is just the hive name.
    if len(path_components) == 1:
        return

    # Its probably a key
    self.key_name = "\\".join(path_components[1:])
    self.value_name = ""

    # Try to get the default value for this key
    with OpenKey(self._hive_handle, self.key_name) as key:
        try:
            # An empty value name queries the key's default value.
            data, reg_type = QueryValueEx(key, self.value_name)
            self.value = data
            self.value_type = self.registry_map[reg_type]
            self.st_size = len(utils.SmartStr(data))
        except exceptions.WindowsError:
            # Keys without a default value are still valid.
            pass
def render(self, renderer):
    """Render guessed members for the offset, with pool context if found."""
    pool_header = self.SearchForPoolHeader(self.offset, search=self.search)

    if pool_header:
        # Prefer the billed process name; fall back to the escaped pool tag.
        name = (pool_header.ProcessBilled.name or
                str(pool_header.Tag).encode("string-escape"))
        renderer.format(
            "{0:#x} is inside pool allocation with tag '{1}' ({2:#x})\n",
            self.offset, name, pool_header)

    renderer.table_header([("Offset", "offset", "[addr]"),
                           ("Content", "content", "")])

    for relative_offset, info in self.GuessMembers(self.offset,
                                                   size=self.size,
                                                   search=self.search):
        content = " ".join(utils.SmartStr(x).encode("string-escape")
                           for x in info)
        renderer.table_row(relative_offset, content)
def _copy_raw_file_to_image(self, resolver, volume, filename):
    """Image a file by parsing the raw NTFS volume that contains it."""
    image_urn = volume.urn.Append(utils.SmartStr(filename))

    drive, base_filename = os.path.splitdrive(filename)
    if not base_filename:
        # Nothing to image without a path component.
        return

    # Open the raw device with an NTFS profile so we can reach the
    # file through its MFT entry instead of the OS file API.
    fs_session = self.session.add_session(
        filename=r"\\.\%s" % drive, profile="ntfs")
    fs_session.plugins.istat(2)

    ntfs = fs_session.GetParameter("ntfs")
    entry = ntfs.MFTEntryByName(base_filename)
    self._copy_address_space_to_image(resolver, volume, image_urn,
                                      entry.open_file())

    # Record the original path so the image can be mapped back.
    resolver.Set(image_urn, lexicon.AFF4_STREAM_ORIGINAL_FILENAME,
                 rdfvalue.XSDString(os.path.abspath(filename)))
def render(self, renderer):
    """Renders the tasks to disk images, outputting progress as they go"""
    for task in self.filter_processes():
        pid = task.UniqueProcessId

        task_address_space = task.get_process_address_space()
        if not task_address_space:
            renderer.format("Can not get task address space - skipping.")
            continue

        # A user-provided fd takes precedence over per-process files.
        if self.fd:
            self.pedump.WritePEFile(self.fd, task_address_space,
                                    task.Peb.ImageBaseAddress)
            renderer.section()
            renderer.format(
                "Dumping {0}, pid: {1:6} into user provided "
                "fd.\n", task.ImageFileName, pid)

        # Create a new file.
        else:
            # Sanitize the image name so it is safe as a filename.
            sanitized_image_name = re.sub(
                "[^a-zA-Z0-9-_]", "_",
                utils.SmartStr(task.ImageFileName))
            filename = u"executable.%s_%s.exe" % (sanitized_image_name,
                                                  pid)
            renderer.section()
            renderer.format("Dumping {0}, pid: {1:6} output: {2}\n",
                            task.ImageFileName, pid, filename)

            with renderer.open(directory=self.dump_dir,
                               filename=filename,
                               mode="wb") as fd:
                # The Process Environment Block contains the dos header:
                self.pedump.WritePEFile(fd, task_address_space,
                                        task.Peb.ImageBaseAddress)
def render(self, renderer):
    """Format the target's string representation one line at a time."""
    rendered = utils.SmartStr(self.target)
    for line in rendered.splitlines():
        renderer.format("{0}\n", line)
def __str__(self):
    """Delegate byte-string conversion to the project's SmartStr helper."""
    return utils.SmartStr(self)
def __init__(self, name, start, length):
    """Describe a named region covering [start, start + length)."""
    self.name = utils.SmartStr(name)
    self.base = start
    self.length = length
    # Precompute the exclusive end offset for range checks.
    self.end = start + length
def render(self, renderer):
    """Write the target's string representation one line at a time."""
    rendered = utils.SmartStr(self.target)
    for line in rendered.splitlines():
        renderer.write(line + "\n")
def GetData(self, item, **_):
    """Return the item coerced to a byte string."""
    return utils.SmartStr(item)
def EncodeToJsonSafe(self, item, **_):
    """Return a JSON-safe byte string form of the item."""
    return utils.SmartStr(item)
def GetState(self, item, **_):
    """Return a serializable state dict for a symbol item."""
    return dict(address=item.address,
                symbol=utils.SmartStr(item))
def __unicode__(self):
    """Map the numeric value to its choice label, or an UNKNOWN marker."""
    value = self.v()

    # Choices dict keys are always strings.
    label = self.choices.get(utils.SmartStr(value), self.default)
    if label:
        return label

    return u"UNKNOWN (%s)" % utils.SmartUnicode(value)
def Summary(self, item, **_):
    """Return the item's "str" entry as a byte string (empty if missing)."""
    return utils.SmartStr(item.get("str", ""))
def Summary(self, item, **_):
    """Return the item coerced to a byte string."""
    return utils.SmartStr(item)