def RestrictedPickler(raw):
    """Build an unpickler that refuses to resolve arbitrary globals.

    Args:
      raw: the serialized pickle payload; converted to bytes with
        utils.SmartStr before being wrapped in a BytesIO stream.

    Returns:
      On Python 3, a RestrictedUnpickler instance. On Python 2, a stock
      pickle.Unpickler with find_global disabled so untrusted data can
      not instantiate arbitrary classes.
    """
    stream = io.BytesIO(utils.SmartStr(raw))
    if not six.PY3:
        # Python 2: neuter global lookups on the stock unpickler.
        result = pickle.Unpickler(stream)
        result.find_global = None
        return result
    return RestrictedUnpickler(stream)
def verify_hit(self, hit, address_space):
    """Classify a scanner hit as a DER X509 cert or RSA private key.

    Args:
      hit: offset of the candidate hit in address_space.
      address_space: the address space to read the raw data from.

    Returns:
      A (type, data, description) tuple where type is "X509" or "RSA",
      or (None, None, None) if the hit does not match either signature.
    """
    # Three bytes following the DER length field discriminate the type.
    signature = address_space.read(hit + 4, 3)
    # Big-endian DER length field located 2 bytes into the hit.
    size = self.profile.Object(
        "unsigned be short", offset=hit+2, vm=address_space)
    description = None
    if signature.startswith("\x30\x82"):
        # Nested SEQUENCE header - looks like a DER-encoded certificate.
        data = address_space.read(hit, size + 4)
        # X509 is an optional module-level import (M2Crypto); without it
        # we still return the raw data, just with no description.
        if X509:
            try:
                cert = X509.load_cert_der_string(data)
                description = utils.SmartStr(cert.get_subject())
            except X509.X509Error:
                pass
        return "X509", data, description
    elif signature.startswith("\x02\x01\x00"):
        # INTEGER 0 version marker of a PKCS#1 RSA private key.
        data = address_space.read(hit, size + 4)
        if RSA:
            try:
                # Wrap the raw key in PEM armor so M2Crypto can load it.
                # NOTE: str.encode("base64") is Python 2 only.
                pem = ("-----BEGIN RSA PRIVATE KEY-----\n" +
                       data.encode("base64") +
                       "-----END RSA PRIVATE KEY-----")
                key = RSA.load_key_string(pem)
                description = "Verified: %s" % key.check_key()
            except Exception:
                # Best effort only - an unparsable key is still reported.
                pass
        return "RSA", data, description
    return None, None, None
def Definition(self, vtype):
    """Emit this struct's vtype definition into the vtype dict.

    Skips forward declarations and structs with no real members.
    Anonymous members (typically unions) are renamed u1, u2, ...
    """
    # Forward declarations carry no layout information - skip them.
    if "DW_AT_declaration" in self.attributes:
        return

    previous = vtype.get(self.name)
    if previous is not None and previous[0] != self.size:
        self.session.logging.warning(
            "Structs of different sizes but same name")

    fields = {}
    anon_count = 1
    for member in self.members:
        if not isinstance(member, DW_TAG_member):
            continue
        field_name = member.name
        # Give anonymous members stable short names.
        if field_name.startswith(b"__unnamed_"):
            field_name = utils.SmartStr("u%s" % anon_count)
            anon_count += 1
        fields[field_name] = member.VType()

    # Only emit structs that actually have members.
    if fields:
        vtype[self.name] = [self.size, fields]
def collect(self):
    """Yield guessed member info around the target offset.

    If the offset falls inside a pool allocation, a divider row naming
    the pool tag is yielded first and each row also carries the offset
    relative to the pool header.
    """
    pool_offset = None
    pool_header = self.SearchForPoolHeader(self.plugin_args.offset,
                                           search=self.plugin_args.search)
    if pool_header:
        # Prefer the billed process name; fall back to the escaped tag.
        # NOTE: str.encode("string-escape") is Python 2 only.
        name = (pool_header.m("ProcessBilled").name or
                str(pool_header.Tag).encode("string-escape"))
        yield dict(divider=("{0:#x} is inside pool allocation with "
                            "tag '{1}' ({2:#x}) and size {3:#x}".format(
                                self.plugin_args.offset, name, pool_header,
                                pool_header.size)))
    for relative_offset, info in self.GuessMembers(
            self.plugin_args.offset, size=self.plugin_args.size,
            search=self.plugin_args.search):
        if pool_header:
            # Offset of the hit relative to the pool header itself.
            pool_offset = (self.plugin_args.offset + relative_offset -
                           pool_header.obj_offset)
        yield dict(offset=relative_offset,
                   pool_offset=pool_offset,
                   content=" ".join([
                       utils.SmartStr(x).encode("string-escape")
                       for x in info
                   ]))
def calculate(self):
    """Compute a fingerprint for the current memory image.

    Collects (physical offset, value) pairs for several well-known
    kernel constants (_osversion, _version, _sched_tick), the catfish
    signature and the names of running processes, then hashes them.

    Returns:
      A dict with a sha1 "hash" over the collected pairs and the raw
      "tests" list, or NoneObject for volatile (live) images where a
      stable fingerprint is meaningless.
    """
    if self.session.physical_address_space.volatile:
        return obj.NoneObject("No fingerprint for volatile image.")

    result = []
    profile = self.session.profile
    phys_as = self.session.physical_address_space
    address_space = self.session.GetParameter("default_address_space")

    label = profile.get_constant_object("_osversion", "String")
    result.append((address_space.vtop(label.obj_offset), label.v()))

    label = profile.get_constant_object("_version", "String")
    result.append((address_space.vtop(label.obj_offset), label.v()))

    # Fixed-width read (term=None): pure character array, no terminator.
    label = profile.get_constant_object("_sched_tick", "String", length=8,
                                        term=None)
    result.append((address_space.vtop(label.obj_offset), label.v()))

    catfish_offset = self.session.GetParameter("catfish_offset")
    result.append((catfish_offset, phys_as.read(catfish_offset, 8)))

    # List of processes should also be pretty unique.
    for task in self.session.plugins.pslist().filter_processes():
        name = task.name.cast("String", length=30)
        task_name_offset = address_space.vtop(name.obj_offset)
        result.append((task_name_offset, name.v()))

    return dict(hash=hashlib.sha1(utils.SmartStr(result)).hexdigest(),
                tests=result)
def from_primitive(cls, pem_string, session=None):
    """Construct an instance from a PEM encoded key string.

    Raises:
      CipherError: if the PEM data can not be parsed as a key.
    """
    instance = cls(session)
    try:
        key = RSA.importKey(utils.SmartStr(pem_string))
    except (TypeError, ValueError) as e:
        raise CipherError("Public Key invalid: %s" % e)
    instance._value = key
    return instance
def __init__(self, needles=None, **kwargs):
    """Init.

    Args:
      needles: A list of strings we search for.
      **kwargs: passthrough.

    Raises:
      RuntimeError: No needles provided, or a needle is too long.
    """
    super(MultiStringFinderCheck, self).__init__(**kwargs)

    # Acora would raise later anyway - fail early with a clear message.
    if not needles:
        raise RuntimeError("No needles provided to search.")

    # Large patterns cause huge memory consumption in the acora module.
    longest = max(len(needle) for needle in needles)
    if longest > 50:
        raise RuntimeError("Pattern too large to search with ahocorasic.")

    # The scanner operates on raw bytes, so normalize all needles.
    byte_needles = [utils.SmartStr(needle) for needle in needles]
    self.engine = acora.AcoraBuilder(*byte_needles).build()

    self.base_offset = None
    self.hits = None
def render(self, renderer):
    """Render event log records found in mapped EVT files.

    Walks each (task, vad) pair from FindEVTFiles, scans the VAD for
    event records and emits one table row per event.
    """
    # SID precaching is expensive - only do it at high verbosity.
    if self.plugin_args.verbosity > 5:
        self.PrecacheSids()
    renderer.table_header([("TimeWritten", "timestamp", ""),
                           ("Filename", "filename", ""),
                           ("Computer", "computer", ""),
                           ("Sid", "sid", ""),
                           ("Source", "source", ""),
                           ("Event Id", "event_id", ""),
                           ("Event Type", "event_type", ""),
                           ("Message", "message", "")])
    for task, vad in self.FindEVTFiles():
        filename = ntpath.basename(
            utils.SmartUnicode(vad.ControlArea.FilePointer.FileName))
        for event in self.ScanEvents(vad, task.get_process_address_space()):
            # Event data strings are repr()ed so binary data stays printable.
            args = ";".join(
                repr(utils.SmartStr(x)) for x in event.Data)
            renderer.table_row(
                event.TimeWritten, filename, event.Computer, event.Sid,
                event.Source, event.EventID, event.EventType, args)
def __init__(self, length=1024, max_length=1024000, term=b"\x00", **kwargs):
    """Constructor.

    Args:
      length: The maximum length of the string. May also be a callable,
        in which case it is invoked with the parent object to obtain the
        length lazily.
      max_length: Hard upper bound on the string length.
      term: The terminator for this string. If None, there will be no
        checking for null terminations (pure character array). Non-None
        terminators are normalized to bytes via utils.SmartStr.
    """
    super(String, self).__init__(**kwargs)

    # Allow length to be a callable:
    if callable(length):
        length = length(self.obj_parent)

    if term is not None:
        term = utils.SmartStr(term)

    self.term = term
    self.length = int(length)
    self.max_length = max_length
def scan(self, offset=0, maxlen=None):
    """Scan for DER encoded X509 certificates and RSA private keys.

    Yields:
      (hit_offset, type, data, description) tuples where type is
      "X509" or "RSA".

    NOTE(review): this duplicates the classification logic of
    verify_hit elsewhere in the project - keep the two in sync.
    """
    for hit in super(CertScanner, self).scan(offset=offset, maxlen=maxlen):
        # Three bytes following the DER length field discriminate the type.
        signature = self.address_space.read(hit + 4, 3)
        # Big-endian DER length field located 2 bytes into the hit.
        size = self.profile.Object(
            "unsigned be short", offset=hit+2, vm=self.address_space)
        description = None
        if signature.startswith("\x30\x82"):
            # Nested SEQUENCE header - looks like a DER certificate.
            data = self.address_space.read(hit, size + 4)
            # X509 is an optional import; without it we yield raw data only.
            if X509:
                try:
                    cert = X509.load_cert_der_string(data)
                    description = utils.SmartStr(cert.get_subject())
                except X509.X509Error:
                    pass
            yield hit, "X509", data, description
        elif signature.startswith("\x02\x01\x00"):
            # INTEGER 0 version marker of a PKCS#1 RSA private key.
            data = self.address_space.read(hit, size + 4)
            if RSA:
                try:
                    # Wrap the raw key in PEM armor for M2Crypto.
                    # NOTE: str.encode("base64") is Python 2 only.
                    pem = ("-----BEGIN RSA PRIVATE KEY-----\n" +
                           data.encode("base64") +
                           "-----END RSA PRIVATE KEY-----")
                    key = RSA.load_key_string(pem)
                    description = "Verified: %s" % key.check_key()
                except Exception:
                    # Best effort - an unparsable key is still yielded.
                    pass
            yield hit, "RSA", data, description
def render_full(self, target, **_):
    """Render an enumeration value as its symbolic name.

    Falls back to "UNKNOWN (<value>)" when the value has no mapping
    and no default is set.
    """
    raw_value = target.v()
    label = target.choices.get(utils.SmartStr(raw_value), target.default)
    if not label:
        label = u"UNKNOWN (%s)" % utils.SmartUnicode(raw_value)
    return text.Cell(label)
def __init__(self, process_name=b"Idle", **kwargs):
    """Scanner that locates an _EPROCESS by its image file name.

    Args:
      process_name: the process name to search for (default the Idle
        process, which always exists).
    """
    super(WinDTBScanner, self).__init__(**kwargs)
    name_bytes = utils.SmartStr(process_name)
    # ImageFileName is a fixed 15 byte field - NUL pad the needle.
    needle = name_bytes + b"\x00" * (15 - len(name_bytes))
    self.image_name_offset = self.profile.get_obj_offset(
        "_EPROCESS", "ImageFileName")
    self.checks = [["StringCheck", {"needle": needle}]]
def Test(self, address_space):
    """Verify each (offset, expected) probe against the address space.

    Returns:
      True when every probe with a truthy offset matches; False on the
      first mismatch.
    """
    for offset, expected in self.test:
        needle = utils.SmartStr(expected)
        if not offset:
            # Probes without a valid offset are skipped.
            continue
        if address_space.read(offset, len(needle)) != needle:
            return False
    return True
def Encoder(self, data, **options):
    """Encode data for output.

    raw=True returns the raw byte string; otherwise either a pretty
    printed form (when self.pretty_print is set) or sorted JSON.
    """
    if options.get("raw"):
        return utils.SmartStr(data)
    if not self.pretty_print:
        return json.dumps(data, sort_keys=True, **options)
    return utils.PPrint(data)
def __repr__(self):
    """Repr showing the parent repr plus an elided string preview."""
    value = utils.SmartStr(self)
    # Keep the preview short - elide anything over 50 characters.
    if len(value) > 50:
        value, suffix = value[:50], "..."
    else:
        suffix = ""
    return "%s (%s%s)" % (super(UnicodeString, self).__repr__(),
                          value, suffix)
def from_primitive(cls, pem_string, session=None):
    """Construct an instance from a PEM encoded public key.

    Raises:
      CipherError: if the PEM data is not a valid public key.
    """
    result = cls(session=session)
    try:
        # Both the parse and the import stay in the try block so either
        # failure maps onto CipherError.
        loaded = serialization.load_pem_public_key(
            utils.SmartStr(pem_string), backend=openssl.backend)
        result.from_raw_key(loaded)
    except (TypeError, ValueError, exceptions.UnsupportedAlgorithm) as e:
        raise CipherError("Public Key invalid: %s" % e)
    return result
def from_primitive(cls, pem_string, session=None):
    """Construct an instance from a PEM encoded X509 certificate.

    Raises:
      CipherError: if the PEM data is not a valid certificate.
    """
    result = cls(session=session)
    try:
        certificate = x509.load_pem_x509_certificate(
            utils.SmartStr(pem_string), backend=openssl.backend)
        # NOTE: unlike the public-key variant, the result of
        # from_raw_key is returned directly here.
        return result.from_raw_key(certificate)
    except (TypeError, ValueError, exceptions.UnsupportedAlgorithm) as e:
        raise CipherError("X509 Certificate invalid: %s" % e)
def _read_value(self, path_components):
    """Stat a registry value given its path components.

    The first component is the hive, the last is the value name and
    everything in between forms the key path.
    """
    self.key_name = "\\".join(path_components[1:-1])
    self.value_name = path_components[-1]
    with OpenKey(self._hive_handle, self.key_name) as reg_key:
        # Values can be read but not listed - model them as regular files.
        self.value, raw_type = QueryValueEx(reg_key, self.value_name)
        self.value_type = self.registry_map[raw_type]
        self.st_mode = stat.S_IFREG
        self.st_size = len(utils.SmartStr(self.value))
def GetState(self, item, **_):
    """Encode a possibly-binary string for the wire format.

    Returns:
      dict(str=...) when item decodes cleanly as UTF-8, otherwise
      dict(b64=...) with the raw bytes base64 encoded.
    """
    try:
        # If the string happens to be unicode safe we dont need to
        # encode it, but we still must mark it with a "*" to ensure the
        # decoder replaces it with a plain string.
        return dict(str=str(item, "utf8"))
    except (UnicodeError, TypeError):
        # UnicodeError: the bytes are not valid utf8.
        # TypeError: str(x, "utf8") rejects non-buffer input (e.g. an
        # item that is already a str) - previously this crashed because
        # only UnicodeError was caught. Either way, fall back to the
        # safe base64 encoding.
        return dict(b64=base64.b64encode(utils.SmartStr(item)))
def Encoder(self, data, **options):
    """Encode data as raw bytes, YAML or pretty printed text.

    raw=True wins, then yaml=True; the default is utils.PPrint.
    """
    if options.get("raw"):
        return utils.SmartStr(data)
    # Only emit yaml when the user specifically asked for it.
    if not options.get("yaml"):
        return utils.PPrint(data)
    return yaml.safe_dump(data, default_flow_style=False)
def _copy_file_to_image(self, resolver, volume, filename, stat_entry=None):
    """Copy one file into the AFF4 volume, yielding progress messages.

    Small files become zip segments; larger files become AFF4 image
    streams. Unreadable files are retried via raw NTFS access on
    Windows, otherwise skipped with a warning.

    Args:
      resolver: the AFF4 resolver to record metadata in.
      volume: the output AFF4 volume.
      filename: path of the file to copy.
      stat_entry: optional pre-computed os.stat result for filename.
    """
    if stat_entry is None:
        try:
            stat_entry = os.stat(filename)
        except (OSError, IOError):
            # File vanished or is inaccessible - silently skip.
            return

    image_urn = volume.urn.Append(utils.SmartStr(filename))
    out_fd = None
    try:
        with open(filename, "rb") as in_fd:
            yield ("Adding file {0}".format(filename),)
            resolver.Set(
                image_urn, lexicon.AFF4_STREAM_ORIGINAL_FILENAME,
                rdfvalue.XSDString(os.path.abspath(filename)))

            progress = AFF4ProgressReporter(
                session=self.session,
                length=stat_entry.st_size)

            if stat_entry.st_size < self.MAX_SIZE_FOR_SEGMENT:
                # Small enough to store as a single zip segment.
                with volume.CreateMember(image_urn) as out_fd:
                    # Only enable compression if we are using it.
                    if (self.compression !=
                            lexicon.AFF4_IMAGE_COMPRESSION_STORED):
                        out_fd.compression_method = zip.ZIP_DEFLATE
                    out_fd.WriteStream(in_fd, progress=progress)
            else:
                # Large file: use a chunked AFF4 image stream instead.
                resolver.Set(image_urn, lexicon.AFF4_IMAGE_COMPRESSION,
                             rdfvalue.URN(self.compression))
                with aff4_image.AFF4Image.NewAFF4Image(
                        resolver, image_urn, volume.urn) as out_fd:
                    out_fd.WriteStream(in_fd, progress=progress)

    except IOError:
        try:
            # Currently we can only access NTFS filesystems.
            if self.session.profile.metadata("os") == "windows":
                self.session.logging.debug(
                    "Unable to read %s. Attempting raw access.", filename)

                # We can not just read this file, parse it from the NTFS.
                self._copy_raw_file_to_image(
                    resolver, volume, filename)
        except IOError:
            self.session.logging.warn(
                "Unable to read %s. Skipping.", filename)

    finally:
        # Ensure any stream opened above is closed through the resolver.
        if out_fd:
            resolver.Close(out_fd)
def write_file(self, data, **kwargs):
    """Upload data to the blob endpoint described by the upload spec."""
    upload_spec = location.BlobUploadSpecs.from_json(
        self.read_file(**kwargs))

    # Post the payload as a file part named per the spec.
    payload = {upload_spec.name: io.BytesIO(utils.SmartStr(data))}
    resp = self.get_requests_session().post(upload_spec.url, files=payload)

    self._session.logging.debug("Uploaded file: %s (%s bytes)",
                                upload_spec.url, len(data))
    return self._report_error(None, resp)
def name(self):
    """Best-effort name for this DIE.

    Prefers DW_AT_name, then the name of a typedef sibling, and
    finally a synthetic "__unnamed_<offset>" placeholder.
    """
    name_attr = self.attributes.get("DW_AT_name")
    if name_attr is not None:
        return name_attr.value

    sibling_attr = self.attributes.get("DW_AT_sibling")
    if sibling_attr is not None:
        # Sibling references are CU-relative - rebase before lookup.
        sibling = self.types.get(
            sibling_attr.value + self.die.cu.cu_offset)
        if sibling and sibling.die.tag == "DW_TAG_typedef":
            return sibling.name

    return utils.SmartStr("__unnamed_%s" % self.die.offset)
def add_signature(self, data, url, headers):
    """Sign the request body and record the signature header.

    Builds a timestamped assertion bound to the URL, signs assertion
    plus body with the client's private key and stores the resulting
    HTTPSignature JSON in the x-rekall-signature header.
    """
    writeback = self._config.client.writeback
    private_key = writeback.private_key

    # The timestamp binds this signature to the current moment.
    assertion = crypto.HTTPAssertion.from_keywords(
        timestamp=time.time(), url=url).to_json()

    signature = crypto.HTTPSignature.from_keywords(
        assertion=assertion,
        client_id=writeback.client_id,
        public_key=private_key.public_key(),
        signature=private_key.sign(utils.SmartStr(assertion + data)))

    headers["x-rekall-signature"] = signature.to_json()
def Summary(self, item, formatstring=None, header=False, **options):
    """Returns a short summary of the object.

    The summary is a short human readable string, describing the
    object. Address columns ([addrpad]) are rendered as zero padded
    hex; everything else falls back to utils.SmartStr.
    """
    if formatstring == "[addrpad]" and not header:
        try:
            return "%#014x" % item
        except TypeError:
            # item is not an integer - fall through to the default.
            pass

    # Since we are the default renderer we must ensure this works.
    return utils.SmartStr(item)
def render(self, renderer):
    """Scan for every keyword and hexdump the context around each hit."""
    needles = [utils.SmartStr(keyword)
               for keyword in self.plugin_args.keyword]
    scanner = scan.MultiStringScanner(
        needles=needles,
        address_space=self.plugin_args.address_space,
        session=self.session)

    hits = scanner.scan(offset=self.plugin_args.offset,
                        maxlen=self.plugin_args.limit)
    for hit_offset, _ in hits:
        # Start 16 bytes before the hit so the needle has leading context.
        dumper = self.session.plugins.dump(
            offset=hit_offset - 16,
            length=self.plugin_args.context + 16,
            address_space=self.plugin_args.address_space)
        dumper.render(renderer)
def _read_key(self, path_components):
    """Stat a registry key, populating its default value if present."""
    # A single component is just the hive name - nothing to read.
    if len(path_components) == 1:
        return

    # Everything after the hive forms the key path.
    self.key_name = "\\".join(path_components[1:])
    # An empty value name selects the key's default value.
    self.value_name = ""
    with OpenKey(self._hive_handle, self.key_name) as reg_key:
        try:
            self.value, raw_type = QueryValueEx(reg_key, self.value_name)
        except WindowsError:
            # No default value on this key - leave attributes untouched.
            return
        self.value_type = self.registry_map[raw_type]
        self.st_size = len(utils.SmartStr(self.value))
def _copy_raw_file_to_image(self, resolver, volume, filename):
    """Copy a locked/unreadable file into the volume via raw NTFS access.

    Opens the drive device (\\.\C:) with the ntfs profile, locates the
    file's MFT entry and streams its data attribute into the AFF4
    volume. Windows-only fallback for files normal open() can't read.
    """
    image_urn = volume.urn.Append(utils.SmartStr(filename))

    drive, base_filename = os.path.splitdrive(filename)
    if not base_filename:
        # A bare drive (e.g. "C:\\") has no file to copy.
        return

    # Open the raw device for the drive with the NTFS parser.
    ntfs_session = self.session.add_session(filename=r"\\.\%s" % drive,
                                            profile="ntfs")
    # Prime the session's NTFS state ($MFT is entry 2... presumably a
    # warm-up call; TODO confirm why istat(2) is needed here).
    ntfs_session.plugins.istat(2)

    ntfs = ntfs_session.GetParameter("ntfs")
    mft_entry = ntfs.MFTEntryByName(base_filename)

    # Stream the file's $DATA attribute as an address space.
    data_as = mft_entry.open_file()

    self._copy_address_space_to_image(resolver, volume, image_urn, data_as)

    resolver.Set(image_urn, lexicon.AFF4_STREAM_ORIGINAL_FILENAME,
                 rdfvalue.XSDString(os.path.abspath(filename)))
def _StoreData(self, name, to_write, **options):
    """Store to_write under name, gzip compressed unless told otherwise.

    With uncompressed=True the data is written as plain text to the
    path itself; otherwise it is gzipped in memory and written to
    path + ".gz" in a single call so readers never see a partial file.
    """
    path = self.GetAbsolutePathName(name)
    self.EnsureDirectoryExists(os.path.dirname(path))

    if options.get("uncompressed"):
        with open(path, "wt") as out_fd:
            out_fd.write(utils.SmartUnicode(to_write))
    else:
        # Files are expected to be small, so compressing fully in
        # memory and writing the result atomically is acceptable.
        buffer_fd = io.BytesIO()
        with gzip.GzipFile(mode="wb", fileobj=buffer_fd) as gzip_fd:
            gzip_fd.write(utils.SmartStr(to_write))

        with open(path + ".gz", "wb") as out_fd:
            out_fd.write(buffer_fd.getvalue())

    self._dirty = True
def Summary(self, item, **_):
    """Default summary: render the item as a string via utils.SmartStr."""
    return utils.SmartStr(item)