Example #1
File: mspdb.py  Project: the80srobot/rekall
    def render(self, renderer):
        result = self.parse_pdb()

        if self.output_filename:
            with renderer.open(filename=self.output_filename,
                               directory=self.dump_dir,
                               mode="wb") as fd:
                fd.write(utils.PPrint(result))
        else:
            renderer.write(utils.PPrint(result))
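
All of the examples on this page hand a plain Python data structure to utils.PPrint and write the returned string out, either through the renderer or to a file. A minimal stand-alone sketch, assuming a Rekall checkout is importable and that PPrint returns a pretty-printed, JSON-style rendering of its argument:

from rekall import utils

# A made-up, profile-shaped dictionary mirroring the results built above.
result = {"$METADATA": {"Type": "Profile"}, "$STRUCTS": {}}
print(utils.PPrint(result))
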
Example #2
    def render(self, renderer):
        vtypes = {}

        for i, (struct_name, definition) in enumerate(self.tpi.Structs()):
            self.session.report_progress(" Exporting %s: %s", i, struct_name)
            struct_name = str(struct_name)
            existing_definition = vtypes.get(struct_name)
            if existing_definition:
                # Merge the old definition into the new definition.
                definition[1].update(existing_definition[1])

            vtypes[struct_name] = definition

        self.metadata.update(dict(
            ProfileClass=self.profile_class,
            Type="Profile",
            PDBFile=os.path.basename(self.filename),
            ))

        self.metadata.update(self.tpi.metadata)

        # Demangle all constants.
        demangler = pe_vtypes.Demangler(self.metadata)
        constants = {}
        for name, value in self.tpi.constants.iteritems():
            constants[demangler.DemangleName(name)] = value

        functions = {}
        for name, value in self.tpi.functions.iteritems():
            functions[demangler.DemangleName(name)] = value

        vtypes = self.PostProcessVTypes(vtypes)

        result = {
            "$METADATA": self.metadata,
            "$STRUCTS": vtypes,
            "$ENUMS": self.tpi.enums,
            }

        if not self.concise:
            result["$REVENUMS"] = self.tpi.rev_enums
            result["$CONSTANTS"] = constants
            result["$FUNCTIONS"] = functions

        if self.output_filename:
            with renderer.open(filename=self.output_filename,
                               directory=self.dump_dir,
                               mode="wb") as fd:
                fd.write(utils.PPrint(result))
        else:
            renderer.write(utils.PPrint(result))
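
The merge step above folds fields from a previously seen definition of the same struct into the newer one. A small stand-alone illustration; the [size, {field: [offset, [type]]}] layout follows the usual Rekall vtype convention, and the field values here are invented:

existing_definition = [0x10, {"Flink": [0x0, ["Pointer"]]}]
definition = [0x10, {"Blink": [0x8, ["Pointer"]]}]

# Fields only present in the older definition are carried over; fields present
# in both keep the older value, because update() overwrites the newer ones.
definition[1].update(existing_definition[1])
assert definition[1] == {"Flink": [0x0, ["Pointer"]],
                         "Blink": [0x8, ["Pointer"]]}
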
Example #3
def RebuildInventory():
    old_inventory = {}
    try:
        with gzip.GzipFile(filename="inventory.gz", mode="rb") as outfd:
            old_inventory = json.load(outfd)["$INVENTORY"]
    except IOError:
        pass

    inventory = {}
    metadata = dict(Type="Inventory", ProfileClass="Inventory")

    result = {
        "$METADATA": metadata,
        "$INVENTORY": inventory,
    }

    modified = False
    for root, _, files in os.walk('./'):
        for filename in files:
            if filename.endswith(".gz"):
                path = os.path.join(root, filename)
                profile_name = os.path.join(root[2:], filename[:-3])

                file_modified_time = os.stat(path).st_mtime
                try:
                    last_modified = old_inventory[profile_name]["LastModified"]

                    # If the current file is not fresher than the old one, just
                    # copy the metadata from the old profile. Allow one second
                    # of grace for floating point rounding.
                    if file_modified_time <= last_modified + 1:
                        inventory[profile_name] = old_inventory[profile_name]
                        continue

                except KeyError:
                    pass

                session.report_progress("Adding %s to inventory", path)
                with gzip.GzipFile(filename=path, mode="rb") as fd:
                    data = json.load(fd)

                    metadata = inventory[profile_name] = data["$METADATA"]
                    inventory[profile_name][
                        "LastModified"] = file_modified_time
                    modified = True

                    # Check for profile validity.
                    if (metadata["ProfileClass"].lower() in PDB_WITH_STRUCTS
                            and "$STRUCTS" not in data):
                        # Should we force the user to add an exception? or to
                        # fix the profile in some way?
                        print("Profile %s is invalid! no $STRUCTS defined." %
                              path)

    if modified:
        # Update the last modified time for the inventory itself.
        result["LastModified"] = time.time()

    with gzip.GzipFile(filename="inventory.gz", mode="wb") as outfd:
        outfd.write(utils.PPrint(result))
Example #4
    def Build(self, renderer):
        repository = self.args.repository
        profile_metadata = repository.Metadata(self.args.profile_name)

        sources = []
        for pattern in self.args.patterns:
            sources.extend(fnmatch.filter(repository.ListFiles(), pattern))

        # Find the latest modified source
        last_modified = 0
        for source in sources:
            source_metadata = repository.Metadata(source)
            last_modified = max(
                last_modified, source_metadata["LastModified"])

        if not profile_metadata or (
                last_modified > profile_metadata["LastModified"]):
            definitions = []
            for source in sources:
                definitions.extend(yaml.safe_load_all(
                    repository.GetData(source, raw=True)))

            # Transform the data as required.
            data = {
                "$ARTIFACTS": definitions,
                "$METADATA": dict(
                    ProfileClass="ArtifactProfile",
                )
            }

            repository.StoreData(self.args.profile_name, utils.PPrint(data),
                                 raw=True)
            renderer.format("Building artifact profile {0}\n",
                            self.args.profile_name)
Example #5
    def Build(self, renderer):
        repository = self.args.repository
        changed_files = False
        for source in self.args.sources:
            profile_name = "OSX/%s" % source.split("/")[-1]
            profile_metadata = repository.Metadata(profile_name)

            # Profile does not exist - rebuild it.
            if not profile_metadata:
                data = repository.GetData(source)

                # Transform the data as required.
                data = self.TransformProfile(data)
                repository.StoreData(profile_name,
                                     utils.PPrint(data),
                                     raw=True)
                renderer.format("Building profile {0} from {1}\n",
                                profile_name, source)
                changed_files = True

        if changed_files and self.args.index or self.args.force_build_index:
            renderer.format("Building index for profile {0} from {1}\n",
                            self.args.profile_name, self.args.index)

            self.BuildIndex()
Example #6
    def render(self, renderer):
        if self.plugin_args.converter:
            cls = ProfileConverter.classes.get(self.plugin_args.converter)
            if not cls:
                raise IOError("Unknown converter %s" %
                              self.plugin_args.converter)

            return cls(self.plugin_args.source,
                       profile_class=self.plugin_args.profile_class).Convert()

        try:
            input = io_manager.Factory(self.plugin_args.source,
                                       session=self.session,
                                       mode="r")
        except IOError:
            self.session.logging.critical(
                "Input profile file %s could not be opened.",
                self.plugin_args.source)
            return

        with input:
            profile = self.ConvertProfile(input)
            if profile:
                with renderer.open(filename=self.plugin_args.out_file,
                                   mode="wb") as output:
                    output.write(utils.PPrint(profile))
                    self.session.logging.info("Converted %s to %s", input,
                                              output.name)
Example #7
    def Encoder(self, data, **options):
        if options.get("raw"):
            return utils.SmartStr(data)

        # If the user specifically wants to encode in yaml, then do so.
        if options.get("yaml"):
            return yaml.safe_dump(data, default_flow_style=False)

        return utils.PPrint(data)
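
The encoder above selects an output format from keyword options: raw bytes first, then YAML, then the pretty-printed default. A dependency-light sketch of the same dispatch pattern; the encode helper and its JSON fallback are illustrative stand-ins, not Rekall's actual SmartStr/PPrint behaviour:

import json

import yaml


def encode(data, **options):
    # "raw" wins over everything else, mirroring the precedence above.
    if options.get("raw"):
        return str(data)
    if options.get("yaml"):
        return yaml.safe_dump(data, default_flow_style=False)
    return json.dumps(data, sort_keys=True, indent=2)


print(encode({"a": 1}, yaml=True))
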
Example #8
    def render(self, renderer):
        with renderer.open(filename=self.spec, mode="rb") as fd:
            spec = yaml.safe_load(fd)

        if spec.get("type") == "struct":
            result = self.BuildStructIndex(spec)
        else:
            result = self.BuildDataIndex(spec)

        renderer.write(utils.PPrint(result))
Example #9
def RebuildInventory():
    old_inventory = {}
    try:
        with gzip.GzipFile(filename="inventory.gz", mode="rb") as outfd:
            old_inventory = json.load(outfd)["$INVENTORY"]
    except IOError:
        pass

    inventory = {}
    metadata = dict(Type="Inventory", ProfileClass="Inventory")

    result = {
        "$METADATA": metadata,
        "$INVENTORY": inventory,
    }

    modified = False
    for root, _, files in os.walk('./'):
        for filename in files:
            if filename.endswith(".gz"):
                path = os.path.join(root, filename)
                profile_name = os.path.join(root[2:], filename[:-3])

                file_modified_time = os.stat(path).st_mtime
                try:
                    last_modified = old_inventory[profile_name]["LastModified"]

                    # If the current file is not fresher than the old file, we
                    # just copy the metadata from the old profile.
                    if file_modified_time <= last_modified:
                        inventory[profile_name] = old_inventory[profile_name]
                        continue

                except KeyError:
                    pass

                session.report_progress("Adding %s to inventory", path)
                with gzip.GzipFile(filename=path, mode="rb") as fd:
                    data = json.load(fd)

                    inventory[profile_name] = data["$METADATA"]
                    inventory[profile_name][
                        "LastModified"] = file_modified_time
                    modified = True

    if modified:
        # Update the last modified time for the inventory itself.
        result["LastModified"] = time.time()

    with gzip.GzipFile(filename="inventory.gz", mode="wb") as outfd:
        outfd.write(utils.PPrint(result))
Example #10
    def render(self, renderer):
        vtypes = self.parser.VType()
        result = {
            "$METADATA": dict(
                Type="Profile",

                # This should probably be changed for something more specific.
                ProfileClass=self.profile_class),
            "$STRUCTS": vtypes,
            "$ENUMS": vtypes.pop("$ENUMS", {}),
            "$REVENUMS": vtypes.pop("$REVENUMS", {}),
            }

        renderer.write(utils.PPrint(result))
Example #11
    def Build(self, renderer):
        repository = self.args.repository
        profile_metadata = repository.Metadata(self.args.profile_name)
        source_metadata = repository.Metadata(self.args.source)
        if not profile_metadata or (source_metadata["LastModified"] >
                                    profile_metadata["LastModified"]):
            data = repository.GetData(self.args.source)

            # Transform the data as required.
            data = self.TransformProfile(data)
            repository.StoreData(self.args.profile_name,
                                 utils.PPrint(data),
                                 raw=True)
            renderer.format("Building profile {0} from {1}\n",
                            self.args.profile_name, self.args.source)
Example #12
    def render(self, renderer):
        spec = yaml.safe_load(open(self.spec))
        index = {}
        metadata = dict(Type="Profile", ProfileClass="Index")

        result = {"$METADATA": metadata, "$INDEX": index}

        repository_root = spec["repository_root"]
        highest_offset = 0

        for root, _, files in os.walk(
                os.path.join(repository_root, spec["path"])):
            for name in files:
                path = os.path.join(root, name)
                relative_path = os.path.splitext(
                    path[len(repository_root):])[0]

                if path.endswith(".gz"):
                    self.session.report_progress("Processing %s",
                                                 relative_path)
                    try:
                        file_data = gzip.open(path).read()
                        data = json.loads(file_data)
                    except Exception:
                        continue

                    index[relative_path] = []
                    for sym_spec in spec["symbols"]:
                        shift = sym_spec.get("shift", 0)
                        if "$CONSTANTS" not in data:
                            continue

                        offset = data["$CONSTANTS"].get(sym_spec["name"])

                        if not offset:
                            continue

                        index[relative_path].append(
                            (offset + shift, sym_spec["data"]))

                        # Store the highest offset, so the reader can optimize
                        # their reading.
                        highest_offset = max(
                            highest_offset,
                            offset + shift + len(sym_spec["data"]))

        metadata["max_offset"] = highest_offset
        renderer.write(utils.PPrint(result))
Example #13
    def BuildAll(self, renderer):
        repository = self.args.repository
        guid_file = self.args.repository.GetData(self.args.guids)
        rejects_filename = self.args.guids + ".rejects"
        rejects = self.args.repository.GetData(rejects_filename, default={})
        reject_len = len(rejects)

        try:
            changed_files = set()
            for pdb_filename, guids in guid_file.iteritems():
                for guid in guids:
                    if guid in rejects:
                        continue

                    # If the profile exists in the repository continue.
                    if repository.Metadata("%s/%s" %
                                           (self.args.profile_name, guid)):
                        continue

                    def Reject(e, guid=guid, changed_files=changed_files):
                        print "GUID %s rejected: %s" % (guid, e)
                        rejects[guid] = str(e)
                        changed_files.remove(guid)

                    # Otherwise build it.
                    changed_files.add(guid)
                    self.pool.AddTask(self.LaunchBuilder,
                                      ("build", "%s/%s" %
                                       (pdb_filename, guid)),
                                      on_error=Reject)

            self.pool.Stop()

            if changed_files and self.args.index or self.args.force_build_index:
                renderer.format("Building index for profile {0} from {1}\n",
                                self.args.profile_name, self.args.index)

                self.BuildIndex()

        finally:
            if len(rejects) != reject_len:
                repository.StoreData(rejects_filename,
                                     utils.PPrint(rejects),
                                     raw=True)

            renderer.format("Updating inventory.\n")
            repository.StoreData("inventory", repository.RebuildInventory())
Example #14
File: util.py  Project: scudette/rekall
    def run(self, key, cb, *args, **kwargs):
        raw = kwargs.pop("raw", False)
        desc = kwargs.pop("desc", None)
        if not self.cache_dir:
            now = time.time()
            result = cb(*args)
            if desc:
                logging.info("Completed %s in %d Seconds", desc,
                             time.time() - now)

            return result

        normalized_key = key.replace("/", "_")
        normalized_key = normalized_key.replace(".", "_")
        path = os.path.join(self.cache_dir, normalized_key)

        if not self.force:
            try:
                raw_data = open(path).read()
                if raw:
                    return raw_data

                json_data = json.loads(raw_data)
                result = json_serialization.load(json_data)
                logging.debug("Cache hit %s", path)

                return result
            except (IOError, OSError):
                pass

            except Exception as e:
                logging.error("Error loading from cache: %s" % e)

        now = time.time()
        result = cb(*args, **kwargs)
        if desc:
            logging.info("Completed %s in %d Seconds", desc, time.time() - now)

        with open(path, "wb") as fd:
            if raw:
                fd.write(result)
            else:
                fd.write(rekall_utils.PPrint(json_serialization.dump(result)))

        return result
Example #15
def _make_profile(args):
    """A procedure that loads Pre-AST, computes a profile and stores it."""
    config_text = open(args.config_file_path).read()
    system_map_text = open(args.system_map_file_path).read()
    logging.info('LOADING PREPROCESSOR AST FROM: %s', args.pre_ast_path)
    preprocessed_ast = json_serialization.load_file(open(args.pre_ast_path))
    logging.info('DONE')

    if not preprocessed_ast:
        raise RuntimeError("Unable to load pre-ast file.")

    manager = layout_manager.ProfileBuilder(
        preprocessed_ast, config_text, system_map_text,
        cache_dir=args.cache_dir)

    profile = manager.create_profile(args.layouts_to_compute)

    with open(args.output, "wb") as fd:
        fd.write(rekall_utils.PPrint(profile))
Example #16
    def fetch_and_parse(self, module_name=None, guid=None, renderer=None):
        if module_name is None:
            module_name = self.module_name

        if guid is None:
            guid = self.guid

        # Allow the user to specify the required profile by name.
        m = re.match("([^/]+)/GUID/([^/]+)$", module_name)
        if m:
            module_name = m.group(1)
            guid = m.group(2)

        if not guid or not module_name:
            raise TypeError("GUID not specified.")

        profile_name = "{0}/GUID/{1}".format(module_name.lower(), guid)

        # Get the first repository to write to.
        repository = self.session.repository_managers[0][1]
        if module_name != "nt":
            data = self._fetch_and_parse(module_name, guid)

            if self.dumpfile:
                with renderer.open(filename=self.dumpfile, mode="wb") as fd:
                    fd.write(utils.PPrint(data))

            return repository.StoreData(profile_name, data)

        for module_name in common.KERNEL_NAMES:
            if module_name.endswith(".pdb"):
                module_name, _ = os.path.splitext(module_name)
            try:
                data = self._fetch_and_parse(module_name, guid)
                self.session.logging.warning(
                    "Profile %s fetched and built. Please "
                    "consider reporting this profile to the "
                    "Rekall team so we may add it to the public "
                    "profile repository.", profile_name)

                return repository.StoreData(profile_name, data)
            except IOError, e:
                self.session.logging.error("Error: %s", e)
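
The regular expression above lets callers pass the module name and GUID as a single "module/GUID/guid" string instead of two separate arguments. A quick stand-alone check of that parse; the module name and GUID below are invented:

import re

m = re.match("([^/]+)/GUID/([^/]+)$", "ntkrnlmp/GUID/0123456789ABCDEF012345678")
assert m.group(1) == "ntkrnlmp"
assert m.group(2) == "0123456789ABCDEF012345678"
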
Example #17
def RebuildInventory():
    inventory = {}
    metadata = dict(Type="Inventory", ProfileClass="Inventory")

    result = {"$METADATA": metadata, "$INVENTORY": inventory}

    for root, _, files in os.walk('./'):
        for filename in files:
            if filename.endswith(".gz"):
                path = os.path.join(root, filename)
                session.report_progress("Adding %s to inventory", path)
                with gzip.GzipFile(filename=path, mode="rb") as fd:
                    data = json.load(fd)

                    profile_name = os.path.join(root[2:], filename[:-3])
                    inventory[profile_name] = data["$METADATA"]

    with gzip.GzipFile(filename="inventory.gz", mode="wb") as outfd:
        outfd.write(utils.PPrint(result))
Example #18
def _load_ast_compute_and_dump_profile(
    result_paths,
    config_file_path,
    system_map_file_path,
    layouts_to_compute,
    decoder,
):
    """A procedure that loads AST, computes the profile and stores it."""
    program = _load_and_decode(result_paths.ast_file_path, decoder)
    layouts, types = _get_layouts_and_types(program, layouts_to_compute)
    vtypes = _get_vtypes(layouts, types)

    linux_profile_converter = profile_tool.LinuxConverter(None, None)
    system_map = _load_from_string(system_map_file_path)
    system_map = linux_profile_converter.ParseSystemMap(system_map)

    config = _load_from_string(config_file_path)
    config = linux_profile_converter.ParseConfigFile(config)

    profile = linux_profile_converter.BuildProfile(system_map, vtypes, config)

    with open(result_paths.profile_file_path, 'w') as profile_file:
        profile_file.write(utils.PPrint(profile))
Example #19
    def Encoder(self, data, **options):
        if self.pretty_print:
            return utils.PPrint(data)

        return json.dumps(data, sort_keys=True, **options)
Example #20
        r"(---.*?\.\.\.)\n<bin>(.+?)</bin>", data, re.M | re.S):
        offset, _ = match.span(2)

        # Replace the assembled segment with a base64 equivalent.
        segment = yaml.safe_load(match.group(1))
        segment["offset"] = offset - origin
        segment["data"] = match.group(2).encode("base64").strip()
        test_cases.append(segment)

    return test_cases

def BuildTestCases(filename, output="tmp.o"):
    if "64" in filename:
        mode = "elf64"
    else:
        mode = "elf"

    subprocess.check_call(["nasm", "-f", mode, "-O0", filename, "-o", output])

    return ExtractTestCases(open(output, "rb").read())

profile = dict(AMD64=BuildTestCases("amd64.asm"),
               I386=BuildTestCases("i386.asm"))

profile["$METADATA"] = dict(
    ProfileClass="TestProfile"
    )

print(utils.PPrint(profile))
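
The segment["data"] line above relies on str.encode("base64"), which only exists on Python 2. A rough Python 3 equivalent of that encoding step using the standard base64 module, shown here on invented assembled bytes rather than a real match object:

import base64

raw_bytes = b"\x48\x89\xe5\xc3"
encoded = base64.b64encode(raw_bytes).decode("ascii")
print(encoded)  # SInlww==
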

Example #21
    def WriteProfile(self, profile_file):
        self.output.write(utils.PPrint(profile_file))
Example #22
    def render(self, renderer):
        spec = self.io_manager.GetData(self.spec)
        renderer.write(utils.PPrint(self.build_index(spec)))
Example #23
    def render(self, renderer):
        with renderer.open(filename=self.spec, mode="rb") as fd:
            spec = yaml.safe_load(fd)

        index = {}
        metadata = dict(Type="Profile", ProfileClass="Index")

        result = {"$METADATA": metadata, "$INDEX": index}

        repository_root = spec["repository_root"]
        highest_offset = 0
        lowest_offset = float("inf")
        base_sym = spec.get("base_symbol", None)

        for root, _, files in os.walk(
                os.path.join(repository_root, spec["path"])):
            for name in files:
                path = os.path.join(root, name)
                relative_path = os.path.splitext(
                    path[len(repository_root):])[0]

                if path.endswith(".gz"):
                    self.session.report_progress("Processing %s",
                                                 relative_path)
                    try:
                        file_data = gzip.open(path).read()
                        data = json.loads(file_data)
                    except Exception:
                        continue

                    index[relative_path] = []
                    for sym_spec in spec["symbols"]:
                        shift = sym_spec.get("shift", 0)

                        if "$CONSTANTS" not in data:
                            continue

                        offset = data["$CONSTANTS"].get(sym_spec["name"])

                        if not offset:
                            continue

                        # Offsets (as well as min/max offset) are computed
                        # relative to base.
                        base = self._decide_base(data=data,
                                                 base_symbol=base_sym)

                        # If we got a base symbol but it's not in the constants
                        # then that means this profile is incompatible with this
                        # index and should be skipped.
                        if base == None:
                            continue

                        # We don't record the offset as reported by the
                        # profile, but the value the reader will actually use.
                        offset = offset + shift - base

                        values = []
                        # If a symbol's expected value is prefixed with
                        # 'string:' then that means it was given to us as
                        # human-readable and we need to encode it. Otherwise it
                        # should already be hex-encoded.
                        raw_prefix = "string:"
                        for value in sym_spec["data"]:
                            if value.startswith(raw_prefix):
                                value = value[len(raw_prefix):].encode("hex")

                            values.append(value)

                        index[relative_path].append((offset, values))

                        # Compute the lowest and highest offsets so the reader
                        # can optimize reading the image.
                        lowest_offset = min(lowest_offset, offset)
                        highest_offset = max(highest_offset,
                                             offset + len(sym_spec["data"]))

        metadata["BaseSymbol"] = base_sym
        metadata["MaxOffset"] = highest_offset
        metadata["MinOffset"] = lowest_offset

        renderer.write(utils.PPrint(result))
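
A worked illustration of the offset arithmetic above, with invented numbers: the value written into the index is the symbol's address from $CONSTANTS, plus the per-symbol shift, rebased against the chosen base symbol so a reader can apply it at any load address:

offset = 0xFFFFFFFF81234560  # symbol value taken from $CONSTANTS
shift = 0x10                 # optional per-symbol adjustment from the spec
base = 0xFFFFFFFF81000000    # value of the base symbol in the same profile

# This relative value is what ends up in the index entry for the symbol.
relative = offset + shift - base
assert relative == 0x234570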