示例#1
0
def run_harness_generation(view, func):
    """Experimental automatic fuzzer harness generation support.

    Renders the bundled harness template for `func` and writes it to a
    user-chosen .cpp path. Only ELF targets are supported.

    :param view: BinaryView of the target binary
    :param func: function to generate a harness for
    """

    template_file = os.path.join(binaryninja.user_plugin_path(), "fuzzable")
    if view.view_type == "ELF":
        template_file += "/templates/linux.cpp"
    else:
        interaction.show_message_box(
            "Error",
            "Experimental harness generation is only supported for ELFs at the moment",
        )
        return

    # parse out template based on executable format, and start replacing
    with open(template_file, "r") as fd:
        template = fd.read()

    log.log_info("Replacing elements in template")
    template = template.replace("{NAME}", func.name)
    template = template.replace("{RET_TYPE}", str(func.return_type))

    harness = interaction.get_save_filename_input("Filename to write to?",
                                                  "cpp")
    # user cancelled the save dialog -> nothing to write
    if not harness:
        return

    # BUGFIX: the original referenced an undefined `csv_file` here; build the
    # output path from the filename the user actually chose
    harness = harness.decode("utf-8") + ".cpp"

    log.log_info("Writing new template to workspace")
    with open(harness, "w+") as fd:
        fd.write(template)

    interaction.show_message_box("Success",
                                 f"Done, wrote fuzzer harness to {harness}")
示例#2
0
def restore_symbols(view: binaryninja.binaryview.BinaryView,
                    gopclntab: binaryninja.binaryview.Section) -> None:
    """Restore Golang function names from the .gopclntab section.

    (BUGFIX: this text previously appeared *after* the first statement, so
    Python treated it as a stray expression instead of the docstring; it has
    been moved to the top of the function.)

    the .gopclntab table (starting at .gopclntab + 8) consists of
        N pc0 func0 pc1 func1 pc2 func2 ... pc(N-1) func(N-1) pcN

        N := no of elements in table
        pcX := pointer to the function
        funcX := pointer to struct Func

        struct Func {
                uintptr entry;   // start pc
                int32 name;      // name (offset to C string)
                int32 args;      // size of arguments passed to function
                int32 frame;     // size of function frame, including saved caller PC
                int32 pcsp;      // pcsp table (offset to pcvalue table)
                int32 pcfile;    // pcfile table (offset to pcvalue table)
                int32 pcln;      // pcln table (offset to pcvalue table)
                int32 nfuncdata; // number of entries in funcdata list
                int32 npcdata;   // number of entries in pcdata list
        };
    src: https://docs.google.com/document/d/1lyPIbmsYbXnpNj57a261hgOYVpNRcgydurVQIyZOz_o/pub
    """
    ptr_size = view.address_size

    # validate magic bytes
    magic_bytes_found = view.read(gopclntab.start, 4)
    magic_bytes_expected = b'\xfb\xff\xff\xff'
    if magic_bytes_expected != magic_bytes_found:
        log_error("Invalid .gopclntab section. Aborting!")
        return

    # get number of elements and calculate last address
    size_addr = gopclntab.start + 8  # skip first 8 bytes
    size = get_pointer_LE(view, size_addr)
    start_addr = size_addr + ptr_size
    end_addr = gopclntab.start + 8 + ptr_size + (size * ptr_size * 2)

    # iterate over the table and restore function names
    for current_addr in range(start_addr, end_addr, 2 * ptr_size):
        function_addr = get_pointer_LE(view, current_addr)
        struct_func_offset = get_pointer_LE(view, current_addr + ptr_size)
        # Func.name is the int32 immediately after the uintptr entry field
        name_str_offset = get_dword_LE(
            view, gopclntab.start + struct_func_offset + ptr_size)
        name_addr = gopclntab.start + name_str_offset
        function_name = view.get_ascii_string_at(name_addr)
        if not function_name:
            continue
        log_info(
            f'found name "{function_name}" for function starting at 0x{function_addr:x}'
        )
        function = view.get_function_at(function_addr)
        if not function:
            view.create_user_function(function_addr)
            function = view.get_function_at(function_addr)

        # keep the raw (unsanitized) name visible in the function comment
        new_func_comment = function_name.value
        if function.comment:
            new_func_comment += "\n\n{}".format(function.comment)
        function.comment = new_func_comment
        function.name = sanitize_func_name(function_name.value)
示例#3
0
def save_patches(view):
    """Persist every known patch for *view* into its metadata store."""
    patches = get_all_patches(view)

    # nothing to store -> leave metadata untouched
    if not patches:
        return

    log.log_info('Stored {0} patches to {1}'.format(len(patches), view))
    view.store_metadata(OBFU_KEY, pickle.dumps(patches))
示例#4
0
    def __init__(self, target):
        """Collect fuzzability heuristics for a single function.

        :param target: Binary Ninja Function object to analyze
        """
        # parse basic function identifying info
        self.name = target.name
        log.log_info(f"Starting analysis for: function: {self.name}")

        # analyze function name properties
        self.stripped = "sub_" in self.name
        self.interesting_name = False
        if not self.stripped:
            # any() over a generator -- no need to materialize a list
            self.interesting_name = any(
                pattern in self.name or pattern.lower() in self.name
                for pattern in INTERESTING_PATTERNS
            )

        # analyze function arguments for fuzzable patterns
        self.args = target.parameter_vars
        self.interesting_args = False
        for arg in self.args:
            # BUGFIX: arg.type is a binja Type object, which does not compare
            # equal to a plain string; compare its rendered form instead
            if str(arg.type) == "char*":
                self.interesting_args = True
                break

        # a higher depth means more code coverage for the fuzzer, makes function more viable for testing
        # recursive calls to self mean higher cyclomatic complexity, also increases viability for testing
        (
            self.depth,
            self.recursive,
            self.visited,
        ) = FuzzableAnalysis.get_callgraph_complexity(target)

        # natural loop / iteration detected is often good behavior for a fuzzer to test, such as walking/scanning over
        # input data (aka might be a good place to find off-by-ones). Does not account for any type of basic-block obfuscation.
        self.has_loop = FuzzableAnalysis.contains_loop(target)
示例#5
0
    def init_common(self):
        """Parse the a.out Exec header common to all supported architectures.

        Populates header-derived attributes (sizes, entry point, offsets) and
        the platform. Returns False when the magic is not recognized;
        callers only ever test for that False.
        """
        self.hdr_offset = 0x20

        self.hdr = self.raw.read(0, self.hdr_offset)
        self.architecture = self.check_magic(self.hdr[0x0:0x4])
        # check_magic returns an architecture name or False; compare by
        # identity instead of the `!= False` anti-idiom
        if self.architecture is not False:
            self.platform = Architecture[self.architecture].standalone_platform
        else:
            log_info("Not a valid a.out file!")
            return False

        self.base_addr = 0x1000
        if self.architecture == "x86_64":  # 6l
            # 64-bit images load higher and carry an 8-byte larger header
            self.base_addr = 0x200000
            self.hdr_offset += 0x08

        self.load_addr = self.hdr_offset

        # typedef struct Exec {
        #          long       magic;      /* magic number */                    0x00-0x04
        #          long       text;       /* size of text segment */            0x04-0x08
        #          long       data;       /* size of initialized data */        0x08-0x0C
        #          long       bss;        /* size of uninitialized data */      0x0C-0x10
        #          long       syms;       /* size of symbol table */            0x10-0x14
        #          long       entry;      /* entry point */                     0x14-0x18
        #          long       spsz;       /* size of pc/sp offset table */      0x18-0x1C
        #          long       pcsz;       /* size of pc/line number table */    0x1C-0x20
        # } Exec;
        # decode the seven big-endian longs after the magic in one call
        (
            self.size,
            self.data_size,
            self.bss_size,
            self.syms_size,
            self.entry_addr,
            self.pcsp_offset,
            self.pcline_offset,
        ) = struct.unpack(">7L", self.hdr[0x04:0x20])
示例#6
0
    def write(self, data):
        """Route script output to the log, the original stream, or the
        active scripting-instance console, depending on context.

        :param data: text written to the wrapped stdout/stderr stream
        """
        # interpreter is set (thread-locally) while code runs inside a
        # scripting instance; fall back to None when never initialized
        interpreter = None
        if hasattr(PythonScriptingInstance._interpreter, "value"):
            interpreter = PythonScriptingInstance._interpreter.value

        if interpreter is None:
            if log.is_output_redirected_to_log():
                # accumulate partial writes and emit only complete lines
                self.buffer += data
                while True:
                    i = self.buffer.find('\n')
                    if i == -1:
                        break
                    line = self.buffer[:i]
                    self.buffer = self.buffer[i + 1:]

                    # is_error selects the log severity for this stream
                    if self.is_error:
                        log.log_error(line)
                    else:
                        log.log_info(line)
            else:
                # not redirected: pass straight through to the original stream
                self.orig.write(data)
        else:
            # Clear the interpreter reference while emitting -- presumably so
            # any write triggered by the console itself takes the
            # non-interpreter path above (TODO confirm) -- then restore it
            # even if the console raises.
            PythonScriptingInstance._interpreter.value = None
            try:
                if self.is_error:
                    interpreter.instance.error(data)
                else:
                    interpreter.instance.output(data)
            finally:
                PythonScriptingInstance._interpreter.value = interpreter
示例#7
0
    def decompile(self, inputfile):
        """Run the external decompiler over *inputfile* and return the C source.

        On Windows, autodeleting temp files are locked by their owning process,
        meaning you can't use them to pass input to other programs. Using
        delete=False and unlinking after is an accepted workaround for this.
        See https://bugs.python.org/issue14243

        :raises ExceptionWithMessageBox: when the decompiler reports an error
        """
        tmpfilename = None
        try:
            with tempfile.NamedTemporaryFile(mode='w', delete=False) as conf:
                tmpfilename = conf.name
                json.dump(self.conf.dump(), conf)
                conf.flush()
                self._cmdline.extend(['--config', conf.name])
                self._cmdline.append(inputfile)
                log.log_info(" ".join(self._cmdline))

                # on Windows the decompiler entry point needs the shell to
                # resolve it
                shell = os.name == 'nt'
                p = Popen(self._cmdline, stdout=PIPE, stderr=PIPE, shell=shell)
                _, err = p.communicate()
                log.log_info(err)
                if err.startswith('Error'):
                    raise ExceptionWithMessageBox('decompilation error', err)

                with open('{}.c'.format(inputfile), 'r') as f:
                    code = f.read()

                os.unlink('{}.c'.format(inputfile))
                # don't reuse `f` (the file handle above) for a path string
                dsm_file = '{}.c.frontend.dsm'.format(inputfile)
                if os.path.exists(dsm_file):
                    os.unlink(dsm_file)
        finally:
            # BUGFIX: remove the temp config even when decompilation raises,
            # otherwise every failed run leaked one temp file
            if tmpfilename is not None:
                os.unlink(tmpfilename)

        return code
示例#8
0
 def is_valid_for_data(self, data):
     """Return True when *data* starts with the 'PS-X EXE' header magic."""
     hdr = data.read(0, self.HDR_SIZE)
     if len(hdr) < self.HDR_SIZE:
         return False
     # BUGFIX: data.read() returns bytes; comparing against a str literal
     # never matched under Python 3, so every PSX EXE was rejected
     if hdr[0:8] != b"PS-X EXE":
         return False
     log_info("PSX EXE identified")
     return True
示例#9
0
def main(args):
    """Build a Binary Ninja type library from a C header file.

    :param args: parsed CLI namespace (input_file, output, platform, name,
        definitions, alt_names, guid)
    :returns: 0 on success, 1 on validation failure (suitable for sys.exit)
    """
    log_to_stdout(LogLevel.InfoLog)

    if not os.path.exists(args.input_file):
        log_warn(f"input file: {args.input_file} does not exist")
        return 1

    dir_path = os.path.dirname(os.path.realpath(args.output))
    if not os.path.exists(dir_path):
        log_warn(f"Output path directory {dir_path} does not exist")
        return 1

    try:
        platform: Platform = Platform[args.platform]
    except KeyError:
        log_warn(f"'{args.platform}' is not supported binja platform")
        return 1

    with open(args.input_file) as fd:
        type_data = fd.read()

    # optionally prepend user-supplied preprocessor definitions
    if args.definitions:
        prepend_str = "".join(
            f"#define {definition} 1\n"
            for definition in args.definitions.split(","))
        type_data = prepend_str + type_data

    # let the parser resolve #includes relative to the input file
    types_path = [os.path.dirname(os.path.realpath(args.input_file))]

    type_res = platform.parse_types_from_source(type_data, filename=args.input_file, include_dirs=types_path)

    cur_typelib: TypeLibrary = TypeLibrary.new(Architecture[platform.arch.name], args.name)

    for name, type_obj in type_res.functions.items():
        cur_typelib.add_named_object(name, type_obj)

    for name, type_obj in type_res.types.items():
        cur_typelib.add_named_type(name, type_obj)

    cur_typelib.add_platform(platform)

    if args.alt_names:
        for name in args.alt_names.split(","):
            cur_typelib.add_alternate_name(name)

    if args.guid:
        cur_typelib.guid = args.guid

    cur_typelib.finalize()

    # BUGFIX: write before claiming success, so a failed write cannot be
    # reported as a completed export
    cur_typelib.write_to_file(args.output)
    log_info(f"Wrote type library to {args.output}")

    return 0
def restore_golang_symbols(view: binaryninja.binaryview.BinaryView):
    """Locate the .gopclntab section in *view* and restore Golang symbols."""
    for section in view.sections.values():
        if is_gopclntab_section(view, section):
            # BUGFIX: format the address as hex -- the old f-string printed a
            # decimal number behind a "0x" prefix
            log_info(f"found .gopclntab at 0x{section.start:x}")
            restore_symbols(view, section)
            break
    else:
        log_error("Could not find .gopclntab section. "
                  "If this is really a Golang binary you can annotate "
                  "the section manually by naming it '.gopclntab'")
示例#11
0
	def is_valid_for_data(self, data):
		"""Return True when *data* looks like a playable NSF module."""
		hdr = data.read(0, 128)
		if len(hdr) < 128:
			return False
		# BUGFIX: data.read() returns bytes; comparing against a str literal
		# never matched under Python 3
		if hdr[0:5] != b"NESM\x1a":
			return False
		# BUGFIX: indexing bytes already yields an int under Python 3;
		# struct.unpack("B", hdr[6]) raised a TypeError there
		song_count = hdr[6]
		if song_count < 1:
			log_info("Appears to be an NSF, but no songs.")
			return False
		return True
示例#12
0
 def is_valid_for_data(self, data):
     """Return True when *data* looks like a playable NSF module."""
     hdr = data.read(0, 128)
     if len(hdr) < 128:
         return False
     # BUGFIX: data.read() returns bytes; comparing against a str literal
     # never matched under Python 3
     if hdr[0:5] != b"NESM\x1a":
         return False
     # BUGFIX: indexing bytes already yields an int under Python 3;
     # struct.unpack("B", hdr[6]) raised a TypeError there
     song_count = hdr[6]
     if song_count < 1:
         log_info("Appears to be an NSF, but no songs.")
         return False
     return True
示例#13
0
def squash_and_relabel(g):
    """Collapse strongly connected components of *g* and relabel the nodes.

    Single-member components keep their original label; multi-member
    components get their member labels joined with 'U'. An already-acyclic
    graph is returned unchanged.

    :param g: networkx directed graph
    :returns: a directed acyclic graph
    """
    if not nx.is_directed_acyclic_graph(g):
        log.log_info("Squashing loops")
        g = nx.algorithms.components.condensation(
            g)  # one line to squash the loops
        # map the condensation's integer node ids back to member labels
        mapping = {}
        for i in range(len(g.nodes)):
            if len(g.nodes[i]['members']) == 1:
                mapping[i] = g.nodes[i]['members'].pop()
            else:
                mapping[i] = 'U'.join(n for n in g.nodes[i]['members'])
        return nx.relabel_nodes(g, mapping)
    # BUGFIX: the original implicitly returned None for acyclic graphs,
    # breaking any caller that expects a graph back
    return g
示例#14
0
def get_all_patches(view):
    """Return the cached patch dict for *view*, loading it from the
    BinaryView metadata on first access.
    """
    session_data = view.session_data

    if OBFU_KEY not in session_data:
        patches = dict()
        try:
            # NOTE: pickle is only acceptable here because the metadata was
            # written by this plugin into the user's own database
            patches = pickle.loads(view.query_metadata(OBFU_KEY))
            log.log_info('Loaded {0} patches'.format(len(patches)))
        except Exception:
            # no stored patches (or unreadable data) -> start empty;
            # narrowed from a bare except so KeyboardInterrupt etc. propagate
            pass
        session_data[OBFU_KEY] = patches

    return session_data[OBFU_KEY]
示例#15
0
def parse_unwind_info(thread, view):
    """Walk the PE exception directory and create functions for any
    RUNTIME_FUNCTION entries that analysis has not already covered.

    :param thread: background task providing `cancelled` and `progress`
    :param view: BinaryView of a PE image
    """
    base_address = view.start

    pe = read_pe_header(view)

    # DATA_DIRECTORY[3] is the exception directory (IMAGE_DIRECTORY_ENTRY_EXCEPTION)
    unwind_directory = pe.OPTIONAL_HEADER.DATA_DIRECTORY[3]
    unwind_entrys = base_address + unwind_directory.VirtualAddress
    unwind_entrys_end = unwind_entrys + unwind_directory.Size

    funcs = set()

    log.log_info('Exception Data @ 0x{0:X} => 0x{1:X}'.format(
        unwind_entrys, unwind_entrys_end))

    # each entry is 12 bytes (x64 RUNTIME_FUNCTION: Begin/End/UnwindData RVAs)
    for runtime_address in range(unwind_entrys, unwind_entrys_end, 12):
        if thread.cancelled:
            break

        update_percentage(
            thread, unwind_entrys, unwind_entrys_end, runtime_address,
            'Parsing Unwind Info - Found {0} functions'.format(len(funcs)))

        runtime_function, _ = read_runtime_function(view, runtime_address)

        if runtime_function is None:
            continue

        start_address = runtime_function['BeginAddress']

        # skip entries pointing at non-code or at already-known functions
        if not view.is_offset_executable(start_address):
            continue
        if view.get_functions_containing(start_address):
            continue

        info_address = runtime_function['UnwindData']
        unwind_info, _ = read_unwind_info(view, info_address)

        if unwind_info is None:
            continue

        # entries carrying 'FunctionEntry' are skipped -- presumably chained
        # unwind info describing part of another function, not a new start
        # (TODO confirm against read_unwind_info)
        if 'FunctionEntry' in unwind_info:
            continue

        funcs.add(start_address)

    if not thread.cancelled:
        thread.progress = 'Creating {0} Function'.format(len(funcs))
        log.log_info('Found {0} functions'.format(len(funcs)))

        # defer creation until the scan completes so progress stays accurate
        for func in funcs:
            view.create_user_function(func)
示例#16
0
    def __init__(self):
        """Build the decode, branch, text, and LLIL lookup tables for 8051."""
        start_time = time.time()

        spec = specification.InstructionSpec()
        self.decoders = spec.refine(ana.operand_decoders)
        self.branches = spec.refine(emu.branch_type)
        self.text = spec.refine(out.tokens)
        self.llil = spec.refine(lowlevelil.low_level_il)

        # FIXME hack until I refactor this a bit:
        self.unlifted = lowlevelil.unlifted_todo(spec.spec, self.llil)

        log_info('Building 8051 tables took %0.3f seconds' %
                 (time.time() - start_time))
示例#17
0
def fix_x86_conventions(thread, view):
    """Detect and apply the proper calling convention for every x86 function.

    :param thread: background task providing `cancelled` and `progress`
    :param view: BinaryView whose functions are fixed up
    """
    count = 0

    for func in view.functions:
        if thread.cancelled:
            break
        # BUGFIX: skip non-x86 functions instead of returning -- the old
        # `return` aborted the whole pass at the first foreign-arch function
        # and never logged the summary
        if func.arch.name != 'x86':
            continue
        cc = get_proper_cc(func)
        if cc is not None:
            func.calling_convention = func.arch.calling_conventions[cc]
            thread.progress = 'Fixed {0}'.format(func.name)
            count += 1

    # also fixes the "functions's" typo in the summary message
    log.log_info('Fixed {0} functions'.format(count))
示例#18
0
def load_platform_libraries():
    """Ensure a per-platform directory exists under `typelib_path`, then load
    every type library found there into its matching Platform."""
    try:
        for p in list(Platform):
            path = typelib_path / p.name
            path.mkdir(parents=True, exist_ok=True)

    except IOError:
        # BUGFIX: the old message referenced an undefined `lib_path`,
        # raising a NameError inside the error handler itself
        log_error("Unable to create {}".format(typelib_path))

    for p in typelib_path.iterdir():
        platform = Platform[p.name]

        for typelib_file in p.iterdir():
            tl = TypeLibrary.load_from_file(str(typelib_file))
            platform.type_libraries.append(tl)
            log_info("Loaded type library: {}".format(typelib_file))
示例#19
0
def run_plugin(view):
    """Identify PSX BIOS call stubs by MLIL shape and type them.

    Matches two patterns inside 2-instruction functions:
      $t1 = <callnr>; jump(<calladdr>)   -> classic BIOS call stub
      $v0 = syscall(<callnr>)            -> syscall stub
    """
    log_info("Locating PSX BIOS calls")
    for f in view.functions:
        # BIOS call stubs are exactly two MLIL instructions long
        if len(f.medium_level_il) == 2:
            tok0 = f.medium_level_il[0].tokens
            if str(tok0[0]) == '$t1' and str(tok0[1]) == ' = ':
                # token 2 renders the call-number constant in hex
                callnr = int(str(tok0[2]), 16)
                tok1 = f.medium_level_il[1].tokens
                if str(tok1[0]) == 'jump(' and str(tok1[2]) == ')':
                    calladdr = int(str(tok1[1]), 16)
                    safe_psx_set_type(view, f, calladdr, callnr)
            # TODO: Only verified for syscall(2) stub
            if str(tok0[0]) == '$v0' and str(tok0[1]) == ' = ' and str(
                    tok0[2]) == 'syscall':
                callnr = int(str(tok0[4]))
                safe_psx_set_type(view, f, 0, callnr)
示例#20
0
def patch_opaque(bv, status=None):
    """Repeatedly detect opaque predicates and patch their branches.

    Runs patch_opaque_inner until a pass finds nothing, 10 passes have run,
    or the (optional) background task is cancelled.

    :param bv: BinaryView to patch
    :param status: optional background task providing `cancelled`
    """
    analysis_pass = 0
    while True:
        analysis_pass += 1
        patch_locations = patch_opaque_inner(bv, status)
        # stop when nothing new was found, after 10 passes, or on cancel
        if len(patch_locations) == 0 or analysis_pass == 10 or (
                status is not None and status.cancelled):
            break
        # NOTE(review): `always` selects never_branch() and vice versa --
        # presumably the flag describes the branch *condition* rather than
        # the patch direction; confirm against patch_opaque_inner before
        # relying on the naming.
        for address, always in patch_locations:
            if always:
                log_info("Patching instruction {} to never branch.".format(
                    hex(address)))
                bv.never_branch(address)
            else:
                log_info("Patching instruction {} to always branch.".format(
                    hex(address)))
                bv.always_branch(address)
示例#21
0
    def lazy_disasm(self, address) -> (str, int):
        """Look up the pre-decoded instruction at *address*.

        :param address: int representing the program counter
        :returns: (instruction text, instruction length in bytes);
            ("NONE", 0) when the address was never decoded
        """
        try:
            instruction = self.inst_map[address]
            instruction_len = 4  # HARDCODED fixed-width instruction size
        except KeyError:
            log_info("--FAILED to decode")
            # fall back to a sentinel instead of raising
            instruction = "NONE"
            instruction_len = 0

        return instruction, instruction_len
def define_str_var(bv, addr):
    """Define a char[] data variable and symbol for the string at *addr*.

    If the instruction at *addr* references another address, the string is
    defined at that referenced address instead.
    """
    a = get_address_from_inst(bv, addr)
    if not a:
        a = addr
    data = bv.read(a, MAX_STRING_LENGTH)
    if not data:
        log_alert("failed to read from 0x{:x}".format(a))
        # BUGFIX: bail out after a failed read -- the original fell through
        # and raised a second, misleading "not null-terminated" alert
        return
    if b"\x00" in data:
        # include the terminator in the array length
        length = data.find(b"\x00") + 1
    else:
        log_info("not a null-terminated string: {!r}".format(data))
        log_alert("doesn't look like a null-terminated-string")
        return
    varname = get_string_varname(data[:length])
    t = bv.parse_type_string("char {}[{}]".format(varname, length))
    bv.define_user_data_var(a, t[0])
    # short display name is truncated; full name kept as the raw name
    sym = binaryninja.types.Symbol('DataSymbol', a, varname[:21], varname)
    bv.define_user_symbol(sym)
示例#23
0
File: RelView.py  Project: Vector35/Z80
    def init(self):
        """Parse an SDCC .rel (assembler output) file: create the _CODE
        segment, write the T-line bytes into it, then apply S-line symbols.
        """
        self.arch = Architecture['Z80']
        self.platform = Architecture['Z80'].standalone_platform

        syms = []
        have_code = False

        # the .rel format is line-oriented ASCII; process it line by line
        for line in self.parent_view.read(0, len(self.parent_view)).split(b'\x0a'):
            line = line.decode('utf-8')

            # AREA line -> create a section
            match = re.match(r'A _CODE size (.*) flags (.*) addr (.*)', line)
            if match:
                (size, flags, addr) = (int(x, 16) for x in match.group(1, 2, 3))
                assert flags == 0
                assert addr == 0
                log_info('adding _CODE [%X, %X)' % (addr, addr+size))

                self.add_auto_segment(addr, size, 0, 0, SegmentFlag.SegmentReadable | SegmentFlag.SegmentWritable | SegmentFlag.SegmentExecutable)

                #self.add_user_section('_CODE', addr, size, SectionSemantics.ReadOnlyCodeSectionSemantics)
                have_code = True
                continue

            # WRITE line -> write bytes to section
            match = re.match(r'^T (.. ..) (.*)', line)
            if match:
                (addr, data) = match.group(1, 2)
                # address is two little-endian hex bytes, eg: "04 00" -> 0x0004
                addr = int(addr[3:5] + addr[0:2], 16)
                # eg: "AA BB CC DD" -> b'\xAA\xBB\xCC\xDD'
                data = b''.join([pack('B', int(x, 16)) for x in data.split(' ')])
                log_info('writing to %X: %s' % (addr, match.group(2)))
                self.write(addr, data)
                continue

            # SYMBOL line -> store for later (code bytes must land first)
            match = re.match(r'^S (.+) Def(.*)', line)
            if match:
                (name, addr) = match.group(1, 2)
                # skip the assembler's absolute-section pseudo symbols
                if not name in ['.__.ABS.', '.  .ABS']:
                    addr = int(addr, 16)
                    log_info('saving symbol %s @ %X' % (name, addr))
                    syms.append((name, addr))
                    continue

        assert have_code
        # apply symbols after all code bytes are written
        for (name, addr) in syms:
            log_info('applying symbol %s @ %X' % (name, addr))
            self.define_auto_symbol(Symbol(SymbolType.FunctionSymbol, addr, name))
            self.add_function(addr)

        return True
示例#24
0
    def get_instruction_text(self, data, addr):
        """Tokenize the instruction at *addr* for display.

        Disassembly comes from the module-level `dis` lookup; the raw bytes
        in `data` are only logged.
        """
        log_info(str(data))
        (instruction_text, instruction_len) = dis.lazy_disasm(addr)
        tokens = [
            InstructionTextToken(InstructionTextTokenType.TextToken,
                                 instruction_text)
        ]

        # NOTE(review): assumes at least two whitespace-separated fields in
        # the disassembly text -- a "NONE" fallback from lazy_disasm would
        # raise IndexError on text_tokens[1]; confirm upstream guarantees
        text_tokens = instruction_text.split()
        instruction = text_tokens[1].lower()
        raw_offset = text_tokens[-1]

        # call-type instructions additionally get a clickable target token
        if op.instruction_is_call(instruction):
            call_offset = addr + int(raw_offset, 16)
            tokens.append(
                InstructionTextToken(
                    InstructionTextTokenType.PossibleAddressToken,
                    hex(call_offset), call_offset))

        return tokens, instruction_len
示例#25
0
def run_export_report(view):
    """ Generate a report from a previous analysis, and export as CSV """
    log.log_info("Attempting to export results to CSV")
    try:
        csv_output = view.query_metadata("csv")
    except KeyError:
        interaction.show_message_box(
            "Error", "Cannot export without running an analysis first.")
        return

    # write last analysis to filepath
    csv_file = interaction.get_save_filename_input(
        "Filename to export as CSV?", "csv")
    # BUGFIX: the save dialog returns None on cancel; the old code crashed
    # on .decode() in that case
    if not csv_file:
        return
    csv_file = csv_file.decode("utf-8") + ".csv"

    log.log_info(f"Writing to filepath {csv_file}")
    with open(csv_file, "w+") as fd:
        fd.write(csv_output)

    interaction.show_message_box("Success", f"Done, exported to {csv_file}")
示例#26
0
def analyze_dependencies(bv):
    """ Get all imported symbols, analyze dependencies and apply found information """
    # Get all imported modules and symbols
    candidates = {}
    # FIX: loop variable renamed from `type`, which shadowed the builtin
    for sym_type in (SymbolType.ImportAddressSymbol,
                     SymbolType.ImportedFunctionSymbol,
                     SymbolType.ImportedDataSymbol):
        for sym in bv.get_symbols_of_type(sym_type):
            ident = get_identifier(bv, sym)
            if ident is None:
                continue
            # bucket symbols as candidates[module][identifier] -> [symbols]
            module = normalize_module(get_symbol_module(sym))
            mod_syms = candidates.setdefault(module, {})
            these_syms = mod_syms.setdefault(ident, [])
            these_syms.append(sym)

    # Find any associated dependency files and process them
    for module, filename in find_possible_dependencies(bv, candidates.keys()):
        log_info('Processing: {}...'.format(filename))
        analyze_dependency(bv, module, filename,
                           candidates[normalize_module(module)])
示例#27
0
def ksImportClass(moduleName):
    """Import the Kaitai-generated module *moduleName* and return its parser
    class, or None when the name is empty or the import/lookup fails.
    """
    global __name__, __package__
    if not moduleName:
        return None

    classThing = None
    try:
        log.log_info('importing kaitai module %s' % moduleName)
        # relative import: generated modules live inside this package
        module = importlib.import_module('.' + moduleName, __package__)
        className = ksModuleToClass(moduleName)
        classThing = getattr(module, className)
    except Exception as e:
        # BUGFIX: include the actual exception so failures are diagnosable
        # (the old message silently dropped the reason)
        log.log_error('importing kaitai module %s failed: %s' % (moduleName, e))

    return classThing
示例#28
0
    def decompile(self, inputfile):
        """Run the external decompiler over *inputfile* and return the C source.

        Uses delete=False plus an explicit unlink for the temp config file:
        autodeleting temp files stay locked by their owner on Windows, so the
        child process could not read the config otherwise
        (see https://bugs.python.org/issue14243).

        :raises ExceptionWithMessageBox: when the decompiler reports an error
        """
        tmpfilename = None
        try:
            with tempfile.NamedTemporaryFile(mode='w', delete=False) as conf:
                tmpfilename = conf.name
                json.dump(self.conf.dump(), conf)
                conf.flush()
                self._cmdline.extend(['--config', conf.name])
                self._cmdline.append(inputfile)
                log.log_info(" ".join(self._cmdline))

                p = Popen(self._cmdline, stdout=PIPE, stderr=PIPE)
                _, err = p.communicate()
                log.log_info(err)
                if err.startswith('Error'):
                    raise ExceptionWithMessageBox('decompilation error', err)

                with open('{}.c'.format(inputfile), 'r') as f:
                    code = f.read()

                os.unlink('{}.c'.format(inputfile))
                os.unlink('{}.c.frontend.dsm'.format(inputfile))
        finally:
            # BUGFIX: remove the temp config even when decompilation raises
            if tmpfilename is not None:
                os.unlink(tmpfilename)

        return code
示例#29
0
def load_svd(bv, svd_file=None):
    """Load a CMSIS-SVD device description and map every peripheral into
    *bv* as a section, segment, struct type, and symbol.

    :param bv: target BinaryView
    :param svd_file: optional path; prompts the user when omitted
    """
    if not svd_file:
        svd_file = get_open_filename_input("SVD File")
    # normalize to bytes -- presumably what parse()/the file picker expect
    # here; the picker may return str or bytes depending on version (TODO
    # confirm against the binja interaction API)
    if isinstance(svd_file, str):
        svd_file = bytes(svd_file, encoding="utf-8")
    if not os.access(svd_file, os.R_OK):
        log_error(f"SVD Browser: Unable to open {svd_file}")
        return
    log_info(f"SVD Loader: Loading {svd_file}")
    device = parse(svd_file)
    peripherals = device['peripherals'].values()
    # a peripheral either defines its own registers or derives from another
    base_peripherals = [p for p in peripherals if 'derives' not in p]
    derived_peripherals = [p for p in peripherals if 'derives' in p]

    # map one peripheral's MMIO range and label it with its struct type
    def register_peripheral(p, struct_type):
        bv.add_user_section(p['name'], p['base'], p['size'],
                            SectionSemantics.ReadWriteDataSectionSemantics)
        bv.add_user_segment(
            p['base'], p['size'], 0, 0, SegmentFlag.SegmentContainsData
            | SegmentFlag.SegmentReadable | SegmentFlag.SegmentWritable)
        bv.define_data_var(p['base'], struct_type)
        bv.define_user_symbol(
            Symbol(SymbolType.ImportedDataSymbol, p['base'], p['name']))

    for p in base_peripherals:
        s = Structure()
        for r in p['registers'].values():
            # registers without an explicit size are created as 4 bytes
            if r['size'] is None:
                s.insert(r['offset'], Type.int(4, False), r['name'])
            else:
                # r['size'] is in bits; Type.int takes bytes
                s.insert(r['offset'], Type.int(int(r['size'] / 8), False),
                         r['name'])
        struct_type = Type.structure_type(s)
        bv.define_user_type(p['name'], struct_type)
        register_peripheral(p, struct_type)

    # derived peripherals reuse the struct type of the peripheral they derive from
    for p in derived_peripherals:
        struct_type = bv.get_type_by_name(
            device['peripherals'][p['derives']]['name'])
        register_peripheral(p, struct_type)
示例#30
0
File: sync.py  Project: jeffli678/peutils
def resolve_imports_for_library(bv, lib):
    """Resolve imports from *lib* against the export table of the
    corresponding source BinaryView.

    For every import: find the export with the same ordinal, replace any
    existing auto IAT symbol with one named ``<name>@IAT`` in the DLL's
    namespace, and transplant the exported function's type onto the IAT
    data variable.
    """
    source_bv = peutils.files[lib.name.lower()]
    exports = pe_parsing.get_exports(source_bv)

    for import_ in lib.imports:
        # Find the export matching this import's ordinal.
        # Both name and export_symbol must be reset each iteration so a
        # miss doesn't silently reuse the previous import's match.
        name = None
        export_symbol = None
        for export in exports:
            if export.ord == import_.ordinal:
                name = export.name
                export_symbol = export.symbol
                break

        if not name:
            log_warn("Unable to find name for %r" % import_)
            # Nothing to rename or type without a matching export; the
            # original fell through and crashed on `name + "@IAT"`.
            continue

        # Redefine the IAT thunk symbol
        original_symbol = bv.get_symbol_at(import_.datavar_addr)

        # Delete any existing auto symbols
        if original_symbol:
            log_info("Renaming %s to %s:%s" %
                     (original_symbol.name, lib.name, name))
            bv.undefine_auto_symbol(original_symbol)
        else:
            log_info("Creating IAT symbol %s:%s @ %08x" %
                     (lib.name.split(".")[0], name, import_.datavar_addr))

        # Create the new symbol
        bv.define_auto_symbol(
            Symbol(
                SymbolType.ImportAddressSymbol,
                import_.datavar_addr,
                name + "@IAT",
                namespace=lib.name.split(".")[0],
            ))

        # Transplant type info: render the exported function's type and
        # rewrite its name token into a function-pointer declarator.
        export_func = source_bv.get_function_at(export_symbol.address)
        type_tokens = [token.text for token in export_func.type_tokens]
        i = type_tokens.index(export_symbol.name)
        type_tokens[i] = "(*const func_name)"

        type_string = "".join(type_tokens)
        log_info("Setting type for %s to %r" % (name, type_string))

        try:
            (type_, name) = bv.parse_type_string(type_string)
        except Exception:
            log_error("Invalid type, skipping")
            # Skip this import -- without this, `type_` could be unbound
            # (or stale from a previous iteration) in define_data_var.
            continue

        bv.define_data_var(import_.datavar_addr, type_)

        # FIXME: Apply params to ImportedFunctionSymbols -- check xref on
        # datavar and filter by associated symbols.
        # This doesn't actually seem to help and apparently I didn't have
        # to do this before? Maybe I just didn't handle jumps.
示例#31
0
    def rename(self):
        """Name the function pointers passed to go.runtime.newproc.

        Walks all code references to newproc, recovers the goroutine
        entry pointer from the two LLIL push instructions preceding each
        call, and defines a ``fptr_<name>`` data symbol at its location.
        Logs how many pointers were renamed.
        """
        renamed = 0
        newprocfn = self.bv.get_symbol_by_raw_name("go.runtime.newproc")
        xrefs = self.bv.get_code_refs(newprocfn.address)
        for xref in xrefs:
            log_info("found xref at 0x{:x}".format(xref.address))
            addr = xref.address
            fn = self.get_function_around(addr)
            callinst = fn.get_low_level_il_at(addr)
            if callinst.operation != bn.LowLevelILOperation.LLIL_CALL:
                log_debug("not a call instruction {!r}".format(callinst))
                continue
            params = []
            # FIXME: this is such a dirty hack
            # Recover the two preceding LLIL instructions by probing
            # backwards one byte at a time (instructions are variable
            # length, so up to 6 probes per instruction).
            j = 1
            inst = None
            while len(params) < 2:
                for i in range(1, 7):
                    try:
                        j += 1
                        inst = fn.get_low_level_il_at(addr - j)
                        log_debug("instruction: -{} {!r}".format(j, inst))
                        break
                    except IndexError:
                        continue
                if inst is None:
                    # No instruction found within the probe window; the
                    # original would hit a NameError here.
                    break
                params.append(inst)
            if len(params) < 2:
                continue

            # FIXME: does this work on non-x86?
            # Check that both recovered instructions are pushes.
            # BUG FIX: skip must start out False -- the original
            # initialized it to True, so every xref was skipped and the
            # pass never renamed anything.
            skip = False
            for inst in params:
                if inst.operation != bn.LowLevelILOperation.LLIL_PUSH:
                    skip = True
            if skip:
                continue

            # get the address of the function pointer, which should be the
            # second push instruction (walking backwards)
            inst = params[1]
            fptr = inst.src.value.value
            log_info("found call to newproc {!r} with fptr {!r}".format(
                callinst, fptr))

            if fptr and not self.bv.get_symbol_at(fptr):
                a = self.get_pointer_at_virt(fptr)
                # target function
                tfn = self.bv.get_function_at(a)
                if tfn:
                    varname = "fptr_" + sanitize_var_name(tfn.name)
                    t = self.bv.parse_type_string("void*")
                    self.bv.define_user_data_var(a, t[0])
                    sym = bn.types.Symbol('DataSymbol', a, varname, varname)
                    self.bv.define_user_symbol(sym)
                    renamed += 1

        log_info(
            "renamed {} function pointers, passed to newproc".format(renamed))
示例#32
0
	def init(self):
		"""Parse the 128-byte NSF (NES Sound Format) header, map RAM/ROM
		segments, and define entry points plus PPU/APU register symbols.

		Returns:
			True on success, False on any parse error (traceback logged).
		"""
		try:
			hdr = self.parent_view.read(0, 128)
			# Single-byte fields: indexing a bytes object yields an int in
			# Python 3 (the original struct.unpack("B", hdr[i]) form only
			# worked on Python 2 strings and raises TypeError on py3).
			self.version = hdr[5]
			self.song_count = hdr[6]
			self.starting_song = hdr[7]
			self.load_address = struct.unpack("<H", hdr[8:10])[0]
			self.init_address = struct.unpack("<H", hdr[10:12])[0]
			self.play_address = struct.unpack("<H", hdr[12:14])[0]
			# 32-byte NUL-padded text fields at 0x0E / 0x2E / 0x4E per the
			# NSF spec; the original indexed a single byte instead of
			# slicing the full field.
			self.song_name = hdr[14:46].split(b"\0")[0].decode("ascii", "replace")
			self.artist_name = hdr[46:78].split(b"\0")[0].decode("ascii", "replace")
			self.copyright_name = hdr[78:110].split(b"\0")[0].decode("ascii", "replace")
			self.play_speed_ntsc = struct.unpack("<H", hdr[110:112])[0]
			self.bank_switching = hdr[112:120]
			self.play_speed_pal = struct.unpack("<H", hdr[120:122])[0]
			self.pal_ntsc_bits = hdr[122]
			# Bit 0: 0 = NTSC, 1 = PAL; bit 1: dual-standard tune.
			self.pal = (self.pal_ntsc_bits & 1) == 1
			self.ntsc = not self.pal
			if self.pal_ntsc_bits & 2 == 2:
				self.pal = True
				self.ntsc = True
			self.extra_sound_bits = hdr[123]

			# Music data always follows the 128-byte header. Set this
			# unconditionally: the original only assigned it on the
			# non-bank-switched path, so the segment mapping below raised
			# AttributeError for bank-switched files. The comparison also
			# must be against bytes, not str, on Python 3.
			self.rom_offset = 128
			if self.bank_switching == b"\0" * 8:
				# no bank switching
				# NOTE(review): the original evaluated
				# `self.load_address & 0xFFF` here and discarded the
				# result (a no-op); intent unclear, so it was removed.
				pass
			else:
				# bank switching not implemented
				log_info("Bank switching not implemented in this loader.")

			# Add mapping for RAM and hardware registers, not backed by file contents
			self.add_auto_segment(0, 0x8000, 0, 0, SegmentFlag.SegmentReadable | SegmentFlag.SegmentWritable | SegmentFlag.SegmentExecutable)

			# Add ROM mappings
			# NOTE(review): mapped at a fixed 0x8000 regardless of
			# self.load_address -- confirm this matches the loader's intent.
			self.add_auto_segment(0x8000, 0x4000, self.rom_offset, 0x4000,
				SegmentFlag.SegmentReadable | SegmentFlag.SegmentExecutable)

			self.define_auto_symbol(Symbol(SymbolType.FunctionSymbol, self.play_address, "_play"))
			self.define_auto_symbol(Symbol(SymbolType.FunctionSymbol, self.init_address, "_init"))
			self.add_entry_point(self.init_address)
			self.add_function(self.play_address)

			# Hardware registers (PPU at $2000-$2007, APU/IO at $4000-$4017)
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x2000, "PPUCTRL"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x2001, "PPUMASK"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x2002, "PPUSTATUS"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x2003, "OAMADDR"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x2004, "OAMDATA"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x2005, "PPUSCROLL"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x2006, "PPUADDR"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x2007, "PPUDATA"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x4000, "SQ1_VOL"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x4001, "SQ1_SWEEP"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x4002, "SQ1_LO"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x4003, "SQ1_HI"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x4004, "SQ2_VOL"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x4005, "SQ2_SWEEP"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x4006, "SQ2_LO"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x4007, "SQ2_HI"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x4008, "TRI_LINEAR"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x400a, "TRI_LO"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x400b, "TRI_HI"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x400c, "NOISE_VOL"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x400e, "NOISE_LO"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x400f, "NOISE_HI"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x4010, "DMC_FREQ"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x4011, "DMC_RAW"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x4012, "DMC_START"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x4013, "DMC_LEN"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x4014, "OAMDMA"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x4015, "SND_CHN"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x4016, "JOY1"))
			self.define_auto_symbol(Symbol(SymbolType.DataSymbol, 0x4017, "JOY2"))

			return True
		except Exception:
			# Loader must not propagate; report failure to Binary Ninja.
			log_error(traceback.format_exc())
			return False