Code example #1
def check_input_file(wanted):
    import binascii
    import idautils

    # some versions (7.4) of IDA return a truncated version of the MD5.
    # https://github.com/idapython/bin/issues/11
    try:
        found = idautils.GetInputFileMD5()[:31].decode("ascii").lower()
    except UnicodeDecodeError:
        # in IDA 7.5 or so, GetInputFileMD5 started returning raw binary
        # rather than the hex digest
        found = binascii.hexlify(idautils.GetInputFileMD5()[:15]).decode("ascii").lower()

    if not wanted.startswith(found):
        raise RuntimeError("please run the tests against sample with MD5: `%s`" % (wanted))
Code example #2
def format_rules(fva, rules):
    '''
    given the address of a function, and the byte signatures for basic blocks in
     the function, format a complete YARA rule that matches all of the
     basic block signatures.
    '''
    name = idc.GetFunctionName(fva)

    # some characters aren't valid for YARA rule names
    safe_name = name
    BAD_CHARS = '@ /\\!@#$%^&*()[]{};:\'",./<>?'
    for c in BAD_CHARS:
        safe_name = safe_name.replace(c, '')

    md5 = idautils.GetInputFileMD5()
    ret = []
    ret.append('rule a_%s_%s {' % (md5, safe_name))
    ret.append('  meta:')
    ret.append('    sample_md5 = "%s"' % (md5))
    ret.append('    function_address = "0x%x"' % (fva))
    ret.append('    function_name = "%s"' % (name))
    ret.append('  strings:')
    for rule in rules:
        formatted_rule = ' '.join(rule.masked_bytes)
        ret.append('    %s = { %s }' % (rule.name, formatted_rule))
    ret.append('  condition:')
    ret.append('    all of them')
    ret.append('}')
    return '\n'.join(ret)
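
format_rules only requires each rule object to expose name and masked_bytes (a list of hex byte strings). A hypothetical driver, assuming the script above is loaded inside IDA, could be (BasicBlockRule and the sample bytes are stand-ins, not part of the original script):

import collections

# hypothetical container matching the attributes format_rules expects
BasicBlockRule = collections.namedtuple("BasicBlockRule", ["name", "masked_bytes"])

fva = idc.ScreenEA()  # an address inside the function of interest (legacy API, matching GetFunctionName above)
rules = [
    BasicBlockRule(name="$bb_401000", masked_bytes=["55", "8B", "EC", "??", "??"]),
]
print(format_rules(fva, rules))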
Code example #3
def collect_metadata():
    md5 = idautils.GetInputFileMD5()
    if not isinstance(md5, six.string_types):
        md5 = capa.features.bytes_to_str(md5)

    sha256 = idaapi.retrieve_input_file_sha256()
    if not isinstance(sha256, six.string_types):
        sha256 = capa.features.bytes_to_str(sha256)

    return {
        "timestamp": datetime.datetime.now().isoformat(),
        # "argv" is not relevant here
        "sample": {
            "md5": md5,
            "sha1": "",  # not easily accessible
            "sha256": sha256,
            "path": idaapi.get_input_file_path(),
        },
        "analysis": {
            "format": idaapi.get_file_type_name(),
            "extractor": "ida",
            "base_address": idaapi.get_imagebase(),
        },
        "version": capa.version.__version__,
    }
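
capa.features.bytes_to_str is not shown in this listing; under the assumption that it simply hex-encodes a raw digest, a stand-in with the same intent would be:

import binascii


def bytes_to_str(b):
    # stand-in for capa.features.bytes_to_str (assumption: it hex-encodes raw digest bytes)
    if isinstance(b, bytes):
        return binascii.hexlify(b).decode("ascii")
    return b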
Code example #4
    def __init__(self, analyzer, feature_size, inner_offset,
                 classifiers_start_offsets, classifiers_end_offsets,
                 classifiers_mixed_offsets, classifier_type_offsets):
        """Create the function classifier according to the architecture-based configurations.

        Args:
            analyzer (instance): analyzer instance that we are going to link against
            feature_size (int): size of the feature set that we use after calibration
            inner_offset (int): calibration offset between a feature and a non-feature
            classifiers_start_offsets (dict): initial function start mapping: code type ==> feature byte offsets
            classifiers_end_offsets (dict): initial function end mapping: code type ==> feature byte offsets
            classifiers_mixed_offsets (dict): initial function start/end mapping: code type ==> feature byte offsets
            classifier_type_offsets (list): initial function type: feature byte offsets
        """
        self._analyzer = analyzer
        self._feature_size = feature_size
        self._inner_offset = inner_offset
        self._classifiers_start_offsets = classifiers_start_offsets
        self._classifiers_end_offsets = classifiers_end_offsets
        self._classifiers_mixed_offsets = classifiers_mixed_offsets
        self._classifier_type_offsets = classifier_type_offsets
        self._start_classifiers = {}
        self._end_classifiers = {}
        self._mixed_classifiers = {}
        self._type_classifier = None
        # seed the random generator
        numpy.random.seed(seed=(int(idautils.GetInputFileMD5(), 16)
                                & 0xFFFFFFFF))
Code example #5
    def __init__(self):
        self.fileName = idaapi.get_root_filename()
        self.fileMD5 = idautils.GetInputFileMD5()
        self.authorName = idc.ARGV[1]
        self.allStrings = {}
        self.subStrings = ["cout", "endl", "Xlength_error", "cerr"]
        self.returns = {"ret": 0, "retn": 0}
        self.libraryFunctionNamesDict = {
            "printf": [0, 0],
            "fprintf": [0, 0],
            "cout": [0, 0],
            "exit": [0, 0],
            "fflush": [0, 0],
            "endl": [0, 0],
            "puts": [0, 0],
            "Xlength_error": [0, 0],
            "clock": [0, 0],
            "cerr": [0, 0]
        }  #,"scanf":[0,0]}

        self.standardRegisters = {
            "eax": 0,
            "ebx": 0,
            "ecx": 0,
            "edx": 0,
            "esi": 0,
            "edi": 0
        }
        self.libraryFunctionNameEADict = {}
Code example #6
def format_rules(fva, rules):
    '''
    given the address of a function, and the byte signatures for basic blocks in
     the function, format a complete YARA rule that matches all of the
     basic block signatures.
    '''
    name = GetFunctionName(fva)
    if not rules:
        logging.info('no rules for {}'.format(name))
        return None

    # some characters aren't valid for YARA rule names
    safe_name = name
    BAD_CHARS = '@ /\\!@#$%^&*()[]{};:\'",./<>?'
    for c in BAD_CHARS:
        safe_name = safe_name.replace(c, '')

    md5 = idautils.GetInputFileMD5()
    ret = []
    ret.append('rule a_{hash:s}_{name:s} {{'.format(hash=md5, name=safe_name))
    ret.append('  meta:')
    ret.append('    sample_md5 = "{md5:s}"'.format(md5=md5))
    ret.append('    function_address = "0x{fva:x}"'.format(fva=fva))
    ret.append('    function_name = "{name:s}"'.format(name=name))
    ret.append('  strings:')
    for rule in rules:
        formatted_rule = ' '.join(rule.masked_bytes).rstrip('?? ')
        ret.append('    {name:s} = {{ {hex:s} }}'.format(name=rule.name,
                                                         hex=formatted_rule))
    ret.append('  condition:')
    ret.append('    all of them')
    ret.append('}')
    return '\n'.join(ret)
Code example #7
    def load_db(self, file_name=None):
        """
        Load DB from file and DeSeralize
        @param file_name: DB filename
        @return: True on success otherwise False
        """
        if file_name is None:
            file_name = self.get_default_db_filename()

        if not os.path.exists(file_name):
            raise IOError("DIE DB file not found")

        with open(file_name, 'rb') as in_file:
            db_tables = pickle.load(in_file)

        # Validate db MD5
        db_md5 = db_tables[0].md5
        if db_md5 != idautils.GetInputFileMD5():
            raise DbFileMismatch(
                "DB file is different than the currently analyzed file")

        self.run_info = db_tables[0]
        self.functions = db_tables[1]
        self.function_args = db_tables[2]
        self.function_contexts = db_tables[3]
        self.threads = db_tables[4]
        self.dbg_values = db_tables[5]
        self.parsed_values = db_tables[6]
        self.excluded_bp_ea = db_tables[7]
        self.excluded_funcNames_part = db_tables[8]
        self.excluded_funcNames = db_tables[9]
        self.excluded_modules = db_tables[10]

        return True
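
The db_md5 != idautils.GetInputFileMD5() check assumes both values have the same type and formatting; a helper that normalizes both sides before comparing (a sketch, not part of DIE) could look like:

import binascii

import idautils


def md5_matches_input(db_md5):
    """Compare a stored MD5 against the currently loaded file, tolerating str/bytes returns (sketch)."""
    def normalize(value):
        if isinstance(value, bytes):
            if len(value) == 16:
                value = binascii.hexlify(value)   # raw digest
            value = value.rstrip(b"\x00").decode("ascii")
        return value.rstrip("\x00").lower()

    return normalize(db_md5) == normalize(idautils.GetInputFileMD5())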
Code example #8
File: yara_fn.py Project: imbillow/python-idb
def format_rules(fva, rules):
    """
    given the address of a function, and the byte signatures for basic blocks in
     the function, format a complete YARA rule that matches all of the
     basic block signatures.
    """
    name = idc.GetFunctionName(fva)

    # some characters aren't valid for YARA rule names
    safe_name = name
    BAD_CHARS = "@ /\\!@#$%^&*()[]{};:'\",./<>?"
    for c in BAD_CHARS:
        safe_name = safe_name.replace(c, "")

    md5 = idautils.GetInputFileMD5()
    ret = []
    ret.append("rule a_%s_%s {" % (md5, safe_name))
    ret.append("  meta:")
    ret.append('    sample_md5 = "%s"' % (md5))
    ret.append('    function_address = "0x%x"' % (fva))
    ret.append('    function_name = "%s"' % (name))
    ret.append("  strings:")
    for rule in rules:
        formatted_rule = " ".join(rule.masked_bytes)
        ret.append("    %s = { %s }" % (rule.name, formatted_rule))
    ret.append("  condition:")
    ret.append("    all of them")
    ret.append("}")
    return "\n".join(ret)
Code example #9
File: yara_fn.py Project: mmg1/idawilli
def format_rules(fva, rules):
    '''
    given the address of a function, and the byte signatures for basic blocks in
     the function, format a complete YARA rule that matches all of the
     basic block signatures.
    '''
    name = idc.get_func_name(fva)

    # some characters aren't valid for YARA rule names
    safe_name = name
    BAD_CHARS = '@ /\\!@#$%^&*()[]{};:\'",./<>?'
    for c in BAD_CHARS:
        safe_name = safe_name.replace(c, '')

    md5 = idautils.GetInputFileMD5().decode("utf-8",
                                            errors="ignore").rstrip('\x00')
    ret = []
    ret.append(f'rule a_{md5}_{safe_name} {{')
    ret.append('  meta:')
    ret.append(f'    sample_md5 = "{md5}"')
    ret.append(f'    function_address = "0x{fva:x}"')
    ret.append(f'    function_name = "{name}"')
    ret.append('  strings:')
    for rule in rules:
        formatted_rule = ' '.join(rule.masked_bytes)
        ret.append(f'    {rule.name} = {{{formatted_rule}}}')
    ret.append('  condition:')
    ret.append('    all of them')
    ret.append('}')
    return '\n'.join(ret)
Code example #10
    def __init__(self):
        self.client = MongoClient('localhost', 27017)
        self.db = self.client.BinAuthor
        self.collection = self.db.Mahalanobis

        self.fileName = idaapi.get_root_filename()
        self.fileMD5 = idautils.GetInputFileMD5()
        self.authorName = self.fileName
Code example #11
File: lib.py Project: zysyyz/GhIDA
def create_random_filename():
    global GLOBAL_FILENAME

    if not GLOBAL_FILENAME:
        letters = [random.choice(string.ascii_letters) for i in range(5)]
        random_string = ''.join(letters)
        GLOBAL_FILENAME = "%s_%s" % (idautils.GetInputFileMD5(), random_string)
    return GLOBAL_FILENAME
Code example #12
    def decide(self):
        """Sum up the information from all of the seen records, and decide what is the alignment pattern.

        Return Value:
            (alignment, pad byte) if found a full pattern, (alignment, None) if no padding, and None for errors.
        """
        # Sanity check
        if len(self._records) < 2:
            return None
        # Now check for a basic alignment rule
        seen_eas = map(lambda x: x[0], self._records)
        # Deterministic results per binary, but still random
        random.seed(int(idautils.GetInputFileMD5(), 16) & 0xFFFFFFFF)
        while True:
            # Check against two random candidates, and always make sure the representative isn't rare
            measure_candidate = seen_eas[random.randint(0, len(seen_eas) - 1)]
            measure_candidate_alt = seen_eas[random.randint(0, len(seen_eas) - 1)]
            gcds = map(lambda x: gcd(measure_candidate, x), seen_eas)
            gcds_alt = map(lambda x: gcd(measure_candidate_alt, x), seen_eas)
            alignment = min(gcds)
            alignment_alt = min(gcds_alt)
            if alignment > alignment_alt:
                alignment = alignment_alt
                measure_candidate = measure_candidate_alt
                try_again = True
            elif alignment != alignment_alt:
                try_again = True
            else:
                try_again = False
            # Try to check if removing outliers will improve the alignment
            if try_again or gcds.count(alignment) <= len(gcds) * 0.01:
                # pick the next element, and try to improve the result
                seen_eas = filter(lambda x: gcd(measure_candidate, x) != alignment, seen_eas)
            # we can't improve the results
            else:
                break
        # We shouldn't look for padding bytes (we have no size)
        if self._records[0][1] is None:
            return alignment
        # Alignment is 1, there is no padding to be found
        if alignment == 1:
            return (alignment, None)
        # Check if there is a common padding byte (skip the outliers)
        pad_byte = None
        for ea, size in filter(lambda x: x[0] % alignment == 0, self._records):
            for offset in xrange((alignment - ((ea + size) % alignment)) % alignment):
                test_byte = idc.Byte(ea + size + offset)
                if pad_byte is None:
                    pad_byte = test_byte
                # Failed to find a single padding byte...
                elif pad_byte != test_byte:
                    return (alignment, None)
        # Found a padding byte :)
        if pad_byte is not None:
            return (alignment, pad_byte)
        # There were no gaps to be padded, no padding is needed
        else:
            return (alignment, None)
Code example #13
def check_input_file():
    import idautils

    wanted = "5f66b82558ca92e54e77f216ef4c066c"
    # some versions of IDA return a truncated version of the MD5.
    # https://github.com/idapython/bin/issues/11
    found = idautils.GetInputFileMD5().rstrip(b"\x00").decode("ascii").lower()
    if not wanted.startswith(found):
        raise RuntimeError("please run the tests against `mimikatz.exe`")
Code example #14
def getHash(root_path):

    with open(root_path + 'hash.txt', 'w') as f:
        try:
            hash_md5 = idautils.GetInputFileMD5()
        except Exception:
            hash_md5 = "error"

        f.write(hash_md5)
Code example #15
File: import-to-ida.py Project: walt1998/capa
def main():
    path = ida_kernwin.ask_file(False, "*", "capa report")
    if not path:
        return 0

    with open(path, "rb") as f:
        doc = json.loads(f.read().decode("utf-8"))

    if "meta" not in doc or "rules" not in doc:
        logger.error("doesn't appear to be a capa report")
        return -1

    # in IDA 7.4, the MD5 hash may be truncated, for example:
    # wanted: 84882c9d43e23d63b82004fae74ebb61
    # found: b'84882C9D43E23D63B82004FAE74EBB6\x00'
    #
    # see: https://github.com/idapython/bin/issues/11
    a = doc["meta"]["sample"]["md5"].lower()
    b = idautils.GetInputFileMD5().decode("ascii").lower().rstrip("\x00")
    if not a.startswith(b):
        logger.error("sample mismatch")
        return -2

    rows = []
    for rule in doc["rules"].values():
        if rule["meta"].get("lib"):
            continue
        if rule["meta"].get("capa/subscope"):
            continue
        if rule["meta"]["scope"] != "function":
            continue

        name = rule["meta"]["name"]
        ns = rule["meta"].get("namespace", "")
        for va in rule["matches"].keys():
            va = int(va)
            rows.append((ns, name, va))

    # order by (namespace, name) so that like things show up together
    rows = sorted(rows)
    for ns, name, va in rows:
        if ns:
            cmt = "%s (%s)" % (name, ns)
        else:
            cmt = "%s" % (name, )

        logger.info("0x%x: %s", va, cmt)
        try:
            # message will look something like:
            #
            #     capa: delete service (host-interaction/service/delete)
            append_func_cmt(va, "capa: " + cmt, repeatable=False)
        except ValueError:
            continue

    logger.info("ok")
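
append_func_cmt is defined elsewhere in import-to-ida.py; a minimal sketch of what such a helper can do with the standard IDAPython API (not the script's exact implementation) is:

import idaapi
import idc


def append_func_cmt(va, cmt, repeatable=False):
    """Append a comment to the function containing va unless it is already present (sketch)."""
    func = idaapi.get_func(va)
    if func is None:
        raise ValueError("0x%x is not inside a function" % va)

    existing = idc.get_func_cmt(func.start_ea, repeatable) or ""
    if cmt in existing:
        return
    idc.set_func_cmt(func.start_ea, (existing + "\n" + cmt).strip(), repeatable)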
Code example #16
 def __init__(self):
     self.functionAddresstoRealFunctionName = {}
     self.functionRegisterChains = {}
     self.finalOutput = ''
     self.finalOutputFunctionLevel = ''
     self.simhashList = []
     self.registerChainMinhash = []
     self.blocks = []
     self.fileName = idaapi.get_root_filename()
     self.fileMD5 = idautils.GetInputFileMD5()
     self.authorName = idc.ARGV[1]
Code example #17
File: lib.py Project: xentrick/GhIDA
def ghidraaas_checkin(bin_file_path, filename, ghidra_server_url):
    """
    Upload the .bytes files in ghidraaas.
    One time only (until IDA is restarted...)
    """
    idaapi.show_wait_box("Connecting to Ghidraaas. Sending bytes file...")
    try:
        md5_hash = idautils.GetInputFileMD5()
        queue = Queue.Queue()

        my_args = (bin_file_path, filename, ghidra_server_url, md5_hash, queue)
        t1 = threading.Thread(target=ghidraaas_checkin_thread,
                              args=my_args)
        t1.start()

        counter = 0
        stop = False

        while not stop:
            time.sleep(SLEEP_LENGTH)
            counter += 1

            # User terminated action
            if idaapi.user_cancelled():
                stop = True
                print("GhIDA:: [!] Check-in interrupted.")
                continue

            # Reached TIMEOUT
            if counter > COUNTER_MAX:
                stop = True
                print("GhIDA:: [!] Timeout reached.")
                continue

            # Thread terminated
            if not t1.isAlive():
                stop = True
                print("GhIDA:: [DEBUG] Thread terminated.")
                continue

        print("GhIDA:: [DEBUG] Joining check-in thread.")
        t1.join(0)
        q_result = queue.get_nowait()
        print("GhIDA:: [DEBUG] Thread joined. Got queue result.")
        idaapi.hide_wait_box()
        return q_result

    except Exception:
        idaapi.hide_wait_box()
        print("GhIDA:: [!] Check-in error.")
        idaapi.warning("GhIDA check-in error")
        return False
Code example #18
def collect_metadata():
    return {
        "timestamp": datetime.datetime.now().isoformat(),
        # "argv" is not relevant here
        "sample": {
            "md5": capa.features.bytes_to_str(idautils.GetInputFileMD5()),
            # "sha1" not easily accessible
            "sha256": capa.features.bytes_to_str(idaapi.retrieve_input_file_sha256()),
            "path": idaapi.get_input_file_path(),
        },
        "analysis": {"format": idaapi.get_file_type_name(), "extractor": "ida",},
        "version": capa.version.__version__,
    }
Code example #19
File: lib.py Project: secretnonempty/GhIDA
def ghidraaas_checkout(ghidra_server_url):
    """
    That's all. Remove .bytes file from Ghidraaas server.
    """
    if not GLOBAL_CHECKIN:
        return

    idaapi.show_wait_box(
        "Connecting to Ghidraaas. Removing temporary files...")
    try:
        md5_hash = idautils.GetInputFileMD5()
        aargs = (md5_hash, ghidra_server_url)

        t1 = threading.Thread(target=ghidraaas_checkout_thread,
                              args=aargs)
        t1.start()

        counter = 0
        stop = False

        while not stop:
            # print("waiting check-out 1 zzz")
            # idaapi.request_refresh(idaapi.IWID_DISASMS)
            time.sleep(0.1)

            if wasbreak():
                print("GhIDA:: [!] Check-out interrupted.")
                stop = True
                continue

            if counter > COUNTER_MAX * 10:
                print("GhIDA:: [!] Timeout reached.")
                stop = True
                continue

            if not t1.isAlive():
                stop = True
                print("GhIDA:: [DEBUG] Thread terminated.")
                continue

        print("GhIDA:: [DEBUG] Joining check-out thread.")
        t1.join(0)
        print("GhIDA:: [DEBUG] Thread joined")
        idaapi.hide_wait_box()
        return

    except Exception:
        idaapi.hide_wait_box()
        print("GhIDA:: [!] Check-out error")
        idaapi.warning("GhIDA check-out error")
        return
Code example #20
def get_as_single_surrogate(funcs=None):
    data = dict()
    data['name'] = _get_bin_name()
    data['architecture'] = _get_arch()
    data['md5'] = idautils.GetInputFileMD5()

    if funcs is None:
        funcs = get_all_ida_funcs()
    if not isinstance(funcs, list):
        funcs = [funcs]
    data['functions'] = [
        _get_ida_func_surrogate(func, data['architecture']['type'])
        for func in funcs
    ]
    return data
Code example #21
File: actions.py Project: snyiu100/IDAConnect
    def _dialog_accepted(self, dialog):
        """
        Called when the save dialog is accepted by the user.

        :param dialog: the save dialog
        """
        repo, branch = dialog.get_result()

        # Create new repository if necessary
        if not repo:
            hash = idautils.GetInputFileMD5()
            file = idc.GetInputFile()
            type = idaapi.get_file_type_name()
            dateFormat = "%Y/%m/%d %H:%M"
            date = datetime.datetime.now().strftime(dateFormat)
            repo = Repository(hash, file, type, date)
            d = self._plugin.network.send_packet(NewRepository.Query(repo))
            d.addCallback(partial(self._on_new_repository_reply, repo, branch))
            d.addErrback(logger.exception)
        else:
            self._on_new_repository_reply(repo, branch, None)
Code example #22
File: ghida.py Project: secretnonempty/GhIDA
def load_configuration():
    """
    """
    global GHIDA_CONF
    global DECOMPILED_CACHE
    global COMMENTS_CACHE

    # Loading the plugin configuration
    print("GhIDA:: [DEBUG] Reading GhIDA configuration")
    GHIDA_CONF = gl.GhidaConfiguration()

    print("GHIDA_CONF.load_save_cached_code", GHIDA_CONF.load_save_cached_code)
    print("GHIDA_CONF.load_save_cached_comments",
          GHIDA_CONF.load_save_cached_comments)

    md5 = idautils.GetInputFileMD5()

    # Initialize the cache (and load cached objects)
    DECOMPILED_CACHE = gl.DecompiledCache(
        file_id=md5, use_cache=GHIDA_CONF.load_save_cached_code)
    COMMENTS_CACHE = gl.CommentsCache(
        file_id=md5, use_cache=GHIDA_CONF.load_save_cached_comments)

    return
Code example #23
    def YaraExport(self):

        def pretty_hex(data):
            return ' '.join(data[i:i+2] for i in range(0, len(data), 2))

        def rich_header():
            try:
                pe = pefile.PE(GetInputFilePath().decode("utf-8"))
            except:
                pe = pefile.PE(GetInputFilePath())

            rich_header = pe.parse_rich_header()
            return hashlib.md5(rich_header['clear_data']).hexdigest()

        def imphash():
            try:
                pe = pefile.PE(GetInputFilePath().decode("utf-8"))
            except:
                pe = pefile.PE(GetInputFilePath())
            return pe.get_imphash()

        global ruleset_list
        info = idaapi.get_inf_structure()
        if info.is_64bit():
            md = Cs(CS_ARCH_X86, CS_MODE_64)
        elif info.is_32bit():
            md = Cs(CS_ARCH_X86, CS_MODE_32)
        result = "import \"hash\"\n"
        result += "import \"pe\"\n\n"
        result += "rule " + self.Variable_name.text() + "\n{\n"
        result += "  meta:\n"
        result += "      tool = \"https://github.com/hy00un/Hyara\"\n"
        result += "      version = \"" + "1.8" + "\"\n"
        result += "      date = \"" + time.strftime("%Y-%m-%d") + "\"\n"
        result += "      MD5 = \"" + idautils.GetInputFileMD5() + "\"\n"
        result += "  strings:\n"
        for name in ruleset_list.keys():
            try:
                CODE = bytearray.fromhex(ruleset_list[name][0][1:-1].strip().replace("\\x"," "))
                print(CODE)
                print(type(CODE))
                if self.CheckBox1.isChecked():
                    result += "      /*\n"
                    for i in md.disasm(bytes(CODE), 0x1000):
                        byte_data = "".join('{:02X}'.format(x) for x in i.bytes)
                        result += "          %-10s\t%-30s\t\t|%s" % (i.mnemonic.upper(), i.op_str.upper().replace("0X","0x"), byte_data.upper()) + "\n"
                    result += "      */\n"

                ## http://sparksandflames.com/files/x86InstructionChart.html
                ## https://pnx.tf/files/x86_opcode_structure_and_instruction_overview.png
                ## http://ref.x86asm.net/coder32.html
                ## http://www.mathemainzel.info/files/x86asmref.html #
                if self.CheckBox2.isChecked(): # yara wildcard isChecked()
                    opcode = []
                    CODE = bytearray.fromhex(ruleset_list[name][0][1:-1].strip().replace("\\x"," "))
                    for i in md.disasm(bytes(CODE), 0x1000):
                        byte_data = "".join('{:02X}'.format(x) for x in i.bytes)

                        if byte_data.startswith("FF"): # ex) ff d7 -> call edi
                            opcode.append("FF [1-5]")

                        elif byte_data.startswith("0F"): # ex) 0f 84 bb 00 00 00 -> jz loc_40112A, 0f b6 0b -> movzx cx, byte ptr [ebx]
                            opcode.append("0F [1-5]") # (multi byte)

                        elif re.compile("7[0-9A-F]").match(byte_data): # jo, jno, jb, jnb, jz, jnz, jbe, ja, js, jns, jp, jnp, jl, jnl, jle, jnle
                            opcode.append(byte_data[:2]+" ??") # ex) 7c 7f -> jl 0x81 (7c only 1 byte) (1byte < have 0f)

                        elif i.mnemonic == "push":
                            if re.compile("5[0-7]|0(6|E)|1(6|E)").match(byte_data): # push e[a-b-c]x ..
                                opcode.append(byte_data[:1]+"?")
                            elif re.compile("6(8|A)+").match(byte_data):
                                opcode.append(pretty_hex(byte_data))

                        elif i.mnemonic == "pop":
                            if re.compile("5[8-F]|07|1(7|F)").match(byte_data): # pop e[a-b-c]x ..
                                opcode.append(byte_data[:1]+"?")
                            elif re.compile("8F").match(byte_data):
                                opcode.append(pretty_hex(byte_data))

                        elif i.mnemonic == "mov":
                            if re.compile("B[8-F]").match(byte_data): # ex) b8 01 22 00 00 -> mov eax, 0x2201, bf 38 00 00 00 -> mov edi, 38 , 8b 54 24 10 -> mov edx, [esp+32ch+var_31c]
                                opcode.append(byte_data[:2]+" [4]")
                            elif re.compile("B[0-7]").match(byte_data): # ex) b7 60 -> mov bh, 0x60
                                opcode.append("B? "+byte_data[2:])
                            elif re.compile("8[8-9A-C]|8E").match(byte_data): # ex) 8b 3d a8 e1 40 00 -> mov edi, ds:GetDlgItem
                                opcode.append(byte_data[:2]+" [1-4]") # ex) 8b 5c 24 14 -> mob ebx, [esp+10+ThreadParameter] , 8b f0 -> mov esi, eax
                            elif re.compile("C[6-7]").match(byte_data): # ex) c7 44 24 1c 00 00 00 00 -> mov [esp+338+var_31c], 0
                                opcode.append(byte_data[:2]+" [2-8]")
                            elif re.compile("A[0-3]").match(byte_data):
                                opcode.append(byte_data[:2]+" [1-4]") # ex) a1 60 40 41 00 -> mov eax, __security_cookie
                            else:
                                opcode.append(pretty_hex(byte_data))

                        elif i.mnemonic == "inc":
                            if re.compile("4[0-7]").match(byte_data):
                                opcode.append(byte_data[:1]+"?")
                            else:
                                opcode.append(pretty_hex(byte_data))

                        elif i.mnemonic == "dec":
                            if re.compile("4[8-9A-F]").match(byte_data): # 48 ~ 4f
                                opcode.append(byte_data[:1]+"?")
                            else:
                                opcode.append(pretty_hex(byte_data))

                        elif i.mnemonic == "xor":
                            if re.compile("3[0-3]").match(byte_data):
                                opcode.append(byte_data[:2]+" [1-4]")
                            elif re.compile("34").match(byte_data): # ex) 34 da -> xor al, 0xda 
                                opcode.append(byte_data[:2]+" ??")
                            elif re.compile("35").match(byte_data): # ex) 35 da 00 00 00 -> xor eax, 0xda
                                opcode.append("35 [4]")
                            else:
                                opcode.append(pretty_hex(byte_data))

                        elif i.mnemonic == "add":
                            if re.compile("0[0-3]").match(byte_data):
                                opcode.append(byte_data[:2]+" [1-4]")
                            elif re.compile("04").match(byte_data): # ex) 04 da -> xor al, 0xda 
                                opcode.append(byte_data[:2]+" ??")
                            elif re.compile("05").match(byte_data): # ex) 05 da 00 00 00 -> xor eax, 0xda
                                opcode.append("05 [4]")
                            else:
                                opcode.append(pretty_hex(byte_data))

                        elif i.mnemonic == "call":
                            if re.compile("E8").match(byte_data):
                                opcode.append("E8 [4]") # call address(?? ?? ?? ??)
                            else:
                                opcode.append(pretty_hex(byte_data))

                        elif i.mnemonic == "test":
                            if re.compile("8[4-5]|A8").match(byte_data): # ex) 84 ea -> test dl, ch
                                opcode.append(byte_data[:2]+" ??") 
                            elif re.compile("A9").match(byte_data): # ex) a9 ea 00 00 00 -> test eax, 0xea
                                opcode.append("A9 [4]")
                            elif re.compile("F[6-7]").match(byte_data):
                                opcode.append(byte_data[:2]+" [2-7]")
                            else:
                                opcode.append(pretty_hex(byte_data))

                        elif i.mnemonic == "and":
                            if re.compile("8[0-3]").match(byte_data):
                                opcode.append(byte_data[:2] + " " + byte_data[2:3] + "? [4]") # ex) 81 e3 f8 07 00 00 -> and ebx, 7f8
                            elif re.compile("2[0-3]").match(byte_data):
                                opcode.append(byte_data[:2]+" [1-4]")
                            elif re.compile("24").match(byte_data):
                                opcode.append(byte_data[:2]+" ??") # ex) 22 d1 -> and dl, cl
                            elif re.compile("25").match(byte_data):
                                opcode.append(byte_data[:2]+" [4]")
                            else:
                                opcode.append(pretty_hex(byte_data))

                        elif i.mnemonic == "lea":
                            if re.compile("8D").match(byte_data): # ex) 8d 9b 00 00 00 00 -> lea ebx, [ebx+0] == 8d 1b
                                opcode.append("8D [1-6]")
                            else:
                                opcode.append(pretty_hex(byte_data))

                        elif i.mnemonic == "sub":
                            if re.compile("2[8A-B]").match(byte_data): # ex) 2a 5c 24 14 -> sub	bl, byte ptr [esp + 0x14]
                                opcode.append(byte_data[:2]+" [1-4]")
                            elif re.compile("2C").match(byte_data): # ex) 28 da -> sub dl, bl
                                opcode.append(byte_data[:2]+" ??")
                            elif re.compile("2D").match(byte_data): # ex) 2d da 00 00 00 -> sub eax, 0xda
                                opcode.append("2D [4]")
                            elif re.compile("8[2-3]").match(byte_data):
                                opcode.append("8? "+byte_data[2:])
                            else:
                                opcode.append(pretty_hex(byte_data))

                        elif i.mnemonic == "or":
                            if re.compile("0[8A-B]").match(byte_data): # ex) 08 14 30 -> or byte ptr [eax + esi], dl , 0b 5c 24 14 -> or ebx, dword ptr [esp + 0x14]
                                opcode.append(byte_data[:2]+" [1-4]")
                            elif re.compile("0C").match(byte_data): # ex) 0c ea -> or al, 0xea
                                opcode.append(byte_data[:2]+" ??")
                            elif re.compile("0D").match(byte_data): # ex) 0d ea 00 00 00 -> or eax, 0xea
                                opcode.append("0D [4]")
                            else:
                                opcode.append(pretty_hex(byte_data))

                        elif i.mnemonic == "cmp":
                            if re.compile("3[8A-B]").match(byte_data):
                                opcode.append(byte_data[:2]+" [1-4]")
                            elif re.compile("3C").match(byte_data): # ex) 3a ea -> cmp ch, dl
                                opcode.append(byte_data[:2]+" ??")
                            elif re.compile("3D").match(byte_data): # ex) 3d ea 00 00 00 -> cmp eax, 0xea
                                opcode.append("3D [4]")
                            else:
                                opcode.append(pretty_hex(byte_data))

                        elif i.mnemonic == "shl" or i.mnemonic == "sar":
                            if re.compile("C[0-1]").match(byte_data): # ex) c1 fa 02 -> sar edx, 2 , 
                                opcode.append(byte_data[:2]+" [2]")
                            elif re.compile("D[0-3]").match(byte_data): # ex) d0 fa -> sar dl, 1
                                opcode.append(byte_data[:2]+" ??")
                            else:
                                opcode.append(pretty_hex(byte_data))
                        
                        elif i.mnemonic == "xchg":
                            if re.compile("9[1-7]").match(byte_data):
                                opcode.append(byte_data[:1]+"?")
                            elif re.compile("8[6-7]").match(byte_data):
                                opcode.append(byte_data[:2]+ " [1-6]")
                            else:
                                opcode.append(pretty_hex(byte_data))

                        else:
                            opcode.append(pretty_hex(byte_data))


                    try:
                        if ''.join(opcode)[-1] == "]": # syntax error, unexpected '}', expecting _BYTE_ or _MASKED_BYTE_ or '(' or '['
                            opcode.append("??")
                    except:
                        pass

                    result += "      $" + name + " = {" + ' '.join(opcode).upper() + "}\n"
                else:
                    opcode = pretty_hex(ruleset_list[name][0][1:-1])
                    result += "      $" + name + " = {" + opcode.upper() +"}\n"
            except ValueError: # string option
                result += "      $" + name + " = " + ruleset_list[name][0]+"\n"
        result += "  condition:\n"
        result += "      all of them"
        if self.CheckBox4.isChecked():
            result += " and hash.md5(pe.rich_signature.clear_data) == \"" + rich_header() + "\""
        
        if self.CheckBox5.isChecked():
            result += " and pe.imphash() == \"" + imphash() + "\""
        result += "\n}"
        self.TextEdit1.clear()
        self.TextEdit1.insertPlainText(result)
Code example #24
def get_file_md5():
    """ """
    md5 = idautils.GetInputFileMD5()
    if not isinstance(md5, str):
        md5 = capa.features.bytes_to_str(md5)
    return md5
Code example #25
 def __init__(self):
     self.fileName = idaapi.get_root_filename()
     self.fileMD5 = idautils.GetInputFileMD5()
     self.authorName = idc.ARGV[1]
Code example #26
def get_input_file_hash():
    return idautils.GetInputFileMD5()
Code example #27
File: lib.py Project: zysyyz/GhIDA
def ghidraaas_decompile(address, xml_file_path, bin_file_path,
                        ghidra_server_url):
    """
    Send the xml file to ghidraaas
    and ask to decompile a function
    """
    global GLOBAL_CHECKIN

    # Filename without the .xml extension
    filename = GLOBAL_FILENAME

    if not GLOBAL_CHECKIN:
        if ghidraaas_checkin(bin_file_path, filename, ghidra_server_url):
            GLOBAL_CHECKIN = True
        else:
            raise Exception("[!] Ghidraaas Check-in error")

    idaapi.show_wait_box("Connecting to Ghidraaas. Decompiling function %s" %
                         address)

    try:
        md5_hash = idautils.GetInputFileMD5()
        queue = Queue.Queue()

        aargs = (address, xml_file_path, bin_file_path, ghidra_server_url,
                 filename, md5_hash, queue)
        t1 = threading.Thread(target=ghidraaas_decompile_thread, args=aargs)
        t1.start()

        counter = 0
        stop = False

        while not stop:
            # idaapi.request_refresh(idaapi.IWID_DISASMS)
            # print("waiting decompile 1 zzz")
            time.sleep(0.1)

            if idaapi.wasBreak():
                print("GhIDA:: [!] decompilation interrupted.")
                stop = True
                continue

            if counter > COUNTER_MAX * 10:
                print("GhIDA:: [!] Timeout reached.")
                stop = True
                continue

            if not t1.isAlive():
                stop = True
                print("GhIDA:: [DEBUG] Thread terminated.")
                continue

        print("GhIDA:: [DEBUG] Joining decompilation thread.")
        t1.join(0)
        q_result = queue.get_nowait()
        print("GhIDA:: [DEBUG] Thread joined. Got queue result.")
        idaapi.hide_wait_box()
        return q_result

    except Exception:
        idaapi.hide_wait_box()
        print("GhIDA:: [!] Unexpected decompilation error")
        idaapi.warning("GhIDA decompilation error")
        return None
Code example #28
File: ida_api.py Project: zha0/Hyara
 def get_md5(self) -> str:
     return idautils.GetInputFileMD5().hex()
Code example #29
File: YaToolHashProvider.py Project: tmcmil/YaCo
 def __init__(self):
     _yatools_hash_provider.set_string_start("md5=" +
                                             idautils.GetInputFileMD5() +
                                             "----")
     self.count = 0
     self.populate_struc_enum_ids()
Code example #30
File: dumpmasm.py Project: zzmjohn/pharos
            # There are other types that IDA recognizes.
        elif ida_bytes.is_unknown(iflags):
            tcode = "UNK-%08X" % iflags
            imnem = "???"
            iops = "???"

        for faddr in sorted(faddrs):
            out.write('"PART",0x%08X,"%s",0x%08X,"%s","%s","%s"\n' %
                      (ea, tcode, faddr, ihexbytes, imnem, iops))
        ea = ida_bytes.next_head(ea, max_ea)
    print "Analysis complete!"


outname = None
if True:
    fhash = idautils.GetInputFileMD5()
    if fhash is not None:
        fhash = fhash.lower()
        outname = "/tmp/%s_idadata.csv" % fhash

if outname == None:
    # Get the file name of the file being analyzed.
    fpath = ida_nalt.get_input_file_path()
    fname = os.path.basename(fpath)
    outname = "/tmp/%s_idadata.csv" % fname

print "The output filename is '%s'." % outname
outfile = open(outname, 'w')

ida_auto.auto_wait()
dump_heads(outfile)