Example 1
    def __init__(self, analyzer, feature_size, inner_offset, classifiers_start_offsets, classifiers_end_offsets, classifiers_mixed_offsets, classifier_type_offsets):
        """Create the function classifier according to the architecture-based configurations.

        Args:
            analyzer (instance): analyzer instance that we are going to link against
            feature_size (int): size of the feature set that we use after calibration
            inner_offset (int): calibration offset between a feature and a non-feature
            classifiers_start_offsets (dict): initial function start mapping: code type ==> feature byte offsets
            classifiers_end_offsets (dict): initial function end mapping: code type ==> feature byte offsets
            classifiers_mixed_offsets (dict): initial function start/end mapping: code type ==> feature byte offsets
            classifier_type_offsets (list): initial function type: feature byte offsets
        """
        self._analyzer = analyzer
        self._feature_size = feature_size
        self._inner_offset = inner_offset
        self._classifiers_start_offsets = classifiers_start_offsets
        self._classifiers_end_offsets   = classifiers_end_offsets
        self._classifiers_mixed_offsets = classifiers_mixed_offsets
        self._classifier_type_offsets   = classifier_type_offsets
        self._start_classifiers = {}
        self._end_classifiers   = {}
        self._mixed_classifiers = {}
        self._type_classifier   = None
        # seed the random generator
        numpy.random.seed(seed=struct.unpack("!I", ida_nalt.retrieve_input_file_md5()[:4])[0])
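The snippet above (like Example 5, and Example 9 with the plain random module) turns the first four bytes of the input file's MD5 into a 32-bit seed, so any randomized decisions are reproducible for a given binary. Below is a minimal sketch of the same derivation that runs outside IDA, using hashlib on an arbitrary file as a stand-in for ida_nalt.retrieve_input_file_md5(); the file path and the helper name are illustrative only.

import hashlib
import struct

import numpy


def seed_from_md5(md5_digest):
    # Mirror struct.unpack("!I", md5[:4])[0] from the example: a big-endian
    # unsigned 32-bit integer built from the first four digest bytes.
    return struct.unpack("!I", md5_digest[:4])[0]


# Stand-in for ida_nalt.retrieve_input_file_md5() when running outside IDA
# (illustrative path; inside IDA you would use the returned digest directly).
with open("/bin/ls", "rb") as f:
    digest = hashlib.md5(f.read()).digest()

numpy.random.seed(seed=seed_from_md5(digest))
print(numpy.random.randint(0, 100, size=3))  # identical output for the same binary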
Example 2
    def __init__(self, f_debug, db_path, min_bytes, f_ex_libthunk, f_update, f_ana_exp, ana_pre, f_ana_cmp=False, f_fol_cmp=False, ana_fol='', threshold=None, threshold_cfg=None, max_bytes_for_score=None, ratio=0):
        self.f_debug = f_debug
        self.conn = sqlite3.connect(db_path)
        self.cur = self.conn.cursor()
        self.init_db()
        self.in_memory_db()        
        self.min_bytes = min_bytes
        self.f_ex_libthunk = f_ex_libthunk
        # for export
        self.f_update = f_update
        self.f_ana_exp = f_ana_exp        
        self.ana_pre = ana_pre
        if f_ana_exp:
            self.ana_pat = re.compile(self.ana_pre)
        # for compare
        self.f_ana_cmp = f_ana_cmp
        self.f_fol_cmp = f_fol_cmp
        self.ana_fol = ana_fol
        self.threshold = threshold
        self.threshold_cfg = threshold_cfg
        self.max_bytes_for_score = max_bytes_for_score
        self.ratio = float(ratio)

        self.idb_path = get_idb_path()
        self.sha256 = ida_nalt.retrieve_input_file_sha256()
        try:
            self.sha256 = self.sha256.lower()
        except AttributeError:
            message = 'ida_nalt.retrieve_input_file_sha256() returned None. The IDB was probably generated by an old IDA (<6.9). Check the version with ida_netnode.cvar.root_node.supstr(ida_nalt.RIDX_IDA_VERSION)'
            error(message)
            ida_kernwin.warning(message)            
        self.md5 = ida_nalt.retrieve_input_file_md5().lower()
Example 3
    def _create_project_accepted(self, dialog):
        """Called when the project creation dialog is accepted."""
        name = dialog.get_result()
        # Ensure we don't already have a project with that name
        # Note: 2 different groups can have two projects with the same name
        # and it will effectively be 2 different projects
        if any(project.name == name for project in self._projects):
            failure = QMessageBox()
            failure.setIcon(QMessageBox.Warning)
            failure.setStandardButtons(QMessageBox.Ok)
            failure.setText("A project with that name already exists!")
            failure.setWindowTitle("New Project")
            icon_path = self._plugin.plugin_resource("upload.png")
            failure.setWindowIcon(QIcon(icon_path))
            failure.exec_()
            return

        # Get all the information we need and send it to the server
        hash = ida_nalt.retrieve_input_file_md5()
        # Remove the trailing null byte, if it exists
        if hash.endswith(b'\x00'):
            hash = hash[0:-1]
        # This decode is safe, because we have a hash in hex format
        hash = binascii.hexlify(hash).decode('utf-8')
        file = ida_nalt.get_root_filename()
        ftype = ida_loader.get_file_type_name()
        date_format = "%Y/%m/%d %H:%M"
        date = datetime.datetime.now().strftime(date_format)
        project = Project(self._group.name, name, hash, file, ftype, date)
        d = self._plugin.network.send_packet(CreateProject.Query(project))
        d.add_callback(partial(self._project_created, project))
        d.add_errback(self._plugin.logger.exception)
Example 4
    def _create_project_accepted(self, dialog):
        """Called when the project creation dialog is accepted."""
        name = dialog.get_result()

        # Ensure we don't already have a project with that name
        if any(project.name == name for project in self._projects):
            failure = QMessageBox()
            failure.setIcon(QMessageBox.Warning)
            failure.setStandardButtons(QMessageBox.Ok)
            failure.setText("A project with that name already exists!")
            failure.setWindowTitle("New Project")
            icon_path = self._plugin.plugin_resource("upload.png")
            failure.setWindowIcon(QIcon(icon_path))
            failure.exec_()
            return

        # Get all the information we need and send it to the server
        hash = ida_nalt.retrieve_input_file_md5().lower()
        file = ida_nalt.get_root_filename()
        type = ida_loader.get_file_type_name()
        date_format = "%Y/%m/%d %H:%M"
        date = datetime.datetime.now().strftime(date_format)
        project = Project(name, hash, file, type, date)
        d = self._plugin.network.send_packet(CreateProject.Query(project))
        d.add_callback(partial(self._project_created, project))
        d.add_errback(self._plugin.logger.exception)
Example 5
    def __init__(self, analyzer, feature_size, inner_offset, classifiers_start_offsets, classifiers_end_offsets, classifiers_mixed_offsets, classifier_type_offsets):
        """Create the function classifier according to the architecture-based configurations.

        Args:
            analyzer (instance): analyzer instance that we are going to link against
            feature_size (int): size of the feature set that we use after calibration
            inner_offset (int): calibration offset between a feature and a non-feature
            classifiers_start_offsets (dict): initial function start mapping: code type ==> feature byte offsets
            classifiers_end_offsets (dict): initial function end mapping: code type ==> feature byte offsets
            classifiers_mixed_offsets (dict): initial function start/end mapping: code type ==> feature byte offsets
            classifier_type_offsets (list): initial function type: feature byte offsets
        """
        self._analyzer = analyzer

        # Init all sub-classifiers
        self._classifiers_start = {}
        self._classifiers_end = {}
        self._classifiers_mixed = {}
        for code_type in self._analyzer.activeCodeTypes():
            self._classifiers_start[code_type] = FeatureClassifier(analyzer, "Function Prologue", feature_size, inner_offset, \
                                                                   classifiers_start_offsets[code_type], lambda x: x.start_ea, lambda x: int(FunctionClassifier.isFuncStart(x)))
            self._classifiers_end[code_type] = FeatureClassifier(analyzer, "Function Epilogue", feature_size, inner_offset, \
                                                                   classifiers_end_offsets[code_type], lambda x: x.end_ea, lambda x: int(FunctionClassifier.isFuncEnd(x)))
            self._classifiers_mixed[code_type] = FeatureClassifier(analyzer, "Function Prologue/Epilogue", feature_size, inner_offset, \
                                                                   classifiers_mixed_offsets[code_type], lambda x: x.start_ea, lambda x: int(FunctionClassifier.isFuncStart(x)))
        # And now, the type classifier
        if analyzer.hasActiveCodeTypes():
            self._classifier_type = FeatureClassifier(analyzer, "Function Type", feature_size, inner_offset, classifier_type_offsets, lambda x: x.start_ea, lambda x: self._analyzer.codeType(x))
        # seed the random generator
        numpy.random.seed(seed=struct.unpack("!I", ida_nalt.retrieve_input_file_md5()[:4])[0])
Example 6
    def _new_repo_accepted(self, dialog):
        """
        Called when the new repository dialog is accepted by the user.

        :param dialog: the dialog
        """
        name = dialog.get_result()
        if any(repo.name == name for repo in self._repos):
            failure = QMessageBox()
            failure.setIcon(QMessageBox.Warning)
            failure.setStandardButtons(QMessageBox.Ok)
            failure.setText("A repository with that name already exists!")
            failure.setWindowTitle("New Repository")
            iconPath = self._plugin.resource('upload.png')
            failure.setWindowIcon(QIcon(iconPath))
            failure.exec_()
            return

        hash = ida_nalt.retrieve_input_file_md5().lower()
        file = ida_nalt.get_root_filename()
        type = ida_loader.get_file_type_name()
        dateFormat = "%Y/%m/%d %H:%M"
        date = datetime.datetime.now().strftime(dateFormat)
        repo = Repository(name, hash, file, type, date)
        d = self._plugin.network.send_packet(NewRepository.Query(repo))
        d.add_callback(partial(self._on_new_repo, repo))
        d.add_errback(logger.exception)
Example 7
def log_pp_guids():
    idc.auto_wait()
    analyser = Analyser()
    if not analyser.valid:
        idc.qexit(-1)
    analyser.get_boot_services()
    analyser.get_protocols()
    analyser.get_prot_names()
    data = {}
    data['module_name'] = idaapi.get_root_filename()
    data['protocols'] = []
    for protocol_record in analyser.Protocols['all']:
        if protocol_record['protocol_name'] == 'ProprietaryProtocol':
            guid = get_guid_str(protocol_record['guid'])
            service = protocol_record['service']
            address = '{addr:#x}'.format(addr=protocol_record['address'])
            data['protocols'].append({
                'guid': guid,
                'service': service,
                'address': address
            })
    logs_dir = os.path.join(tempfile.gettempdir(), 'uefi-retool-pp-guids')
    if not os.path.isdir(logs_dir):
        os.mkdir(logs_dir)
    log_fname = os.path.join(
        logs_dir, '{}.json'.format(
            binascii.hexlify(ida_nalt.retrieve_input_file_md5()).decode()))
    with open(log_fname, 'w') as f:
        json.dump(data, f, indent=4)
    idc.qexit(0)
Example 8
    def _refresh_projects(self):
        super(SaveDialog, self)._refresh_projects()
        hash = ida_nalt.retrieve_input_file_md5().lower()
        for row in range(self._projects_table.rowCount()):
            item = self._projects_table.item(row, 0)
            project = item.data(Qt.UserRole)
            if project.hash != hash:
                item.setFlags(item.flags() & ~Qt.ItemIsEnabled)
Example 9
    def decide(self):
        """Sum up the information from all of the seen records, and decide what is the alignment pattern.

        Return Value:
            (alignment, pad byte) if a full pattern was found, (alignment, None) if there is no padding, and None on error.
        """
        # Sanity check
        if len(self._records) < 2:
            return None
        # Now check for a basic alignment rule
        seen_eas = list(map(lambda x: x[0], self._records))
        # Deterministic results per binary, but still random
        random.seed(struct.unpack("!I", ida_nalt.retrieve_input_file_md5()[:4])[0])
        while True:
            # Check against two random candidates, and always make sure the representative isn't rare
            measure_candidate = seen_eas[random.randint(0, len(seen_eas) - 1)]
            measure_candidate_alt = seen_eas[random.randint(0, len(seen_eas) - 1)]
            gcds = list(map(lambda x: gcd(measure_candidate, x), seen_eas))
            gcds_alt = list(map(lambda x: gcd(measure_candidate_alt, x), seen_eas))
            alignment = min(gcds)
            alignment_alt = min(gcds_alt)
            if alignment > alignment_alt:
                alignment = alignment_alt
                measure_candidate = measure_candidate_alt
                try_again = True
            elif alignment != alignment_alt:
                try_again = True
            else:
                try_again = False
            # Try to check if removing outliers will improve the alignment
            if try_again or gcds.count(alignment) <= len(gcds) * 0.01:
                # drop the outliers that produced the minimal gcd, and try to improve the result
                seen_eas = list(filter(lambda x: gcd(measure_candidate, x) != alignment, seen_eas))
            # we can't improve the results
            else:
                break
        # We shouldn't look for padding bytes (we have no size)
        if self._records[0][1] is None:
            return alignment
        # Alignment is 1, there is no padding to be found
        if alignment == 1:
            return (alignment, None)
        # Check if there is a common padding byte (skip the outliers)
        pad_byte = None
        for ea, size in filter(lambda x: x[0] % alignment == 0, self._records):
            for offset in range((alignment - ((ea + size) % alignment)) % alignment):
                test_byte = idc.get_wide_byte(ea + size + offset)
                if pad_byte is None:
                    pad_byte = test_byte
                # Failed to find a single padding byte...
                elif pad_byte != test_byte:
                    return (alignment, None)
        # Found a padding byte :)
        if pad_byte is not None:
            return (alignment, pad_byte)
        # There were no gaps to be padded, no padding is needed
        else:
            return (alignment, None)
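The heuristic above leans on the fact that the greatest common divisor of the observed addresses bounds their common alignment, and that a single outlier drags that gcd down. A small self-contained sketch of just that arithmetic, with made-up addresses (the real code instead compares gcds against randomly chosen representatives and prunes outliers iteratively):

from functools import reduce
from math import gcd

# Hypothetical function-start addresses: all 16-byte aligned except the last one.
seen_eas = [0x401000, 0x401230, 0x4018F0, 0x402A40, 0x403005]


def naive_alignment(eas):
    # Lower bound on the common alignment: the gcd of all observed addresses.
    return reduce(gcd, eas)


print(hex(naive_alignment(seen_eas)))       # 0x1  - the odd address ruins it
print(hex(naive_alignment(seen_eas[:-1])))  # 0x10 - once the outlier is dropped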
Example 10
    def Show(self):
        if ida_nalt.retrieve_input_file_md5() is None:
            return
        return PluginForm.Show(
            self,
            NAME,
            options=(PluginForm.WCLS_CLOSE_LATER | PluginForm.WOPN_RESTORE
                     | PluginForm.WCLS_SAVE))
Example 11
    def _refresh_projects(self):
        self._plugin.logger.debug("SaveDialog._refresh_projects()")
        super(SaveDialog, self)._refresh_projects()
        self._plugin.logger.debug("SaveDialog._refresh_projects() continue")

        if len(self._projects) == 0:
            self._plugin.logger.info("No project to display yet 3")
            return  # no project in the group yet

        hash = ida_nalt.retrieve_input_file_md5()
        if hash.endswith(b'\x00'):
            hash = hash[0:-1]
        # This decode is safe, because we have a hash in hex format
        hash = binascii.hexlify(hash).decode('utf-8')
        for row in range(self._projects_table.rowCount()):
            item = self._projects_table.item(row, 0)
            project = item.data(Qt.UserRole)
            if project.hash != hash:
                item.setFlags(item.flags() & ~Qt.ItemIsEnabled)
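Note how the examples disagree on what ida_nalt.retrieve_input_file_md5() returns: Examples 3 and 11 treat it as raw digest bytes (possibly carrying a stray trailing NUL) and hexlify it, while Examples 4, 6, 8 and 12 call .lower() on it as if it were already a hex string; the return type apparently depends on the IDA/IDAPython version the snippet was written against. A hedged helper that normalizes either shape to a lowercase hex string could look like the sketch below (the function name and fallback logic are our own convention, not an IDA API):

import binascii


def input_md5_hex(raw):
    # Normalize the value returned by ida_nalt.retrieve_input_file_md5() to a
    # lowercase hex string; accepts a hex str (older API), a raw 16-byte digest,
    # or a digest carrying the stray trailing NUL handled in Examples 3 and 11.
    if raw is None:
        raise ValueError("no input file loaded, MD5 unavailable")
    if isinstance(raw, str):                       # already a hex string
        return raw.lower()
    if len(raw) == 17 and raw.endswith(b"\x00"):   # drop the stray trailing NUL
        raw = raw[:-1]
    if len(raw) == 16:                             # raw 16-byte digest
        return binascii.hexlify(raw).decode("ascii")
    return raw.decode("ascii").lower()             # hex digits stored as bytes


print(input_md5_hex(b"\x01\x23\x45\x67\x89\xab\xcd\xef" * 2))
print(input_md5_hex("0123456789ABCDEF0123456789ABCDEF"))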
Example 12
    def __init__(self,
                 f_debug,
                 db_path,
                 min_bytes,
                 f_ex_libthunk,
                 f_update,
                 f_ana_exp,
                 ana_pre,
                 f_ana_cmp=False,
                 f_fol_cmp=False,
                 ana_fol='',
                 threshold=None,
                 threshold_cfg=None,
                 max_bytes_for_score=None,
                 ratio=0):
        self.f_debug = f_debug
        self.conn = sqlite3.connect(db_path)
        self.cur = self.conn.cursor()
        self.init_db()
        self.in_memory_db()
        self.min_bytes = min_bytes
        self.f_ex_libthunk = f_ex_libthunk
        # for export
        self.f_update = f_update
        self.f_ana_exp = f_ana_exp
        self.ana_pre = ana_pre
        if f_ana_exp:
            self.ana_pat = re.compile(self.ana_pre)
        # for compare
        self.f_ana_cmp = f_ana_cmp
        self.f_fol_cmp = f_fol_cmp
        self.ana_fol = ana_fol
        self.threshold = threshold
        self.threshold_cfg = threshold_cfg
        self.max_bytes_for_score = max_bytes_for_score
        self.ratio = float(ratio)

        self.idb_path = get_idb_path()
        self.sha256 = ida_nalt.retrieve_input_file_sha256().lower()
        self.md5 = ida_nalt.retrieve_input_file_md5().lower()
Example 13
def log_all():
    data = {}
    idc.auto_wait()
    analyser = Analyser()
    if not analyser.valid:
        idc.qexit(-1)
    analyser.get_boot_services()
    module = idaapi.get_root_filename()
    boot_services = get_boot_services(analyser)
    protocols = get_protocols(analyser)
    data['module_name'] = module
    data['boot_services'] = boot_services
    data['protocols'] = protocols
    logs_dir = os.path.join(tempfile.gettempdir(), 'uefi-retool-all-info')
    if not os.path.isdir(logs_dir):
        os.mkdir(logs_dir)
    log_fname = os.path.join(
        logs_dir, '{}.json'.format(
            binascii.hexlify(ida_nalt.retrieve_input_file_md5()).decode()))
    with open(log_fname, 'w') as f:
        json.dump(data, f, indent=4)
    idc.qexit(0)