Example 1
    def is_same_crash(self):

        # get debugger output filename
        (fd, f) = tempfile.mkstemp(dir=self.tempdir,
                                   prefix="minimizer_is_same_crash_")
        os.close(fd)
        if os.path.exists(f):
            delete_files(f)
        if os.path.exists(f):
            raise MinimizerError('Unable to get temporary debug file')

        # create debugger output
        dbg = self.run_debugger(self.tempfile, f)

        if dbg.is_crash:
            newfuzzed_hash = self.get_signature(dbg, self.backtracelevels)
        else:
            newfuzzed_hash = None
        # initialize or increment the counter for this hash
        if newfuzzed_hash in self.crash_sigs_found:
            self.crash_sigs_found[newfuzzed_hash] += 1
        elif not newfuzzed_hash:
            # don't do anything with non-crashes
            pass
        else:
            # the testcase is new to this minimization run
            self.crash_sigs_found[newfuzzed_hash] = 1
            self.logger.info('testcase=%s signal=%s', newfuzzed_hash,
                             dbg.signal)

            if self.save_others and newfuzzed_hash not in self.crash_hashes:
                # the testcase is not one of the crashes we're looking for
                # so add it to the other_crashes dict in case our
                # caller wants to do something with it
                newcrash = self._crash_builder()
                if newcrash.is_crash:
                    # note that since we're doing this every time we see a testcase
                    # that's not in self.crash_hashes, we're also effectively
                    # keeping only the smallest hamming distance version of
                    # newfuzzed_hash as we progress through the minimization
                    # process
                    self.other_crashes[newfuzzed_hash] = newcrash

        # ditch the temp file
        delete_files(dbg.file)
        if os.path.exists(dbg.file):
            raise MinimizerError('Unable to remove temporary debug file')

        return newfuzzed_hash in self.crash_hashes
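
The initialize-or-increment bookkeeping above is the standard counting-dict pattern. A minimal standalone sketch of the same logic using collections.Counter (record_signature and the print call are illustrative stand-ins for the method's dict handling and logger call):

from collections import Counter

crash_sigs_found = Counter()

def record_signature(sig):
    # non-crashes yield no signature; ignore them, as the method above does
    if sig is None:
        return
    if sig not in crash_sigs_found:
        # first sighting of this signature in the current run
        print('new testcase signature: %s' % sig)
    crash_sigs_found[sig] += 1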
Example 2
    def _raise(self, description):
        # Minimizer has separate logging. Close up its handles before
        # raising any exception
        try:
            self.log_file_hdlr.close()
            self.logger.removeHandler(self.log_file_hdlr)
        except Exception:
            pass
        raise MinimizerError(description)
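
The close-before-raise pattern above keeps the minimizer's log file from being left open when the exception unwinds. A standalone sketch of the same idea, assuming a plain logging.FileHandler and using RuntimeError in place of MinimizerError:

import logging

logger = logging.getLogger('minimizer')
log_file_hdlr = logging.FileHandler('minimizer.log')
logger.addHandler(log_file_hdlr)

def fail(description):
    # flush and detach the file handler first, so the log file is
    # closed even though the exception propagates past this frame
    try:
        log_file_hdlr.close()
        logger.removeHandler(log_file_hdlr)
    except Exception:
        pass
    raise RuntimeError(description)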
Example 3
    def swap_bytes(self):
        newfuzzed = []
        newfuzzed_hd = self.min_distance

        if not 0.0 < self.discard_chance < 1.0:
            raise MinimizerError("Discard chance out of range")

        # the swap could yield a zero-distance result (identical to the
        # seed) or drop no bytes at all (distance unchanged), so keep
        # trying until the new hamming distance is nonzero and smaller
        # than the current minimum
        while not (0 < newfuzzed_hd < self.min_distance):
            newfuzzed, newfuzzed_hd = self.swap_func(self.seed,
                                                     self.fuzzed_content)

        # we know our hd is > 0 and < what it was when we started
        self.newfuzzed = newfuzzed
        self.newfuzzed_hd = newfuzzed_hd
        self.newfuzzed_md5 = hashlib.md5(''.join(self.newfuzzed)).hexdigest()
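
swap_func is supplied elsewhere in the class and is expected to honor self.discard_chance. A plausible minimal sketch of such a function (discard_chance is passed explicitly here for self-containment; seed and fuzzed are assumed to be equal-length sequences of single bytes):

import random

def swap_func_sketch(seed, fuzzed, discard_chance):
    # wherever fuzzed differs from seed, revert the fuzzed byte back to
    # the seed byte with probability discard_chance; return the new
    # candidate and its hamming distance from the seed
    newfuzzed = []
    hd = 0
    for s, f in zip(seed, fuzzed):
        if s != f and random.random() < discard_chance:
            newfuzzed.append(s)   # discard this fuzzed byte
        else:
            newfuzzed.append(f)
            if s != f:
                hd += 1           # byte still differs from the seed
    return newfuzzed, hd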
Example 4
    def go(self):
        # start by copying the fuzzed_content file since as of now it's our
        # best fit
        filetools.copy_file(self.testcase.fuzzedfile.path, self.outputfile)

        # replace the fuzzedfile object in testcase with the minimized copy
        self.testcase.fuzzedfile = BasicFile(self.outputfile)

        self.logger.info('Attempting to minimize testcase(s) [%s]',
                         self._crash_hashes_string())

        # keep going until either:
        # a. we find a minimum hd of 1
        # b. we run out of discard_chances
        # c. our discard_chance * minimum hd is less than one (we won't discard anything)
        # d. we've exhaustively searched all the possible files with hd less
        # than self.min_distance
        while not self.min_found and not self.try_exhaustive:

            if not self.set_discard_chance():
                break

            if not self.set_n_misses():
                break

            got_hit = False
            while self.consecutive_misses <= self.n_misses_allowed:

                if self.use_watchdog:
                    # touch the watchdog file so we don't reboot during long
                    # minimizations
                    open(self.watchdogfile, 'w').close()

                # Fix for BFF-208
                if self._time_exceeded():
                    logger.info(
                        'Max time for minimization exceeded, ending minimizer early.'
                    )
                    self.min_found = True
                    break

                if not self.set_discard_chance():
                    break

                if not self.set_n_misses():
                    break

                self.swap_bytes()

                self.total_tries += 1

                # have we been at this level before?
                if not self.files_tried_at_hd.get(self.min_distance):
                    # we've reached a new minimum, so create new sets
                    self.files_tried_at_hd[self.min_distance] = set()
                    self.files_tried_singlebyte_at_hd[
                        self.min_distance] = set()

                # have we exhausted all the possible files with smaller hd?
                possible_files = (2**self.min_distance) - 2
                seen_files = len(self.files_tried_at_hd[self.min_distance])

                # maybe we're done?
                if seen_files == possible_files:
                    # we've exhaustively searched everything with hd <
                    # self.min_distance
                    self.logger.info(
                        'Exhaustively searched all files with hamming distance less than %d',
                        self.min_distance)
                    self.min_found = True
                    break

                # have we exhausted all files that are 1 byte smaller hd?
                possible_singlebyte_diff_files = self.min_distance
                singlebyte_diff_files_seen = len(
                    self.files_tried_singlebyte_at_hd[self.min_distance])

                # maybe we're done?
                if singlebyte_diff_files_seen == possible_singlebyte_diff_files:
                    self.logger.info(
                        'We have tried all %d files that are one byte closer than the current minimum',
                        self.min_distance)
                    self.min_found = True
                    break

                # remember this file for next time around
                self.files_tried_at_hd[self.min_distance].add(
                    self.newfuzzed_md5)
                if self.newfuzzed_hd == (self.min_distance - 1):
                    self.files_tried_singlebyte_at_hd[self.min_distance].add(
                        self.newfuzzed_md5)

                self.print_intermediate_log()

                if self.newfuzzed_md5 in self.files_tried:
                    # we've already seen this attempt, so skip ahead to the
                    # next one, but still count it as a miss: our math assumes
                    # sampling with replacement (the marbles go back in the
                    # jar after each draw)
                    self.consecutive_misses += 1
                    self.total_misses += 1
                    continue

                # we didn't skip ahead, so it must have been new. Remember it
                # now
                self.files_tried.add(self.newfuzzed_md5)

                # we have a better match, write it to a file
                if not self.newfuzzed:
                    raise MinimizerError('New fuzzed content is empty.')

                self._write_file()

                if self.is_same_crash():
                    # record the result
                    # 1. copy the tempfile
                    filetools.best_effort_move(self.tempfile, self.outputfile)
                    # 2. replace the fuzzed_content file in the crasher with
                    # the current one
                    self.testcase.fuzzedfile = BasicFile(self.outputfile)
                    # 3. replace the current fuzzed_content with newfuzzed
                    self.fuzzed_content = self.newfuzzed
                    self.min_distance = self.newfuzzed_hd

                    got_hit = True

                    if self.min_distance == 1:
                        # we are done
                        self.min_found = True
                    elif self.newfuzzed_hd <= self.exhaustivesearch_threshold:
                        self._set_bytemap()
                        logger.info(
                            'Exhaustively checking remaining %d bytes',
                            self.newfuzzed_hd)
                        self.try_exhaustive = True
                        break
                    else:
                        # set up for next iteration
                        self.consecutive_misses = 0
                        if not self.set_discard_chance():
                            break
                        if not self.set_n_misses():
                            break
                else:
                    # we missed. increment counter and try again
                    self.total_misses += 1
                    self.consecutive_misses += 1

                    # Fix for BFF-225
                    # Some situations can break testcase uniqueness hashing
                    # (e.g. BFF-224). Bail out if the number of unique
                    # crashes encountered exceeds MAX_OTHER_CRASHES.
                    if (len(self.other_crashes) > MAX_OTHER_CRASHES
                            and self.seedfile_as_target):
                        logger.info(
                            'Exceeded maximum number of other crashes (%d), ending minimizer early.',
                            MAX_OTHER_CRASHES)
                        self.min_found = True
                        break

            if not got_hit:
                # we are confident (at self.confidence_level) that
                # self.target_size_guess is too low, so increment it by 1
                self.target_size_guess += 1

        if self.try_exhaustive:
            for offset in list(self.bytemap):
                logger.debug('Verifying byte location: %s', hex(offset))
                self.revert_byte(offset)
                self._write_file()
                if self.is_same_crash():
                    logger.debug('Fuzzed byte at offset %s is not relevant',
                                 hex(offset))
                    filetools.best_effort_move(self.tempfile, self.outputfile)
                    self.testcase.fuzzedfile = BasicFile(self.outputfile)
                    self.fuzzed_content = self.newfuzzed
                    self.bytemap.remove(offset)

        # We're done minimizing. Set the bytemap (kept bytes)
        self._set_bytemap()
        self.logger.info('We were looking for [%s] ...',
                         self._crash_hashes_string())
        for (md5, count) in self.crash_sigs_found.items():
            self.logger.info('\t...and found %s\t%d times', md5, count)
        if self.bytemap:
            hex_bytemap = [hex(offset) for offset in self.bytemap]
            self.logger.info('Bytemap: %s', hex_bytemap)
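
The exhaustion checks in go() rest on a short counting argument: a candidate with hamming distance strictly between 0 and min_distance corresponds to keeping a non-empty proper subset of the min_distance fuzzed byte positions, giving 2**min_distance - 2 possible files, of which exactly min_distance are a single byte closer to the seed. A quick illustration (hd = 4 is an arbitrary example value):

from itertools import combinations

hd = 4  # hypothetical current minimum hamming distance
positions = range(hd)

# a candidate keeps some, but not all, of the hd fuzzed byte positions:
# keeping all reproduces the current file, keeping none reproduces the seed
candidates = [c for r in range(1, hd) for c in combinations(positions, r)]
assert len(candidates) == 2 ** hd - 2

# candidates one byte closer to the seed keep exactly hd - 1 fuzzed bytes
one_byte_closer = [c for c in candidates if len(c) == hd - 1]
assert len(one_byte_closer) == hd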