Exemplo n.º 1
0
    def buildCorpus(self):
        """Build the initial corpus and collect its baseline coverage.

        Reads every file in ``self.project.corpus_dir``, replays each payload
        5 times against the target and accumulates the observed basic-block
        coverage in ``self.accumulated_coverage``.  A drcov file is written
        per input file for later inspection.

        Returns:
            bool: True when the corpus was processed, False when the corpus
            directory is empty.
        """
        log.info("Initializing Corpus...")

        # Reset accumulated coverage so a restart (with ASLR re-randomizing
        # module addresses) does not keep stale basic blocks around.
        self.accumulated_coverage = set()

        corpus = sorted(
            self.project.corpus_dir + "/" + x
            for x in os.listdir(self.project.corpus_dir)
        )

        if not corpus:
            log.warn(
                "Corpus is empty, please add files/directories with 'add'")
            return False

        for infile in corpus:
            # Context manager closes the handle (the original
            # open(...).read() leaked the file descriptor).
            with open(infile, "rb") as f:
                fuzz_pkt = f.read()

            coverage_last = None
            for i in range(5):
                t = time.strftime("%Y-%m-%d %H:%M:%S")
                log.update(t + " [iteration=%d] %s" % (i, infile))

                # send packet to target
                coverage = self.getCoverageOfPayload(fuzz_pkt)
                if not coverage:
                    log.warn(
                        "No coverage was returned! you might want to delete %s from corpus if it happens more often"
                        % infile)
                    # Skip accumulation: union(None) would raise TypeError
                    # in the original code when coverage is None.
                    continue

                # Replaying the same payload should be deterministic; warn
                # when two iterations disagree (flaky target / timing).
                if coverage_last is not None and coverage_last != coverage:
                    log.warn(t +
                             " [iteration=%d] Inconsistent coverage for %s!" %
                             (i, infile))

                coverage_last = coverage
                # Accumulate coverage:
                self.accumulated_coverage = self.accumulated_coverage.union(
                    coverage_last)

            # Only persist a drcov file when at least one iteration produced
            # coverage; write_drcov_file cannot serialize None.
            if coverage_last is not None:
                write_drcov_file(
                    self.modules, coverage_last,
                    self.project.coverage_dir + "/" + infile.split("/")[-1])

        log.finish_update(
            "Using %d input files which cover a total of %d basic blocks!" %
            (len(corpus), len(self.accumulated_coverage)))
        self.corpus = corpus
        return True
Exemplo n.º 2
0
    def doIteration(self, seed=None, corpus=None):
        """Run one fuzzing pass: mutate every corpus file and replay it.

        Each corpus file is mutated with radamsa (seeded for reproducibility),
        logged to the history file, and sent to the target.  Inputs that reach
        new basic blocks are promoted into the corpus; a frida transport error
        is treated as a crash and the offending payload is saved.

        Args:
            seed: radamsa seed; defaults to ``self.project.seed``.
            corpus: list of input file paths; defaults to ``self.corpus``.

        Returns:
            bool: True when the whole pass completed, False when the target
            crashed (payload written to the crash directory).
        """
        if seed is None:
            seed = self.project.seed
        if corpus is None:
            corpus = self.corpus

        start_time = time.time()
        for pkt_file in corpus:
            log.update("[seed=%d] " % seed +
                       time.strftime("%Y-%m-%d %H:%M:%S") + " %s" % pkt_file)
            # Deterministic mutation: same seed + same file => same payload.
            fuzz_pkt = check_output(["radamsa", "-s", str(seed), pkt_file])

            # Append to the history file for replaying; the context manager
            # closes the handle (the original leaked one descriptor per pkt).
            with open(self.project.project_dir + "/frida_fuzzer.history",
                      "a") as history:
                history.write(str(pkt_file) + "|" + str(seed) + "\n")

            try:
                coverage = self.getCoverageOfPayload(fuzz_pkt)
            except (frida.TransportError, frida.InvalidOperationError) as e:
                # Transport loss usually means the target died => crash.
                log.warn("doIteration: Got a frida error: " + str(e))
                log.info("Current iteration: " +
                         time.strftime("%Y-%m-%d %H:%M:%S") +
                         " [seed=%d] [file=%s]" % (seed, pkt_file))
                crash_file = self.project.crash_dir + time.strftime(
                    "/%Y%m%d_%H%M%S_crash")
                with open(crash_file + "_" + self.project.pid, "wb") as f:
                    f.write(fuzz_pkt)
                log.info("Payload is written to " + crash_file)
                self.project.crashes += 1
                return False

            if coverage is None:
                log.warn("No coverage was generated for [%d] %s!" %
                         (seed, pkt_file))
                continue

            if not coverage.issubset(self.accumulated_coverage):
                # New basic blocks covered!
                log.info("Found new path: [%d] %s" % (seed, pkt_file))
                # Promote the mutated payload into the corpus.
                with open(
                        self.project.corpus_dir + "/" + str(seed) + "_" +
                        pkt_file.split("/")[-1], "wb") as newfile:
                    newfile.write(fuzz_pkt)

                cov_file = self.project.coverage_dir + "/" + pkt_file.split(
                    "/")[-1]
                write_drcov_file(self.modules, coverage, cov_file)
                # Also dump only the newly discovered blocks for triage.
                write_drcov_file(
                    self.modules,
                    coverage.difference(self.accumulated_coverage),
                    cov_file + "_diff")

                self.project.last_new_path = seed
                self.accumulated_coverage = self.accumulated_coverage.union(
                    coverage)

            self.total_executions += 1

        end_time = time.time()
        speed = len(corpus) / (end_time - start_time)
        avg_speed = self.total_executions / (end_time - self.start_time)

        log.finish_update(
            "[seed=%d] speed=[%3d exec/sec (avg: %d)] coverage=[%d bblocks] corpus=[%d files] "
            "last new path: [%d] crashes: [%d]" %
            (seed, speed, avg_speed, len(self.accumulated_coverage),
             len(corpus), self.project.last_new_path, self.project.crashes))
        return True
Exemplo n.º 3
0
    def doMinimize(self):
        """Minimize the current corpus by coverage subsumption.

        First collects (5 retries each) the accumulated coverage of every
        corpus file, then moves each file whose coverage is a subset of
        another file's coverage into the corpus trash directory.

        Returns:
            bool: True when minimization ran, False when the corpus is empty.
        """
        log.info("Minimizing Corpus...")
        # Reset the accumulated coverage
        self.accumulated_coverage = set()

        corpus = sorted(
            self.project.corpus_dir + "/" + x
            for x in os.listdir(self.project.corpus_dir)
        )

        if not corpus:
            log.warn(
                "Corpus is empty, please use the 'add' subcommand to add files to it."
            )
            return False

        # Collect coverage
        dict_of_infile_coverages = {}
        loop_counter = 0
        for infile in corpus:
            loop_counter += 1
            # Context manager closes the handle (original leaked it).
            with open(infile, "rb") as f:
                fuzz_pkt = f.read()
            failed_coverage_count = 0
            tmp_accu_cov = set()
            RETRIES = 5
            for i in range(RETRIES):
                t = time.strftime("%Y-%m-%d %H:%M:%S")
                log.update(
                    t +
                    " Collecting coverage for corpus files (%d/%d) ... [iteration=%d] %s"
                    % (loop_counter, len(corpus), i, infile))

                # send packet to target
                coverage = self.getCoverageOfPayload(fuzz_pkt, timeout=0.2)
                if not coverage:
                    failed_coverage_count += 1
                    continue

                # Accumulate coverage:
                tmp_accu_cov = tmp_accu_cov.union(coverage)

            if failed_coverage_count == RETRIES:
                log.warn("Coverage for %s was always 0 (%d retries)" %
                         (infile, RETRIES))
                # note: file will be removed later..

            dict_of_infile_coverages[infile] = tmp_accu_cov
            self.accumulated_coverage = self.accumulated_coverage.union(
                tmp_accu_cov)
            write_drcov_file(
                self.active_target.modules, tmp_accu_cov,
                self.project.coverage_dir + "/" + infile.split("/")[-1])

        log.finish_update(
            "Collected coverage for corpus (%d basic blocks from %d files in corpus)"
            % (len(self.accumulated_coverage), len(corpus)))

        # Filter all corpus files with a coverage that is a direct subset of another corpus file
        # NOTE(review): two files with identical coverage are mutual subsets,
        # so both can end up in the trash — confirm this is intended.
        loop_counter = 0
        for infile in corpus:
            loop_counter += 1
            log.update(
                "(%d/%d) Comparing %s (%d bblocks) against rest of the corpus..."
                % (loop_counter, len(corpus), infile,
                   len(dict_of_infile_coverages[infile])))
            for other_infile in [f for f in corpus if f != infile]:
                if dict_of_infile_coverages[infile].issubset(
                        dict_of_infile_coverages[other_infile]):
                    log.info(
                        "%s coverage is direct subset of %s. Moving to trash..."
                        % (infile, other_infile))
                    backup_file = self.project.corpus_trash_dir + "/" + infile.split(
                        "/")[-1]
                    shutil.move(infile, backup_file)
                    break

        corpus_new = [
            self.project.corpus_dir + "/" + x
            for x in os.listdir(self.project.corpus_dir)
        ]
        # NOTE(review): this union spans ALL original files, including the
        # trashed ones, so the reported block count may exceed what the
        # remaining files cover — preserved from the original behavior.
        acc_cov_new = set.union(*dict_of_infile_coverages.values())
        log.finish_update(
            "Remaining input files: %d (total of %d basic blocks)." %
            (len(corpus_new), len(acc_cov_new)))
        self.corpus = corpus_new
        return True