Example #1
    def __init__(
            self,
            n_people,
            reco_seed=DEFAULT_RECO_SEED,
            muta_seed=DEFAULT_MUTA_SEED,
            fake_seed=DEFAULT_FAKE_SEED):

        """Initialize with the number of people to put in the base records.
        Provodes variables for random seeds to be passed to,
          1) the random instance used to choose columns in this class
          2) the random instance used in the mutator module
          3) the random instance used in the faker module
        """

        # initialize mutator class
        self.muta = mutator.Mutator(seed=muta_seed)

        # initialize faker
        self.fake = Faker()
        self.fake.seed(fake_seed)

        # initialize local random
        self.random = random.Random()
        self.random.seed(reco_seed)

        # initialize empty DataFrame
        df_base = pandas.DataFrame(
            np.empty((n_people, len(COLS))) * np.nan,
            columns=COLS)

        # add unique person ID
        df_base['id'] = np.arange(n_people)

        # generate fake data
        for i in range(n_people):
            df_base.loc[i, 'first_name'] = self.fake.first_name()
            df_base.loc[i, 'last_name'] = self.fake.last_name()
            df_base.loc[i, 'zipcode'] = self.fake.zipcode()

        self.n_people = n_people
        self.df_base = df_base

        # initialize index trackers
        self.indx_min = self.df_base.index.min()
        self.indx_max = self.df_base.index.max()

        # initialize mutation history tracker
        self.history = defaultdict(list)
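
A minimal usage sketch for the constructor above, assuming the enclosing class is named RecordGenerator (the class name and n_people=100 are assumptions for illustration; neither appears in the original excerpt):

# Hypothetical driver for the __init__ shown above.
gen = RecordGenerator(n_people=100)
print(gen.df_base.head())           # first few fake base records
print(gen.indx_min, gen.indx_max)   # 0 and 99 for 100 people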
Example #2
import BitArray2D
import mutator
import os
import image_utils

os.system("clear")
print "Executing Matt's GI Experiment\n"

iterationHashes = {}

#inputMatrix = image_utils.getBitArray2DFromBitmapFile(filename = "alice_and_dodo.bmp", width = 512)
#inputMatrix = BitArray2D.BitArray2D(bitstring = "11111111\n10011011\n11010101\n10011011\n10101011\n10011011\n10001101\n11111111")
inputMatrix = mutator.getRandomBitMatrix(width=256)

image_utils.writeBitArray2DImage(bitArray=inputMatrix,
                                 filename="00000" + "__initial.png")
m = mutator.Mutator(matrix=inputMatrix, topLevelInstructions=None)
iterationHashes[m.getSha1Hash()] = True

for i in xrange(0, 200):
    m.mutate(iteration=i)
    print "iteration", i, "complete"
    newHash = m.getSha1Hash()
    if newHash in iterationHashes:
        print "halting, cycle detected on iteration", i
        break
    iterationHashes[newHash] = True
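
The loop above halts as soon as the SHA-1 hash of the mutated matrix repeats, which means the mutation sequence has entered a cycle. A sketch of that pattern factored into a helper (run_until_cycle is a hypothetical name; the Mutator calls mirror the ones used above):

# Hypothetical helper illustrating the cycle-detection pattern above.
def run_until_cycle(m, max_iterations):
    seen = {m.getSha1Hash()}
    for i in xrange(max_iterations):
        m.mutate(iteration=i)
        newHash = m.getSha1Hash()
        if newHash in seen:
            return i  # cycle detected on iteration i
        seen.add(newHash)
    return None  # no cycle within max_iterations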
Example #3
import BitArray2D
import image_utils
import mutator

#
#ba[1, 1] = 1
#print ba
#print "---------------"
#
#someBit = ba[BitArray2D.godel(0, 0)]
#print "someBit =", someBit
#print "---------------"
#
#subArray = ba[BitArray2D.godel(0, 0) : BitArray2D.godel(2, 2)]
#print subArray
#print "---------------"
#
#ba[1, 1] = 0
#print subArray
#

#inputMatrix = BitArray2D.BitArray2D(bitstring = "1111\n0000\n1010\n0101")

inputMatrix = image_utils.getBitArray2DFromBitmapFile(
    filename="alice_and_dodo.bmp", width=512)
mutator.writeMatrixImage(matrix=inputMatrix, filename="test_00000.png")
m = mutator.Mutator(matrix=inputMatrix)

for i in xrange(0, 100):
    finalInstructions = m.mutate()
    print "iteration", i, "complete"
    m.printOperationStats()
    mutator.writeMatrixImage(matrix=inputMatrix,
                             filename="test_" + str(i + 1).zfill(5) + ".png")
Example #4
def run_task(task, args, config):
    """ Runs a mutation testing task with settings in `args` and `config`.

    Collects results and produces CSV data.
    """
    flash_command = config['flash_command']

    port = args.port if args.port else utils.get_default_serial_port()
    os.environ['PORT'] = port
    timeout = int(args.timeout)
    csv = args.csv
    rng = random.Random(args.seed)

    mutation = mutator.Mutator(src=task['src'],
                               mutation_patterns=task['patterns'],
                               rng=rng)
    mutations_list = mutation.generateMutants(
        mutants_per_pattern=(task['mutants_per_pattern']
                             if 'mutants_per_pattern' in task else None),
        random=args.randomize)

    data_record = []
    trials = []
    test_to_kills = {}
    run_cnt = 0
    nc = 0
    total_failures = 0
    mutant_cnt = int(args.mutants)

    failures_per_pattern = {}
    total_per_pattern = {}
    for mp in mutation.getPatterns():
        failures_per_pattern[mp] = 0
        total_per_pattern[mp] = 0

    # the outer try ensures the CSV output is still generated if the run stops early
    try:
        for occurrence in mutations_list:
            if run_cnt == mutant_cnt:
                break

            mp = occurrence.pattern
            # mutate the code
            utils.yellow_print(occurrence)
            original_line, mutated_line = mutation.mutate(occurrence)
            # rstrip(".old") strips characters, not the ".old" suffix; remove it explicitly
            file_changed = occurrence.file
            if file_changed.endswith(".old"):
                file_changed = file_changed[:-len(".old")]
            line_number = occurrence.line
            # the inner try catches compile failures so the run can continue with the next mutant
            try:
                # cmake, build, flash, and read
                output, final_flag = flash_and_read(port, timeout,
                                                    flash_command)

                # reaching here means success, so change counters
                run_cnt += 1
                total_per_pattern[mp] += 1

                # tests expected to catch
                tests_expected_to_catch = "N/A"
                if args.line_coverage:
                    tests_expected_to_catch = ",".join(
                        get_expected_catch(args.line_coverage,
                                           int(line_number)))

                # mutant_status can be "FAIL", "PASS", "CRASH", or "TIMEOUT"
                mutant_status = "FAIL"
                if final_flag == FLAGS.PassFlag:
                    utils.red_print("Mutant is Alive")
                    utils.red_print(
                        "Tests that are expected to catch this mutant are: \n{}"
                        .format(tests_expected_to_catch))
                    mutant_status = "PASS"
                else:
                    failures_per_pattern[mp] += 1
                    total_failures += 1
                    utils.green_print("Mutant is Killed")
                if final_flag == FLAGS.CrashFlag:
                    mutant_status = "CRASH"
                elif final_flag == "TIMEOUT":
                    mutant_status = "TIMEOUT"

                # Analyze the output to count per test failures
                results = re.findall(TestRegEx, output)
                for group, test, result in results:
                    if (group, test) not in test_to_kills:
                        test_to_kills[(group, test)] = (
                            (1, 1) if result == 'FAIL' else (0, 1))
                    else:
                        kills, total = test_to_kills[(group, test)]
                        test_to_kills[(group, test)] = (
                            (kills + 1, total + 1) if result == 'FAIL'
                            else (kills, total + 1))

                # Add result to CSV queue
                trials.append({
                    'file': file_changed,
                    'line': line_number,
                    'original': original_line,
                    'mutant': mutated_line,
                    'result': ("{}/KILLED".format(mutant_status)
                               if mutant_status != "PASS" else "PASS/LIVE"),
                    'expected_catch': tests_expected_to_catch,
                })
                utils.yellow_print("Successful Mutant Runs: {}/{}".format(
                    run_cnt, mutant_cnt))
            except CompileFailed:
                utils.yellow_print("Cannot compile, discard and move on")
                nc += 1
            finally:
                mutation.restore()
    except:
        traceback.print_exc()
        raise
    finally:
        mutation.cleanup()
        # calculate mutant score
        score = percentage(total_failures, run_cnt)
        utils.yellow_print("Score: {}%".format(score))
        utils.yellow_print(
            "Alive: {} Killed: {} Mutants: {} No-Compile: {} Attempted Runs: {}"
            .format(run_cnt - total_failures, total_failures, run_cnt, nc,
                    run_cnt + nc))
        trials.append({
            'file': "RESULTS:",
            'line': "{} NO-COMPILE".format(nc),
            'mutant': "SCORE",
            'original': "{} KILLED/{} MUTANTS".format(total_failures, run_cnt),
            'result': "{}%".format(score),
        })

        # aggregate pass/fail counts for each found test in test group
        aggregates = []
        for group, test in test_to_kills:
            kills, total = test_to_kills[(group, test)]
            aggregates.append({
                'Group': group,
                'Test': test,
                'Fails': kills,
                'Passes': total - kills,
                'Total': total
            })

        # pattern comparison
        for mp in total_per_pattern:
            data_record.append({
                'pattern': "{} => {}".format(mp.pattern, mp.transformation),
                'failures': failures_per_pattern[mp],
                'total': total_per_pattern[mp],
                'percentage': (float(percentage(failures_per_pattern[mp],
                                                total_per_pattern[mp])) * 0.01
                               if total_per_pattern[mp] > 0 else 2),
            })

        # log to csv
        if csv:
            csv_path = os.path.join(
                dir_path, "csvs/{}/{}".format(current_date, current_time))
            pattern_csv = os.path.join(
                csv_path, "{}_pattern_comparison.csv".format(task['name']))
            trials_csv = os.path.join(
                csv_path, "{}_mutants_created.csv".format(task['name']))
            per_test_csv = os.path.join(
                csv_path, "{}_test_aggregates.csv".format(task['name']))
            to_csv(pattern_csv, ['pattern', 'failures', 'total', 'percentage'],
                   data_record)
            to_csv(trials_csv, [
                'file', 'line', 'original', 'mutant', 'result',
                'expected_catch'
            ], trials)
            to_csv(per_test_csv, ['Group', 'Test', 'Fails', 'Passes', 'Total'],
                   aggregates)
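
The to_csv helper is not part of this excerpt. A sketch of an implementation compatible with the to_csv(path, fieldnames, rows) calls above, using csv.DictWriter and creating the target directory if needed (an assumption for illustration, not the project's actual helper):

import csv
import os

def to_csv(path, fieldnames, rows):
    # Hypothetical implementation; the project's real helper may differ.
    dir_name = os.path.dirname(path)
    if dir_name and not os.path.isdir(dir_name):
        os.makedirs(dir_name)
    with open(path, 'w') as f:
        writer = csv.DictWriter(f, fieldnames=fieldnames)
        writer.writeheader()
        for row in rows:
            writer.writerow(row)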
Example #5
import BitArray2D
import mutator

# If entropy is increasing and the desired output is increasing, apply NAND and return 1.
# Else, if entropy is decreasing and the desired output is decreasing, apply NAND and return 1.
# Otherwise return 0: NAND would not move entropy in the desired direction, so do nothing.
# At the top layer, the top instruction is the result.
# The inc/dec direction stays constant going down the layers, along with the top instruction.

iterationHashes = {}

#inputMatrix = image_utils.getBitArray2DFromBitmapFile(filename = "alice_and_dodo.bmp", width = 512)
inputMatrix = BitArray2D.BitArray2D(
    bitstring="00000000\n00011011\n01010101\n00011011\n"
              "10101010\n00011011\n11111111\n00011011")
mutator.writeMatrixImage(matrix=inputMatrix, filename="test_00000.png")
m = mutator.Mutator(matrix=inputMatrix,
                    topLevelInstructions="0101",
                    targetEntropy=0.5)
iterationHashes[m.getSha1Hash()] = True

for i in xrange(0, 3):
    m.mutate()
    print "iteration", i, "complete"
    mutator.writeMatrixImage(matrix=m.matrix,
                             filename="test_" + str(i + 1).zfill(5) + ".png")

    newHash = m.getSha1Hash()
    if newHash in iterationHashes:
        print "halting, cycle detected on iteration", i
        break
    iterationHashes[newHash] = True
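
The header comments of this example describe a per-layer rule: apply NAND only when it moves the matrix entropy toward the target. A sketch of that decision in isolation (the function name and the entropy_of helper are assumptions for illustration, not the Mutator's actual internals):

# Hypothetical illustration of the decision rule described in the header comments.
def should_apply_nand(current, after_nand, target_entropy, entropy_of):
    delta = entropy_of(after_nand) - entropy_of(current)
    want_increase = target_entropy > entropy_of(current)
    if delta > 0 and want_increase:
        return 1  # NAND increases entropy and an increase is desired
    if delta < 0 and not want_increase:
        return 1  # NAND decreases entropy and a decrease is desired
    return 0      # NAND would not move entropy in the desired direction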