Example #1
def test_control_flow():
    state = main([
        "hase", "replay",
        str(
            TEST_BIN.join("control_flow",
                          "control_flow-20180515T180451.tar.gz"))
    ])
Example #2
def test_loopy():
    state = main([
        "hase", "replay",
        str(TEST_BIN.join("loopy", "loopy-20180514T145114.tar.gz"))
    ])
    last_state = state[-1]
    # We called loopy with 6 arguments:
    # ./loopy a b c d e
    nose.tools.eq_(argc(last_state.simstate), 6)
Example #3
def test_control_flow():
    control_flow = TEST_ROOT.bin.join("control_flow")
    exe = control_flow.join("control_flow").str()
    core = control_flow.join("control_flow-20180404T163033.coredump").str()
    trace = control_flow.join("control_flow-20180404T163033.trace").str()

    state = main(["hase", "replay", exe, trace, core])
    last_state = state[-1]
    nose.tools.eq_(
        last_state.simstate.solver.eval(last_state.simstate.regs.rip),
        0x400a05)
Example #4
def test_control_loopy():
    loopy_dir = TEST_ROOT.bin.join("loopy")
    exe = loopy_dir.join("loopy").str()
    trace = loopy_dir.join("loopy-20180404T162955.trace").str()
    core = loopy_dir.join("loopy-20180404T162955.coredump").str()

    state = main(["hase", "replay", exe, trace, core])
    last_state = state[-1]
    # We called loopy with 6 arguments:
    # ./loopy a b c d e
    nose.tools.eq_(argc(last_state.simstate), 6)
Example #5
def test_record_command() -> None:
    """
    Full integration test
    """
    if os.geteuid() != 0:
        raise SkipTest("Requires root")
    with TemporaryDirectory() as tempdir:
        temppath = Path(tempdir)
        pid_file = str(temppath.joinpath("record.pid"))
        # generate coredump
        loopy = str(TEST_BIN.joinpath("loopy"))
        argv = [
            "hase",
            "record",
            "--log-dir",
            str(tempdir),
            "--limit",
            "1",
            "--pid-file",
            pid_file,
            loopy,
            "a",
            "b",
            "c",
            "d",
            "e",
        ]
        global process

        # Python's multiprocessing replaces stdin with /dev/null in the child
        # process, but we want a real stdin for pdb
        stdin_copy = open("/proc/self/fd/0")

        def mymain(args):
            # type: (List[str]) -> None
            sys.stdin = stdin_copy
            main(args)

        process = Process(target=mymain, args=(argv,))
        process.start()

        while not os.path.exists(pid_file):
            nose.tools.assert_true(process.is_alive())
            sleep(0.1)

        process.join()

        archives = list(temppath.glob("*.tar.gz"))
        nose.tools.assert_equal(len(archives), 1)

        states = main(["hase", "replay", str(archives[0])])
        nose.tools.assert_true(len(states) > 10)
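This test synchronizes with the child recorder by polling for the pid file that hase record was asked to write. Below is a minimal sketch of the same idea with an explicit upper bound, so a recorder that never gets that far fails quickly instead of looping forever; the helper name and the 30-second default are illustrative choices, not part of hase.

import os
from multiprocessing import Process
from time import monotonic, sleep


def wait_for_pid_file(pid_file: str, recorder: Process, timeout: float = 30.0) -> None:
    # Poll until the recorder has written its pid file, but give up if the
    # child dies or the timeout expires (helper name and timeout are assumptions).
    deadline = monotonic() + timeout
    while not os.path.exists(pid_file):
        assert recorder.is_alive(), "recorder exited before writing its pid file"
        assert monotonic() < deadline, "timed out waiting for the pid file"
        sleep(0.1)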
Example #6
def test_loopy() -> None:
    state = main([
        "hase", "replay",
        str(TEST_TRACES.joinpath("loopy-20181009T182008.tar.gz"))
    ])
    (last_state, is_new) = state[-2]
    # We called loopy with 6 arguments:
    # ./loopy a b c d e
    s = last_state.simstate
    # loopy does not touch rsp so we can get the location of argc by dereferencing
    # the top of the stack
    argc = s.mem[s.solver.eval(s.regs.rsp)].uint64_t.concrete
    nose.tools.eq_(argc, 6)
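The comments above rely on the System V x86_64 process-startup layout: at the entry point, argc is stored at [rsp] and the argv pointers follow it, so a program that never moves rsp (like loopy here) still exposes them in the final state. Below is a minimal sketch that generalizes the same read; it assumes an angr SimState s whose rsp still points at that initial frame, and the helper name is illustrative, not part of hase.

def initial_argc_and_argv0(s):
    # [rsp] holds argc, [rsp + 8] holds the pointer to argv[0] at the ELF entry
    # point; this only works while rsp still points at that initial frame.
    rsp = s.solver.eval(s.regs.rsp)
    argc = s.mem[rsp].uint64_t.concrete
    argv0_ptr = s.mem[rsp + 8].uint64_t.concrete
    return argc, argv0_ptr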
Example #7
def test_control_flow() -> None:
    state = main([
        "hase",
        "replay",
        str(TEST_TRACES.joinpath("control_flow-20181003T145029.tar.gz")),
    ])
Example #8
    def test_meta_analysis(self):
        # Reference for mapper has to be downloaded
        assert os.path.isfile(
            os.path.join(self.project_root, "data", "1000Gp1v3.ref.gz"))
        assert os.path.isfile(
            os.path.join(self.project_root, "data", "1000Gp1v3.ref_info.h5"))

        # Start by running the mapper
        mapper_directory = os.path.join(self.test_dir, "mapper", "")
        hdf5_genotype_directory = os.path.join(self.resources_dir,
                                               "exampledataset",
                                               "genotypes_hdf5")
        mapper.main([
            "-g", hdf5_genotype_directory, "-study_name", "dosage", "-o",
            mapper_directory
        ])

        # Encode the genotype and phenotype files
        encoding_output = os.path.join(self.test_dir, "encoded")
        phenotype_matrix = os.path.join(self.resources_dir, "exampledataset",
                                        "phenotype")
        hase.main([
            "-g", hdf5_genotype_directory, "-study_name", "dosage", "-o",
            encoding_output, "-mapper", mapper_directory, "-ph",
            phenotype_matrix, "-mode", "encoding"
        ])

        # Calculate partial derivatives
        partial_derivatives = os.path.join(self.test_dir, "pd")
        hase.main([
            "-g", hdf5_genotype_directory, "-study_name", "dosage", "-o",
            partial_derivatives, "-mapper", mapper_directory, "-ph",
            phenotype_matrix, "-cov",
            os.path.join(self.resources_dir, "exampledataset",
                         "covariates"), "-mode"
            "encoding"
        ])

        # Create directory structure required for meta-analysis
        # Define directory names.
        genotype_meta = os.path.join(self.test_dir, "meta", "genotype_encoded")
        individuals_meta = os.path.join(genotype_meta, "individuals")
        actual_genotype_meta = os.path.join(genotype_meta, "genotype")
        phenotype_meta = os.path.join(self.test_dir, "meta", "phenotype")
        partial_derivatives_meta = os.path.join(self.test_dir, "meta",
                                                "pd_shared")

        # Make the required directories; os.makedirs also creates the missing
        # parents ("meta" and "genotype_encoded")
        os.makedirs(actual_genotype_meta)
        os.makedirs(individuals_meta)
        os.makedirs(partial_derivatives_meta)
        os.makedirs(phenotype_meta)

        # Copy the required data; distutils.dir_util.copy_tree accepts an
        # existing destination, which genotype_meta now is
        distutils.dir_util.copy_tree(
            os.path.join(hdf5_genotype_directory, "probes"), genotype_meta)
        distutils.dir_util.copy_tree(
            os.path.join(encoding_output, "encode_individuals"),
            individuals_meta)
        distutils.dir_util.copy_tree(
            os.path.join(encoding_output, "encode_genotype"),
            actual_genotype_meta)
        distutils.dir_util.copy_tree(
            os.path.join(encoding_output, "encode_phenotype"), phenotype_meta)
        for file in glob.glob(os.path.join(partial_derivatives, "*.npy")):
            shutil.copy(file, partial_derivatives_meta)

        # Perform meta-analysis
        results_directory = os.path.join(self.test_dir, "results")
        hase.main([
            "-g", genotype_meta, "-study_name", "dosage", "-ph",
            phenotype_meta, "-derivatives", partial_derivatives_meta,
            "-mapper", mapper_directory, "-o", results_directory, "-mode"
            "meta-stage"
        ])

        # (Generate the output file)

        # Read in the actual results
        hase_results = get_hase_results(results_directory)
        # Compare against the expected results from an sklearn regression
        difference = get_sklearn_regression_difference(self.resources_dir,
                                                       hase_results)
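The mkdir and copy calls above assemble a fixed directory layout under <test_dir>/meta (encoded genotypes plus individuals, encoded phenotypes, and the shared partial derivatives) before hase is run in meta-stage mode. As an illustration of that structure only, here is a small hypothetical helper (not part of HASE) that creates the same directories with pathlib; the directory names mirror the ones used in the test, everything else is an assumption.

from pathlib import Path


def build_meta_layout(test_dir: str) -> dict:
    # Create the meta-analysis directory tree used by the test above and
    # return the paths by role; helper name and return shape are illustrative.
    meta_root = Path(test_dir) / "meta"
    layout = {
        "genotype": meta_root / "genotype_encoded",
        "individuals": meta_root / "genotype_encoded" / "individuals",
        "genotype_data": meta_root / "genotype_encoded" / "genotype",
        "phenotype": meta_root / "phenotype",
        "pd_shared": meta_root / "pd_shared",
    }
    for path in layout.values():
        path.mkdir(parents=True, exist_ok=True)
    return {name: str(path) for name, path in layout.items()}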