Example #1
0
File: test.py  Project: DSLituiev/FaST-LMM
    if True:  #Standard test run
        # Run the collected suites in-process with the stock unittest runner.
        r = unittest.TextTestRunner(failfast=False)
        r.run(suites)
    else:  #Cluster test run
        task_count = 150
        # NOTE(review): the HPC runner built here is immediately overwritten
        # below — the successive assignments act as a manual toggle between
        # cluster / local / multi-process execution. Kept as-is so the HPC
        # configuration stays ready to re-enable.
        runner = HPC(
            task_count,
            'RR1-N13-09-H44',
            r'\\msr-arrays\Scratch\msr-pool\Scratch_Storage6\Redmond',
            remote_python_parent=
            r"\\msr-arrays\Scratch\msr-pool\Scratch_Storage6\REDMOND\carlk\Source\carlk\july_7_14\pythonpath",
            update_remote_python_parent=True,
            min=150,
            priority="AboveNormal",
            mkl_num_threads=1)
        runner = Local()
        runner = LocalMultiProc(taskcount=20, mkl_num_threads=5)
        #runner = LocalInParts(1,2,mkl_num_threads=1) # For debugging the cluster runs
        #runner = Hadoop2(100, mapmemory=8*1024, reducememory=8*1024, mkl_num_threads=1, queue="default")
        distributable_test = DistributableTest(suites, "temp_test")
        # print(...) with a single argument is valid on both Python 2 and 3
        # (the original Python-2-only `print expr` statement broke on Python 3).
        print(runner.run(distributable_test))

    # Report how many expected-results files are platform-specific.
    debian_count = len(os.listdir('expected-debian'))
    if debian_count > 0:
        # logging.warn() is deprecated; logging.warning() is the documented API.
        logging.warning(
            "The tests contain {0} expected-results files that differ between Windows and Debian"
            .format(debian_count))

    logging.info("done with testing")
Example #2
0
    if True:  #Standard test run
        # Execute every collected suite in this process.
        text_runner = unittest.TextTestRunner(failfast=False)
        text_runner.run(suites)
    else:  #Cluster test run
        logging.basicConfig(level=logging.INFO)

        from fastlmm.util.distributabletest import DistributableTest

        remote_python_parent = r"\\GCR\Scratch\RR1\escience\carlk\data\carlk\pythonpath06292016"
        # Preemptable two-node run on the GCR cluster, capped at ~11 minutes.
        runner = HPC(2, 'GCR', r"\\GCR\Scratch\RR1\escience",
                     remote_python_parent=remote_python_parent,
                     unit='node',  #core, socket, node
                     update_remote_python_parent=True,
                     template="Preemptable",
                     priority="Lowest",
                     nodegroups="Preemptable",
                     #excluded_nodes=['gcrcn0231'],
                     runtime="0:11:0",  # day:hour:min
                     max=10)
        # Alternative runners, kept for quick switching:
        #runner = Local()
        #runner = LocalMultiProc(taskcount=2,mkl_num_threads=5,just_one_process=False)
        #runner = LocalInParts(0,2,mkl_num_threads=1) # For debugging the cluster runs
        #runner = Hadoop(100, mapmemory=8*1024, reducememory=8*1024, mkl_num_threads=1, queue="default")
        distributable_test = DistributableTest(suites, "temp_test")
        print(runner.run(distributable_test))

    logging.info("done with testing")
Example #3
0
    if True: #Standard test run
        # Run the collected suites in-process with the stock unittest runner.
        r = unittest.TextTestRunner(failfast=False)
        r.run(suites)
    else: #Cluster test run
        logging.basicConfig(level=logging.INFO)

        from fastlmm.util.distributabletest import DistributableTest

        remote_python_parent = r"\\GCR\Scratch\RR1\escience\carlk\data\carlk\pythonpath06292016"
        # Preemptable two-node run on the GCR cluster, capped at ~11 minutes.
        runner = HPC(2, 'GCR', r"\\GCR\Scratch\RR1\escience",
                     remote_python_parent=remote_python_parent,
                     unit='node',  #core, socket, node
                     update_remote_python_parent=True,
                     template="Preemptable",
                     priority="Lowest",
                     nodegroups="Preemptable",
                     #excluded_nodes=['gcrcn0231'],
                     runtime="0:11:0",  # day:hour:min
                     max=10)
        #runner = Local()
        #runner = LocalMultiProc(taskcount=2,mkl_num_threads=5,just_one_process=False)
        #runner = LocalInParts(0,2,mkl_num_threads=1) # For debugging the cluster runs
        #runner = Hadoop(100, mapmemory=8*1024, reducememory=8*1024, mkl_num_threads=1, queue="default")
        distributable_test = DistributableTest(suites, "temp_test")
        # print(...) with a single argument is valid on both Python 2 and 3
        # (the original Python-2-only `print expr` statement broke on Python 3).
        print(runner.run(distributable_test))


    logging.info("done with testing")
Example #4
0
        #Because both pysnptools and fastlmm contain a tests folder, to run on cluster must have fastlmm listed first.

        #!!! The HPC runner doesn't seem to be returning failed tests even when they are there
        runner = HPC(task_count, 'GCR',r"\\GCR\Scratch\RR1\escience",
                                                    remote_python_parent=remote_python_parent,
                                                    unit='node', #core, socket, node
                                                    update_remote_python_parent=True,
                                                    template="Preemptable",
                                                    priority="Lowest",
                                                    nodegroups="Preemptable",
                                                    #excluded_nodes=['gcrcn0231'],
                                                    runtime="0:11:0", # day:hour:min
                                                    max = 100
                                                    )


        #runner = Local()
        runner = LocalMultiProc(taskcount=12,mkl_num_threads=5,just_one_process=False)
        #runner = LocalInParts(1,2,mkl_num_threads=1) # For debugging the cluster runs
        #runner = Hadoop2(100, mapmemory=8*1024, reducememory=8*1024, mkl_num_threads=1, queue="default")
        distributable_test = DistributableTest(suites,"temp_test")
        runner.run(distributable_test)

    debian_count = len(os.listdir('expected-debian'))
    if debian_count > 0:
        logging.warn("The tests contain {0} expected-results files that differ between Windows and Debian".format(debian_count))


    logging.info("done with testing")