Example #1
def run_pyctest():

    cwd = os.getcwd()
    # process standard arguments
    helpers.ArgumentParser("PyCTest", cwd, cwd, vcs_type="git",
                           build_type="MinSizeRel").parse_args()
    # base web address of dashboard
    pyctest.DROP_SITE = "cdash.nersc.gov"
    # custom setup.py command (runs CMake)
    pyctest.CONFIGURE_COMMAND = "python setup.py configure"
    # build and install
    pyctest.BUILD_COMMAND = "python setup.py install"
    # basic example test
    examples_dir = os.path.join(pyctest.SOURCE_DIRECTORY, "examples")
    basic_dir = os.path.join(examples_dir, "Basic")
    pyctest.test(name="basic", cmd=["python", "basic.py", "--", "-VV"],
                 properties={"WORKING_DIRECTORY": basic_dir})
    # tomopy example test
    tomopy_dir = os.path.join(examples_dir, "TomoPy")
    pyctest.test(name="tomopy", cmd=["python", "pyctest_tomopy.py",
                                     "--pyctest-stages", "Start",
                                     "Configure", "Build", "--", "-VV"],
                 properties={"WORKING_DIRECTORY": tomopy_dir})
    # run stages
    pyctest.run()
Example #2
File: __init__.py Project: tomopy/tomopy
def create_phantom_test(args, bench_props, phantom):
    """
    Create test(s) for the specified algorithms
    """
    pyexe = pyctest.PYTHON_EXECUTABLE
    this_dir = os.path.dirname(__file__)

    # skip when generating C coverage or when phantom tests are disabled
    if args.coverage or args.disable_phantom_tests:
        return

    nalgs = len(args.algorithms)
    psize = args.phantom_size

    # construct test name
    name = ((phantom + "_") +
            ("".join(args.algorithms) if nalgs == 1 else "comparison"))

    # determine phantom size
    nsize = psize if phantom != "shepp3d" else psize // 2

    # customize name
    if psize != default_phantom_size:
        name = "{}_pix{}".format(name, nsize)
    if args.num_iter != default_nitr:
        name = "{}_itr{}".format(name, args.num_iter)

    # test arguments
    test_args = [
        "-A", "360",
        "-f", "jpeg",
        "-S", "1",
        "-p", phantom,
        "-s", "{}".format(nsize),
        "-n", "{}".format(args.ncores),
        "-i", "{}".format(args.num_iter),
        "--output-dir", os.path.join(this_dir, name)
    ]
    test_args.append("-a" if nalgs == 1 else "--compare")
    test_args.extend(args.algorithms)

    # test properties
    test_props = bench_props
    if phantom.lower() == "shepp3d":
        test_props["RUN_SERIAL"] = "ON"

    # test command
    cmd = [pyexe, "-Om", "benchmarking.phantom"] + test_args

    # create test
    pyctest.test(name, cmd, properties=test_props)
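
For illustration only (hypothetical argument values, written with the function's local names): with phantom="shepp3d", a single algorithm ["gridrec"], and defaults otherwise, create_phantom_test registers a test roughly equivalent to the following sketch.

    # hypothetical sizes/counts; shepp3d halves the phantom size and runs serially
    pyctest.test(
        "shepp3d_gridrec",
        [pyexe, "-Om", "benchmarking.phantom",
         "-A", "360", "-f", "jpeg", "-S", "1",
         "-p", "shepp3d",
         "-s", "256",              # phantom_size // 2, assuming a 512 default
         "-n", "8", "-i", "10",    # hypothetical core and iteration counts
         "--output-dir", os.path.join(this_dir, "shepp3d_gridrec"),
         "-a", "gridrec"],
        properties=test_props)     # bench_props plus RUN_SERIAL="ON" for shepp3d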
Example #3
def run_pyctest():
    '''
    Configure PyCTest and execute
    '''
    # run argparse, checkout source, copy over files
    args = configure()

    # make sure binary directory is clean
    cmd = pyctest.command(["make", "clean"])
    cmd.SetErrorQuiet(True)
    cmd.SetWorkingDirectory(pyctest.BINARY_DIRECTORY)
    cmd.Execute()

    # make sure there is not an existing (old) Testing directory
    if os.path.exists(os.path.join(pyctest.BINARY_DIRECTORY, "Testing")):
        shutil.rmtree(os.path.join(pyctest.BINARY_DIRECTORY, "Testing"))

    #   BUILD_NAME
    pyctest.BUILD_NAME = "{}-{}".format(args.team, args.compiler)
    pyctest.BUILD_COMMAND = "make COMP={}".format(args.compiler)

    # properties
    bench_props = {
        "TIMEOUT": "10800",
        "WORKING_DIRECTORY": os.path.join(pyctest.BINARY_DIRECTORY)
    }

    # create test
    pyctest.test("{}".format(args.type),
                 ["srun", "./gpp.ex", "{}".format(args.type)],
                 properties=bench_props)

    print('Running PyCTest:\n\n\t{}\n\n'.format(pyctest.BUILD_NAME))

    pyctest.run()

    # remove these files
    files = [
        "CTestConfig.cmake", "CTestCustom.cmake", "CTestTestfile.cmake",
        "Init.cmake", "Stages.cmake", "Utilities.cmake"
    ]

    try:
        if args.post_cleanup:
            for f in files:
                if os.path.exists(os.path.join(pyctest.BINARY_DIRECTORY, f)):
                    os.remove(os.path.join(pyctest.BINARY_DIRECTORY, f))
            shutil.rmtree(os.path.join(pyctest.BINARY_DIRECTORY, "Testing"))
    except Exception:
        # best-effort cleanup; ignore failures
        pass
Example #4
def create_phantom_test(args, bench_props, phantom):
    """
    Create test(s) for the specified algorithms
    """
    pyexe = pyctest.PYTHON_EXECUTABLE
    this_dir = os.path.dirname(__file__)

    # skip when generating C coverage or when phantom tests are disabled
    if args.coverage or args.disable_phantom_tests:
        return

    nalgs = len(args.algorithms)
    psize = args.phantom_size

    # construct test name
    name = ((phantom + "_") +
            ("".join(args.algorithms) if nalgs == 1 else "comparison"))

    # determine phantom size
    nsize = psize if phantom != "shepp3d" else psize // 2

    # customize name
    if psize != default_phantom_size:
        name = "{}_pix{}".format(name, nsize)
    if args.num_iter != default_nitr:
        name = "{}_itr{}".format(name, args.num_iter)

    # test arguments
    test_args = [
        "-A", "360", "-f", "jpeg", "-S", "1", "-p", phantom, "-s",
        "{}".format(nsize), "-n", "{}".format(args.ncores), "-i",
        "{}".format(args.num_iter), "--output-dir",
        os.path.join(this_dir, name)
    ]
    test_args.append("-a" if nalgs == 1 else "--compare")
    test_args.extend(args.algorithms)

    # test properties
    test_props = bench_props
    if phantom.lower() == "shepp3d":
        test_props["RUN_SERIAL"] = "ON"

    # test command
    cmd = [pyexe, "-Om", "benchmarking.phantom"] + test_args

    # create test
    pyctest.test(name, cmd, properties=test_props)
Example #5
File: __init__.py Project: tomopy/tomopy
def create_globus_test(args, bench_props, algorithm, phantom):
    """
    Create a test from TomoBank (data provided by globus)
    """
    # skip when generating C coverage or when no globus path is provided
    if args.coverage or args.globus_path is None:
        return

    pyexe = pyctest.PYTHON_EXECUTABLE
    this_dir = os.path.dirname(__file__)

    name = "{}_{}".format(phantom, algorithm)
    # append the iteration count when it differs from the original default (10)
    if args.num_iter != 10:
        name = "{}_itr{}".format(name, args.num_iter)

    global_args = [
        "--type", "slice",
        "-f", "jpeg",
        "-S", "1",
        "-c", "4",
        "-n", "{}".format(args.ncores),
        "-i", "{}".format(args.num_iter)
    ]

    # try this path
    h5file = os.path.join(args.globus_path, phantom + ".h5")

    # alternative path
    if not os.path.exists(h5file):
        h5file = os.path.join(args.globus_path, phantom, phantom + ".h5")

    # could not locate
    if not os.path.exists(h5file):
        print("HDF5 file '{}' does not exist.".format(h5file))
        cmd = [pyexe, "-c",
               "print(\"No valid path to '{}'\")".format(h5file)]
        h5file = None
    else:
        cmd = [pyexe, "-Om", "benchmarking.rec", h5file]
        cmd += (global_args +
                ["-a", algorithm,
                "-o", os.path.join(this_dir, name)])

    pyctest.test(name, cmd, properties=bench_props)
Example #6
def create_globus_test(args, bench_props, algorithm, phantom):
    """
    Create a test from TomoBank (data provided by globus)
    """
    # skip when generating C coverage or when no globus path is provided
    if args.coverage or args.globus_path is None:
        return

    pyexe = pyctest.PYTHON_EXECUTABLE
    this_dir = os.path.dirname(__file__)

    name = "{}_{}".format(phantom, algorithm)
    # append the iteration count when it differs from the original default (10)
    if args.num_iter != 10:
        name = "{}_itr{}".format(name, args.num_iter)

    global_args = [
        "--type", "slice", "-f", "jpeg", "-S", "1", "-c", "4", "-n",
        "{}".format(args.ncores), "-i", "{}".format(args.num_iter)
    ]

    # try this path
    h5file = os.path.join(args.globus_path, phantom + ".h5")

    # alternative path
    if not os.path.exists(h5file):
        h5file = os.path.join(args.globus_path, phantom, phantom + ".h5")

    # could not locate
    if not os.path.exists(h5file):
        print("HDF5 file '{}' does not exist.".format(h5file))
        cmd = [pyexe, "-c", "print(\"No valid path to '{}'\")".format(h5file)]
        h5file = None
    else:
        cmd = [pyexe, "-Om", "benchmarking.rec", h5file]
        cmd += (global_args +
                ["-a", algorithm, "-o",
                 os.path.join(this_dir, name)])

    pyctest.test(name, cmd, properties=bench_props)
Example #7
def create_correct_module_test():
    """
    Create a test that checks we are using the locally built module
    """
    pyexe = pyctest.PYTHON_EXECUTABLE
    binary_dir = pyctest.BINARY_DIRECTORY

    # test properties
    props = {
        "WORKING_DIRECTORY": binary_dir,
        "RUN_SERIAL": "ON",
        "LABEL": "unit"
    }

    # test command
    cmd = [
        pyexe, "-c", "\"import os, sys, tomopy; " +
        "print('using tomopy module: {}'.format(tomopy.__file__)); " +
        "ret=0 if os.getcwd() in tomopy.__file__ else 1; " + "sys.exit(ret)\""
    ]

    pyctest.test("correct_module", cmd, props)
Example #8
File: __init__.py Project: tomopy/tomopy
def create_coverage_test(args):
    """
    Create a test that generates the coverage report
    """
    source_dir = pyctest.SOURCE_DIRECTORY
    binary_dir = pyctest.BINARY_DIRECTORY
    pycoverage = find_python_coverage()

    # test properties
    props = {
        "WORKING_DIRECTORY": binary_dir,
        "DEPENDS": "nosetests",
        "RUN_SERIAL": "ON",
        "LABEL": "unit"
    }

    # test command
    cmd = [os.path.join(source_dir, ".coverage.sh"), source_dir]
    if platform.system() == "Windows":
        cmd = [pycoverage, "xml"]

    pyctest.test("coverage", cmd, props)
Example #9
File: __init__.py Project: tomopy/tomopy
def create_correct_module_test():
    """
    Create a test that checks we are using the locally built module
    """
    pyexe = pyctest.PYTHON_EXECUTABLE
    binary_dir = pyctest.BINARY_DIRECTORY

    # test properties
    props = {
        "WORKING_DIRECTORY": binary_dir,
        "RUN_SERIAL": "ON",
        "LABEL": "unit"
    }

    # test command
    cmd = [pyexe, "-c",
           "\"import os, sys, tomopy; " +
           "print('using tomopy module: {}'.format(tomopy.__file__)); " +
           "ret=0 if os.getcwd() in tomopy.__file__ else 1; " +
           "sys.exit(ret)\""]

    pyctest.test("correct_module", cmd, props)
Example #10
def create_coverage_test(args):
    """
    Create a test that generates the coverage report
    """
    source_dir = pyctest.SOURCE_DIRECTORY
    binary_dir = pyctest.BINARY_DIRECTORY
    pycoverage = find_python_coverage()

    # test properties
    props = {
        "WORKING_DIRECTORY": binary_dir,
        "DEPENDS": "nosetests",
        "RUN_SERIAL": "ON",
        "LABEL": "unit"
    }

    # test command
    cmd = [os.path.join(source_dir, ".coverage.sh"), source_dir]
    if platform.system() == "Windows":
        cmd = [pycoverage, "xml"]

    pyctest.test("coverage", cmd, props)
Example #11
def create_nosetest_test(args):
    """
    Create a test that runs nosetests
    """
    pyexe = pyctest.PYTHON_EXECUTABLE
    binary_dir = pyctest.BINARY_DIRECTORY
    pynosetest = find_python_nosetest()
    pycoverage = find_python_coverage()

    # test properties
    props = {
        "DEPENDS": "correct_module",
        "RUN_SERIAL": "ON",
        "LABEL": "unit",
        "WORKING_DIRECTORY": binary_dir,
        "ENVIRONMENT": "TOMOPY_USE_C_ALGORITHMS=1"
    }

    # test command: python $(which coverage) run $(which nosetest)
    cmd = [pyexe, pycoverage, "run", pynosetest]

    # create test
    pyctest.test("nosetests", cmd, props)
Example #12
File: __init__.py Project: tomopy/tomopy
def create_nosetest_test(args):
    """
    Create a test that runs nosetests
    """
    pyexe = pyctest.PYTHON_EXECUTABLE
    binary_dir = pyctest.BINARY_DIRECTORY
    pynosetest = find_python_nosetest()
    pycoverage = find_python_coverage()

    # test properties
    props = {
        "DEPENDS": "correct_module",
        "RUN_SERIAL": "ON",
        "LABEL": "unit",
        "WORKING_DIRECTORY": binary_dir,
        "ENVIRONMENT": "TOMOPY_USE_C_ALGORITHMS=1"
    }

    # test command: python $(which coverage) run $(which nosetest)
    cmd = [pyexe, pycoverage, "run", pynosetest]

    # create test
    pyctest.test("nosetests", cmd, props)
Example #13
def run_pyctest():

    #--------------------------------------------------------------------------#
    # run argparse, checkout source, copy over files
    #
    args = configure()

    #--------------------------------------------------------------------------#
    # Compiler version
    #
    if os.environ.get("CXX") is None:
        os.environ["CXX"] = helpers.FindExePath("c++")
    cmd = pyct.command([os.environ["CXX"], "-dumpversion"])
    cmd.SetOutputStripTrailingWhitespace(True)
    cmd.Execute()
    compiler_version = cmd.Output()

    #--------------------------------------------------------------------------#
    # Set the build name
    #
    pyct.BUILD_NAME = "{} {} {} {} {} {}".format(
        get_branch(pyct.SOURCE_DIRECTORY),
        platform.uname()[0], helpers.GetSystemVersionInfo(),
        platform.uname()[4],
        os.path.basename(os.path.realpath(os.environ["CXX"])),
        compiler_version)
    pyct.BUILD_NAME = '-'.join(pyct.BUILD_NAME.split())

    #--------------------------------------------------------------------------#
    #   build specifications
    #
    build_opts = {
        "BUILD_SHARED_LIBS":
        "ON" if "shared" in args.build_libs else "OFF",
        "BUILD_STATIC_LIBS":
        "ON" if "static" in args.build_libs else "OFF",
        "TIMEMORY_BUILD_TOOLS":
        "ON" if args.tools else "OFF",
        "TIMEMORY_BUILD_GOTCHA":
        "ON" if args.gotcha else "OFF",
        "TIMEMORY_BUILD_PYTHON":
        "ON" if args.python else "OFF",
        "TIMEMORY_BUILD_CALIPER":
        "ON" if args.caliper else "OFF",
        "TIMEMORY_BUILD_TESTING":
        "ON",
        "TIMEMORY_BUILD_EXTRA_OPTIMIZATIONS":
        "ON" if args.extra_optimizations else "OFF",
        "TIMEMORY_USE_MPI":
        "ON" if args.mpi else "OFF",
        "TIMEMORY_USE_TAU":
        "ON" if args.tau else "OFF",
        "TIMEMORY_USE_ARCH":
        "ON" if args.arch else "OFF",
        "TIMEMORY_USE_PAPI":
        "ON" if args.papi else "OFF",
        "TIMEMORY_USE_CUDA":
        "ON" if args.cuda else "OFF",
        "TIMEMORY_USE_CUPTI":
        "ON" if args.cupti else "OFF",
        "TIMEMORY_USE_GPERF":
        "OFF",
        "TIMEMORY_USE_UPCXX":
        "ON" if args.upcxx else "OFF",
        "TIMEMORY_USE_LIKWID":
        "ON" if args.likwid else "OFF",
        "TIMEMORY_USE_GOTCHA":
        "ON" if args.gotcha else "OFF",
        "TIMEMORY_USE_PYTHON":
        "ON" if args.python else "OFF",
        "TIMEMORY_USE_CALIPER":
        "ON" if args.caliper else "OFF",
        "TIMEMORY_USE_COVERAGE":
        "ON" if args.coverage else "OFF",
        "TIMEMORY_USE_SANITIZER":
        "OFF",
        "TIMEMORY_USE_CLANG_TIDY":
        "ON" if args.static_analysis else "OFF",
        "USE_PAPI":
        "ON" if args.papi else "OFF",
        "USE_MPI":
        "ON" if args.mpi else "OFF",
        "USE_CALIPER":
        "ON" if args.caliper else "OFF",
    }

    if args.mpi and args.tools:
        build_opts["TIMEMORY_BUILD_MPIP"] = "ON" if args.mpip else "OFF"

    if args.python:
        pyver = "{}.{}.{}".format(sys.version_info[0], sys.version_info[1],
                                  sys.version_info[2])
        pyct.BUILD_NAME = "{} PY-{}".format(pyct.BUILD_NAME, pyver)

    if args.extra_optimizations:
        pyct.BUILD_NAME = "{} OPT".format(pyct.BUILD_NAME)

    if args.arch:
        pyct.BUILD_NAME = "{} ARCH".format(pyct.BUILD_NAME)

    if args.mpi:
        pyct.BUILD_NAME = "{} MPI".format(pyct.BUILD_NAME)

    if args.papi:
        pyct.BUILD_NAME = "{} PAPI".format(pyct.BUILD_NAME)

    if args.cuda:
        pyct.BUILD_NAME = "{} CUDA".format(pyct.BUILD_NAME)

    if args.cupti:
        pyct.BUILD_NAME = "{} CUPTI".format(pyct.BUILD_NAME)

    if args.caliper:
        pyct.BUILD_NAME = "{} CALIPER".format(pyct.BUILD_NAME)

    if args.gotcha:
        pyct.BUILD_NAME = "{} GOTCHA".format(pyct.BUILD_NAME)

    if args.upcxx:
        pyct.BUILD_NAME = "{} UPCXX".format(pyct.BUILD_NAME)

    if args.tau:
        pyct.BUILD_NAME = "{} TAU".format(pyct.BUILD_NAME)

    if args.likwid:
        pyct.BUILD_NAME = "{} LIKWID".format(pyct.BUILD_NAME)

    if args.profile is not None:
        build_opts["TIMEMORY_USE_GPERF"] = "ON"
        components = "profiler" if args.profile == "cpu" else "tcmalloc"
        build_opts["TIMEMORY_gperftools_COMPONENTS"] = components
        pyct.BUILD_NAME = "{} {}".format(pyct.BUILD_NAME, args.profile.upper())

    if args.sanitizer is not None:
        pyct.BUILD_NAME = "{} {}SAN".format(pyct.BUILD_NAME,
                                            args.sanitizer.upper()[0])
        build_opts["SANITIZER_TYPE"] = args.sanitizer
        build_opts["TIMEMORY_USE_SANITIZER"] = "ON"

    if args.coverage:
        gcov_exe = helpers.FindExePath("gcov")
        if gcov_exe is not None:
            pyct.COVERAGE_COMMAND = "{}".format(gcov_exe)
            build_opts["TIMEMORY_USE_COVERAGE"] = "ON"
            pyct.BUILD_NAME = "{} COV".format(pyct.BUILD_NAME)
            if pyct.BUILD_TYPE != "Debug":
                warnings.warn(
                    "Forcing build type to 'Debug' when coverage is enabled")
                pyct.BUILD_TYPE = "Debug"
        else:
            build_opts["TIMEMORY_USE_COVERAGE"] = "OFF"
        pyct.set("CTEST_CUSTOM_COVERAGE_EXCLUDE", ".*external/.*;/usr/.*")

    # split and join with dashes
    pyct.BUILD_NAME = '-'.join(pyct.BUILD_NAME.replace('/', '-').split())

    # default options
    cmake_args = "-DCMAKE_BUILD_TYPE={} -DTIMEMORY_BUILD_EXAMPLES=ON".format(
        pyct.BUILD_TYPE)

    # customized from args
    for key, val in build_opts.items():
        cmake_args = "{} -D{}={}".format(cmake_args, key, val)

    #--------------------------------------------------------------------------#
    # how to configure the code
    #
    ctest_cmake_cmd = "${CTEST_CMAKE_COMMAND}"
    pyct.CONFIGURE_COMMAND = "{} {} {}".format(ctest_cmake_cmd, cmake_args,
                                               pyct.SOURCE_DIRECTORY)

    #--------------------------------------------------------------------------#
    # how to build the code
    #
    pyct.BUILD_COMMAND = "{} --build {} --target all".format(
        ctest_cmake_cmd, pyct.BINARY_DIRECTORY)

    #--------------------------------------------------------------------------#
    # parallel build
    #
    if platform.system() != "Windows":
        pyct.BUILD_COMMAND = "{} -- -j{} VERBOSE=1".format(
            pyct.BUILD_COMMAND, mp.cpu_count())
    else:
        pyct.BUILD_COMMAND = "{} -- /MP -A x64".format(pyct.BUILD_COMMAND)

    #--------------------------------------------------------------------------#
    # how to update the code
    #
    git_exe = helpers.FindExePath("git")
    pyct.UPDATE_COMMAND = "{}".format(git_exe)
    pyct.set("CTEST_UPDATE_TYPE", "git")
    pyct.set("CTEST_GIT_COMMAND", "{}".format(git_exe))

    #--------------------------------------------------------------------------#
    # find the CTEST_TOKEN_FILE
    #
    if args.pyctest_token_file is None and args.pyctest_token is None:
        home = helpers.GetHomePath()
        if home is not None:
            token_path = os.path.join(home,
                                      os.path.join(".tokens", "nersc-cdash"))
            if os.path.exists(token_path):
                pyct.set("CTEST_TOKEN_FILE", token_path)

    #--------------------------------------------------------------------------#
    # construct a test name
    #
    def construct_name(test_name):
        return test_name.replace("_", "-")

    #--------------------------------------------------------------------------#
    # construct a command
    #
    def construct_command(cmd, args):
        global clobber_notes
        _cmd = []
        if args.profile is not None:
            _exe = os.path.basename(cmd[0])
            if args.profile == "cpu":
                _cmd.append(
                    os.path.join(pyct.BINARY_DIRECTORY,
                                 "gperf-cpu-profile.sh"))
                pyct.add_note(pyct.BINARY_DIRECTORY,
                              "cpu.prof.{}/gperf.0.txt".format(_exe),
                              clobber=clobber_notes)
                pyct.add_note(pyct.BINARY_DIRECTORY,
                              "cpu.prof.{}/gperf.0.cum.txt".format(_exe),
                              clobber=False)
                clobber_notes = False
            elif args.profile == "heap":
                _cmd.append(
                    os.path.join(pyct.BINARY_DIRECTORY,
                                 "gperf-heap-profile.sh"))
                for itr in [
                        "alloc_objects", "alloc_space", "inuse_objects",
                        "inuse_space"
                ]:
                    pyct.add_note(
                        pyct.BINARY_DIRECTORY,
                        "heap.prof.{}/gperf.0.0001.heap.{}.txt".format(
                            _exe, itr),
                        clobber=clobber_notes)
                    # make sure all subsequent iterations don't clobber
                    clobber_notes = False
        _cmd.extend(cmd)
        return _cmd

    #--------------------------------------------------------------------------#
    # construct a roofline command
    #
    def construct_roofline_command(cmd, dir, extra_opts=[]):
        _cmd = [
            sys.executable, '-m', 'timemory.roofline', '-e', '-D', dir,
            '--format', 'png'
        ]
        _cmd.extend(extra_opts)
        _cmd.extend(['--'])
        _cmd.extend(cmd)
        return _cmd
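    # For reference (illustrative expansion, not part of the original script):
    #   construct_roofline_command(["./ex_cpu_roofline"], "cpu-roofline",
    #                              ["-t", "cpu_roofline"])
    # returns
    #   [sys.executable, "-m", "timemory.roofline", "-e", "-D", "cpu-roofline",
    #    "--format", "png", "-t", "cpu_roofline", "--", "./ex_cpu_roofline"]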

    #--------------------------------------------------------------------------#
    # create tests
    #
    test_env = ";".join([
        "CPUPROFILE_FREQUENCY=200", "CPUPROFILE_REALTIME=1",
        "CALI_CONFIG_PROFILE=runtime-report", "TIMEMORY_DART_OUTPUT=ON",
        "TIMEMORY_DART_COUNT=1"
    ])

    pyct.test(
        construct_name("ex-optional-off"),
        construct_command(["./ex_optional_off"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    pyct.test(
        construct_name("ex-cxx-overhead"),
        construct_command(["./ex_cxx_overhead"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "600",
            "ENVIRONMENT": test_env
        })

    if args.cuda:
        pyct.test(
            construct_name("ex-cuda-event"), ["./ex_cuda_event"], {
                "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                "LABELS": pyct.PROJECT_NAME,
                "TIMEOUT": "300",
                "ENVIRONMENT": test_env
            })

    pyct.test(
        construct_name("ex-cxx-minimal"),
        construct_command(["./ex_cxx_minimal"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    pyct.test(
        construct_name("ex-c-minimal-library-overload"),
        construct_command(["./ex_c_minimal_library_overload"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    pyct.test(
        construct_name("ex-c-timing"),
        construct_command(["./ex_c_timing"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    pyct.test(
        construct_name("ex-cxx-minimal-library"),
        construct_command(["./ex_cxx_minimal_library"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    pyct.test(
        construct_name("ex-optional-on"),
        construct_command(["./ex_optional_on"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    pyct.test(
        construct_name("ex-c-minimal-library"),
        construct_command(["./ex_c_minimal_library"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    pyct.test(
        construct_name("ex-ert"), construct_command(["./ex_ert"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "600",
            "ENVIRONMENT": test_env
        })

    pyct.test(
        construct_name("ex-cxx-tuple"),
        construct_command(["./ex_cxx_tuple"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    pyct.test(
        construct_name("ex-gotcha-mpi"),
        construct_command(["./ex_gotcha_mpi"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    if args.python:
        pyct.test(
            construct_name("ex-python-caliper"),
            construct_command(["./ex_python_caliper"], args), {
                "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                "LABELS": pyct.PROJECT_NAME,
                "TIMEOUT": "300",
                "ENVIRONMENT": test_env
            })

    pyct.test(
        construct_name("ex-caliper"),
        construct_command(["./ex_caliper"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    pyct.test(
        construct_name("ex-c-minimal"),
        construct_command(["./ex_c_minimal"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    pyct.test(
        construct_name("ex-cxx-minimal-library-overload"),
        construct_command(["./ex_cxx_minimal_library_overload"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    pyct.test(
        construct_name("ex-cxx-basic"),
        construct_command(["./ex_cxx_basic"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    if args.python:
        pyct.test(
            construct_name("ex-python-minimal"),
            construct_command(["./ex_python_minimal"], args), {
                "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                "LABELS": pyct.PROJECT_NAME,
                "TIMEOUT": "480",
                "ENVIRONMENT": test_env
            })

    pyct.test(
        construct_name("ex-gotcha"), construct_command(["./ex_gotcha"], args),
        {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    if args.likwid:
        pyct.test(
            construct_name("ex-likwid"),
            construct_command(["./ex_likwid"], args), {
                "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                "LABELS": pyct.PROJECT_NAME,
                "TIMEOUT": "300",
                "ENVIRONMENT": test_env
            })

        if args.python:
            pyct.test(
                construct_name("ex-python-likwid"),
                construct_command(["./ex_python_likwid"], args), {
                    "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                    "LABELS": pyct.PROJECT_NAME,
                    "TIMEOUT": "300",
                    "ENVIRONMENT": test_env
                })

    if not args.python:
        pyct.test(
            construct_name("ex-cpu-roofline"),
            construct_roofline_command(["./ex_cpu_roofline"], 'cpu-roofline',
                                       ['-t', 'cpu_roofline']),
            {
                "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                "LABELS": pyct.PROJECT_NAME,
                "TIMEOUT": "900",
                "ENVIRONMENT": test_env
            })

        pyct.test(
            construct_name("ex-cpu-roofline.sp"),
            construct_roofline_command(["./ex_cpu_roofline.sp"],
                                       'cpu-roofline.sp',
                                       ['-t', 'cpu_roofline']),
            {
                "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                "LABELS": pyct.PROJECT_NAME,
                "TIMEOUT": "900",
                "ENVIRONMENT": test_env
            })

        if args.cupti:
            pyct.test(
                construct_name("ex-gpu-roofline"),
                construct_roofline_command(["./ex_gpu_roofline"],
                                           'gpu-roofline',
                                           ['-t', 'gpu_roofline']),
                {
                    "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                    "LABELS": pyct.PROJECT_NAME,
                    "TIMEOUT": "900",
                    "ENVIRONMENT": test_env
                })

    pyct.generate_config(pyct.BINARY_DIRECTORY)
    pyct.generate_test_file(os.path.join(pyct.BINARY_DIRECTORY, "tests"))
    if not args.generate:
        pyct.run(pyct.ARGUMENTS, pyct.BINARY_DIRECTORY)
        if args.coverage:
            script = os.path.join(pyct.SOURCE_DIRECTORY, "cmake", "Scripts",
                                  "submit-coverage.sh")
            cov = pyct.command([script, pyct.BINARY_DIRECTORY])
            cov.SetWorkingDirectory(pyct.SOURCE_DIRECTORY)
            cov.Execute()
            print("{}".format(cov.Output()))
Example #14
    # these are required
    pyctest.PROJECT_NAME = "PyCTest"
    pyctest.SOURCE_DIRECTORY = os.getcwd()
    pyctest.BINARY_DIRECTORY = directory

    args = helpers.ArgumentParser(pyctest.PROJECT_NAME,
                                  pyctest.SOURCE_DIRECTORY,
                                  pyctest.BINARY_DIRECTORY,
                                  update_command="git").parse_args()

    # set explicitly
    pyctest.MODEL = "Continuous"
    pyctest.SITE = platform.node()

    # create a Test object
    test = pyctest.test()
    test.SetName("list_directory")
    test.SetCommand(["ls", directory])
    test.SetProperty("WORKING_DIRECTORY", os.getcwd())
    test.SetProperty("LABELS", "shutil")

    # create a second test with direct initialization
    pyctest.test("hostname", ["hostname"], {
        "RUN_SERIAL": "ON",
        "TIMEOUT": "10",
        "LABELS": "network"
    })

    # run CTest -- e.g. ctest -VV ${PWD}/pycm-test
    pyctest.run()
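
Example #14 is a fragment; with imports and a binary directory it becomes a complete driver. A minimal sketch, assuming the conventional pyctest.pyctest / pyctest.helpers import layout and a hypothetical "pycm-test" binary directory taken from the comment above:

#!/usr/bin/env python

import os
import platform

import pyctest.pyctest as pyctest   # assumed import path
import pyctest.helpers as helpers   # assumed import path

# hypothetical binary directory (matches the pycm-test folder mentioned above)
directory = os.path.join(os.getcwd(), "pycm-test")

# these are required
pyctest.PROJECT_NAME = "PyCTest"
pyctest.SOURCE_DIRECTORY = os.getcwd()
pyctest.BINARY_DIRECTORY = directory

args = helpers.ArgumentParser(pyctest.PROJECT_NAME,
                              pyctest.SOURCE_DIRECTORY,
                              pyctest.BINARY_DIRECTORY,
                              update_command="git").parse_args()

# set explicitly
pyctest.MODEL = "Continuous"
pyctest.SITE = platform.node()

# a single test executed through CTest
pyctest.test("hostname", ["hostname"], {"RUN_SERIAL": "ON", "TIMEOUT": "10"})

pyctest.run()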
Example #15
def run_pyctest():

    #--------------------------------------------------------------------------#
    # run argparse, checkout source, copy over files
    #
    args = configure()

    #--------------------------------------------------------------------------#
    # Compiler version
    #
    if os.environ.get("CXX") is None:
        os.environ["CXX"] = helpers.FindExePath("c++")
    cmd = pyct.command([os.environ["CXX"], "--version"])
    cmd.SetOutputStripTrailingWhitespace(True)
    cmd.Execute()
    compiler_version = cmd.Output()
    try:
        cn = compiler_version.split()[0]
        # keep the compiler name plus the first character of the matched version
        cv = re.search(r'(\b)\d.\d.\d', compiler_version)
        compiler_version = '{}-{}'.format(cn, cv.group()[0])
    except Exception as e:
        print("Exception! {}".format(e))
        cmd = pyct.command([os.environ["CXX"], "-dumpversion"])
        cmd.SetOutputStripTrailingWhitespace(True)
        cmd.Execute()
        compiler_version = cmd.Output()

    #--------------------------------------------------------------------------#
    # Set the build name
    #
    pyct.BUILD_NAME = "{} {} {} {} {}".format(
        get_branch(pyct.SOURCE_DIRECTORY),
        platform.uname()[0], helpers.GetSystemVersionInfo(),
        platform.uname()[4], compiler_version)
    pyct.BUILD_NAME = '-'.join(pyct.BUILD_NAME.split())

    #--------------------------------------------------------------------------#
    #   build specifications
    #
    build_opts = {
        "BUILD_SHARED_LIBS":
        "ON" if "shared" in args.build_libs else "OFF",
        "BUILD_STATIC_LIBS":
        "ON" if "static" in args.build_libs else "OFF",
        "CMAKE_CXX_STANDARD":
        "{}".format(args.cxx_standard),
        "TIMEMORY_TLS_MODEL":
        "{}".format(args.tls_model),
        "TIMEMORY_CCACHE_BUILD":
        "OFF",
        "TIMEMORY_BUILD_C":
        "ON",
        "TIMEMORY_BUILD_LTO":
        "ON" if args.lto else "OFF",
        "TIMEMORY_BUILD_OMPT":
        "OFF",
        "TIMEMORY_BUILD_TOOLS":
        "ON" if args.tools else "OFF",
        "TIMEMORY_BUILD_GOTCHA":
        "ON" if args.gotcha else "OFF",
        "TIMEMORY_BUILD_PYTHON":
        "ON" if args.python else "OFF",
        "TIMEMORY_BUILD_CALIPER":
        "ON" if args.caliper else "OFF",
        "TIMEMORY_BUILD_DEVELOPER":
        "ON" if args.developer else "OFF",
        "TIMEMORY_BUILD_TESTING":
        "ON",
        "TIMEMORY_BUILD_EXTRA_OPTIMIZATIONS":
        "ON" if args.extra_optimizations else "OFF",
        "TIMEMORY_USE_MPI":
        "ON" if args.mpi else "OFF",
        "TIMEMORY_USE_TAU":
        "ON" if args.tau else "OFF",
        "TIMEMORY_USE_ARCH":
        "ON" if args.arch else "OFF",
        "TIMEMORY_USE_PAPI":
        "ON" if args.papi else "OFF",
        "TIMEMORY_USE_CUDA":
        "ON" if args.cuda else "OFF",
        "TIMEMORY_USE_NVTX":
        "ON" if args.nvtx else "OFF",
        "TIMEMORY_USE_OMPT":
        "ON" if args.ompt else "OFF",
        "TIMEMORY_USE_XRAY":
        "ON" if args.xray else "OFF",
        "TIMEMORY_USE_CUPTI":
        "ON" if args.cupti else "OFF",
        "TIMEMORY_USE_UPCXX":
        "ON" if args.upcxx else "OFF",
        "TIMEMORY_USE_LIKWID":
        "ON" if args.likwid else "OFF",
        "TIMEMORY_USE_GOTCHA":
        "ON" if args.gotcha else "OFF",
        "TIMEMORY_USE_PYTHON":
        "ON" if args.python else "OFF",
        "TIMEMORY_USE_CALIPER":
        "ON" if args.caliper else "OFF",
        "TIMEMORY_USE_COVERAGE":
        "ON" if args.coverage else "OFF",
        "TIMEMORY_USE_GPERFTOOLS":
        "ON" if args.gperftools else "OFF",
        "TIMEMORY_USE_STATISTICS":
        "ON" if args.stats else "OFF",
        "TIMEMORY_USE_COMPILE_TIMING":
        "ON" if args.timing else "OFF",
        "TIMEMORY_USE_SANITIZER":
        "OFF",
        "TIMEMORY_USE_CLANG_TIDY":
        "ON" if args.static_analysis else "OFF",
        "USE_PAPI":
        "ON" if args.papi else "OFF",
        "USE_MPI":
        "ON" if args.mpi else "OFF",
        "USE_CALIPER":
        "ON" if args.caliper else "OFF",
        "PYTHON_EXECUTABLE":
        "{}".format(sys.executable),
    }

    if args.mpi and args.mpi_init:
        build_opts["TIMEMORY_USE_MPI_INIT"] = "ON"

    if args.ompt:
        build_opts["OPENMP_ENABLE_LIBOMPTARGET"] = "OFF"

    if args.tools:
        build_opts["TIMEMORY_BUILD_MPIP_LIBRARY"] = "ON" if (
            args.mpi and args.mpip) else "OFF"
        build_opts["TIMEMORY_BUILD_OMPT_LIBRARY"] = "ON" if (
            args.ompt) else "OFF"
        build_opts[
            "TIMEMORY_BUILD_KOKKOS_TOOLS"] = "ON" if args.kokkos else "OFF"
        build_opts[
            "TIMEMORY_BUILD_DYNINST_TOOLS"] = "ON" if args.dyninst else "OFF"

    if args.python:
        pyver = "{}.{}.{}".format(sys.version_info[0], sys.version_info[1],
                                  sys.version_info[2])
        pyct.BUILD_NAME = "{} PY-{}".format(pyct.BUILD_NAME, pyver)

    if args.profile is not None:
        build_opts["TIMEMORY_USE_GPERFTOOLS"] = "ON"
        components = "profiler" if args.profile == "cpu" else "tcmalloc"
        build_opts["TIMEMORY_gperftools_COMPONENTS"] = components
        pyct.BUILD_NAME = "{} {}".format(pyct.BUILD_NAME, args.profile.upper())

    if args.sanitizer is not None:
        pyct.BUILD_NAME = "{} {}SAN".format(pyct.BUILD_NAME,
                                            args.sanitizer.upper()[0])
        build_opts["SANITIZER_TYPE"] = args.sanitizer
        build_opts["TIMEMORY_USE_SANITIZER"] = "ON"

    if args.coverage:
        gcov_exe = helpers.FindExePath("gcov")
        if gcov_exe is not None:
            pyct.COVERAGE_COMMAND = "{}".format(gcov_exe)
            build_opts["TIMEMORY_USE_COVERAGE"] = "ON"
            pyct.BUILD_NAME = "{} COV".format(pyct.BUILD_NAME)
            if pyct.BUILD_TYPE != "Debug":
                warnings.warn(
                    "Forcing build type to 'Debug' when coverage is enabled")
                pyct.BUILD_TYPE = "Debug"
        else:
            build_opts["TIMEMORY_USE_COVERAGE"] = "OFF"

    pyct.set("CTEST_CUSTOM_COVERAGE_EXCLUDE", ".*external/.*;/usr/.*")
    pyct.set("CTEST_CUSTOM_MAXIMUM_NUMBER_OF_ERRORS", "100")
    pyct.set("CTEST_CUSTOM_MAXIMUM_NUMBER_OF_WARNINGS", "100")

    # Use the options to create a build name with configuration
    build_name = set()
    mangled_tags = {
        "EXTRA_OPTIMIZATIONS": "OPT",
        "KOKKOS_TOOLS": "KOKKOS",
        "DYNINST_TOOLS": "DYNINST"
    }
    exclude_keys = ("TESTING", "EXAMPLES", "GOOGLE_TEST", "CCACHE_BUILD",
                    "gperftools_COMPONENTS")
    for opt_key, opt_val in build_opts.items():
        tag = None
        key = None
        if opt_val == "OFF" or opt_val is None:
            continue
        else:
            if "TIMEMORY_BUILD_" in opt_key:
                tag = opt_key.replace("TIMEMORY_BUILD_", "")
                key = tag
            elif "TIMEMORY_USE_" in opt_key:
                tag = opt_key.replace("TIMEMORY_USE_", "")
                key = tag
            elif "TIMEMORY_" in opt_key:
                key = opt_key.replace("TIMEMORY_", "")
                tag = "{}_{}".format(key, opt_val)

        # if valid and turned on
        if tag is not None and key is not None and key not in exclude_keys:
            tag = mangled_tags.get(tag, tag)
            build_name.add(tag)

    build_name = sorted(build_name)
    pyct.BUILD_NAME += " {}".format(" ".join(build_name))

    # split and join with dashes
    pyct.BUILD_NAME = '-'.join(pyct.BUILD_NAME.replace('/', '-').split())

    # default options
    cmake_args = "-DCMAKE_BUILD_TYPE={} -DTIMEMORY_BUILD_EXAMPLES=ON".format(
        pyct.BUILD_TYPE)

    # customized from args
    for key, val in build_opts.items():
        cmake_args = "{} -D{}={}".format(cmake_args, key, val)

    cmake_args = "{} {}".format(cmake_args, " ".join(pycm.ARGUMENTS))

    #--------------------------------------------------------------------------#
    # how to configure the code
    #
    ctest_cmake_cmd = "${CTEST_CMAKE_COMMAND}"
    pyct.CONFIGURE_COMMAND = "{} {} {}".format(ctest_cmake_cmd, cmake_args,
                                               pyct.SOURCE_DIRECTORY)

    #--------------------------------------------------------------------------#
    # how to build the code
    #
    pyct.BUILD_COMMAND = "{} --build {} --target all".format(
        ctest_cmake_cmd, pyct.BINARY_DIRECTORY)

    #--------------------------------------------------------------------------#
    # parallel build
    #
    if platform.system() != "Windows":
        pyct.BUILD_COMMAND = "{} -- -j{}".format(pyct.BUILD_COMMAND,
                                                 args.cpu_count)
    else:
        pyct.BUILD_COMMAND = "{} -- /MP -A x64".format(pyct.BUILD_COMMAND)

    #--------------------------------------------------------------------------#
    # how to update the code
    #
    git_exe = helpers.FindExePath("git")
    pyct.UPDATE_COMMAND = "{}".format(git_exe)
    pyct.set("CTEST_UPDATE_TYPE", "git")
    pyct.set("CTEST_GIT_COMMAND", "{}".format(git_exe))

    #--------------------------------------------------------------------------#
    # find the CTEST_TOKEN_FILE
    #
    if args.pyctest_token_file is None and args.pyctest_token is None:
        home = helpers.GetHomePath()
        if home is not None:
            token_path = os.path.join(home,
                                      os.path.join(".tokens", "nersc-cdash"))
            if os.path.exists(token_path):
                pyct.set("CTEST_TOKEN_FILE", token_path)

    #--------------------------------------------------------------------------#
    # construct a command
    #
    def construct_name(test_name):
        return test_name.replace("_", "-")

    #--------------------------------------------------------------------------#
    # construct a command
    #
    def construct_command(cmd, args):
        global clobber_notes
        _cmd = []
        if args.profile is not None:
            _exe = os.path.basename(cmd[0])
            if args.profile == "cpu":
                _cmd.append(
                    os.path.join(pyct.BINARY_DIRECTORY,
                                 "gperf-cpu-profile.sh"))
                pyct.add_note(pyct.BINARY_DIRECTORY,
                              "cpu.prof.{}/gperf.0.txt".format(_exe),
                              clobber=clobber_notes)
                pyct.add_note(pyct.BINARY_DIRECTORY,
                              "cpu.prof.{}/gperf.0.cum.txt".format(_exe),
                              clobber=False)
                clobber_notes = False
            elif args.profile == "heap":
                _cmd.append(
                    os.path.join(pyct.BINARY_DIRECTORY,
                                 "gperf-heap-profile.sh"))
                for itr in [
                        "alloc_objects", "alloc_space", "inuse_objects",
                        "inuse_space"
                ]:
                    pyct.add_note(
                        pyct.BINARY_DIRECTORY,
                        "heap.prof.{}/gperf.0.0001.heap.{}.txt".format(
                            _exe, itr),
                        clobber=clobber_notes)
                    # make sure all subsequent iterations don't clobber
                    clobber_notes = False
        _cmd.extend(cmd)
        return _cmd

    #--------------------------------------------------------------------------#
    # construct a command
    #
    def construct_roofline_command(cmd, dir, extra_opts=[]):
        _cmd = [
            sys.executable, '-m', 'timemory.roofline', '-e', '-D', dir,
            '--format', 'png'
        ]
        _cmd.extend(extra_opts)
        _cmd.extend(['--'])
        _cmd.extend(cmd)
        return _cmd

    #--------------------------------------------------------------------------#
    # create tests
    #
    pypath = ":".join(
        ["{}".format(pyct.BINARY_DIRECTORY),
         os.environ.get("PYTHONPATH", "")])
    base_env = ";".join([
        "CPUPROFILE_FREQUENCY=200", "CPUPROFILE_REALTIME=1",
        "CALI_CONFIG_PROFILE=runtime-report", "TIMEMORY_PLOT_OUTPUT=ON",
        "PYTHONPATH={}".format(pypath)
    ])
    test_env = ";".join(
        [base_env, "TIMEMORY_DART_OUTPUT=ON", "TIMEMORY_DART_COUNT=1"])

    if args.tools:
        pyct.test(
            "timem-timemory-avail", ["./timem", "./timemory-avail"], {
                "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                "LABELS": pyct.PROJECT_NAME,
                "TIMEOUT": "300",
                "ENVIRONMENT": test_env
            })

    pyct.test(
        construct_name("ex-derived"),
        construct_command(["./ex_derived"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    pyct.test(
        construct_name("ex-optional-off"),
        construct_command(["./ex_optional_off"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    overhead_cmd = ["./ex_cxx_overhead"]
    if args.coverage:
        overhead_cmd += ["40", "30"]

    pyct.test(
        construct_name("ex-cxx-overhead"),
        construct_command(overhead_cmd, args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "600",
            "ENVIRONMENT": test_env
        })

    if args.cuda:
        pyct.test(
            construct_name("ex-cuda-event"), ["./ex_cuda_event"], {
                "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                "LABELS": pyct.PROJECT_NAME,
                "TIMEOUT": "300",
                "ENVIRONMENT": test_env
            })

    pyct.test(
        construct_name("ex-cxx-minimal"),
        construct_command(["./ex_cxx_minimal"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    pyct.test(
        construct_name("ex-c-minimal-library-overload"),
        construct_command(["./ex_c_minimal_library_overload"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    pyct.test(
        construct_name("ex-c-timing"),
        construct_command(["./ex_c_timing"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    pyct.test(
        construct_name("ex-cxx-minimal-library"),
        construct_command(["./ex_cxx_minimal_library"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    pyct.test(
        construct_name("ex-optional-on"),
        construct_command(["./ex_optional_on"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    pyct.test(
        construct_name("ex-c-minimal-library"),
        construct_command(["./ex_c_minimal_library"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    ert_cmd = ["./ex_ert"]
    if args.coverage:
        ert_cmd += ["512", "1081344", "2"]

    pyct.test(
        construct_name("ex-ert"), construct_command(ert_cmd, args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "600",
            "ENVIRONMENT": test_env
        })

    pyct.test(
        construct_name("ex-cxx-tuple"),
        construct_command(["./ex_cxx_tuple"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    if args.gotcha:
        pyct.test(
            construct_name("ex-gotcha"),
            construct_command(["./ex_gotcha"], args), {
                "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                "LABELS": pyct.PROJECT_NAME,
                "TIMEOUT": "300",
                "ENVIRONMENT": test_env
            })

        pyct.test(
            construct_name("ex-gotcha-replacement"),
            construct_command(["./ex_gotcha_replacement"], args), {
                "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                "LABELS": pyct.PROJECT_NAME,
                "TIMEOUT": "300",
                "ENVIRONMENT": test_env
            })

        if args.mpi:
            pyct.test(
                construct_name("ex-gotcha-mpi"),
                construct_command(["./ex_gotcha_mpi"], args), {
                    "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                    "LABELS": pyct.PROJECT_NAME,
                    "TIMEOUT": "300",
                    "ENVIRONMENT": test_env
                })

    if args.python:
        pyct.test(
            "timemory-python", [sys.executable, "-c", "\"import timemory\""], {
                "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                "LABELS": pyct.PROJECT_NAME,
                "TIMEOUT": "300",
                "ENVIRONMENT": base_env
            })

        pyunittests = ["flat", "rusage", "throttle", "timeline", "timing"]
        for t in pyunittests:
            pyct.test(
                "python-unittest-{}".format(t),
                [sys.executable, "-m", "timemory.test.test_{}".format(t)], {
                    "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                    "LABELS": pyct.PROJECT_NAME,
                    "TIMEOUT": "300",
                    "ENVIRONMENT": base_env
                })

        pyct.test(
            construct_name("ex-python-bindings"),
            construct_command(
                ["mpirun", "-np", "2", sys.executable, "./ex_python_bindings"],
                args), {
                    "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                    "LABELS": pyct.PROJECT_NAME,
                    "TIMEOUT": "300",
                    "ENVIRONMENT": base_env
                })

        if args.caliper:
            pyct.test(
                construct_name("ex-python-caliper"),
                construct_command([sys.executable, "./ex_python_caliper"],
                                  args),
                {
                    "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                    "LABELS": pyct.PROJECT_NAME,
                    "TIMEOUT": "300",
                    "ENVIRONMENT": base_env
                })

        pyct.test(
            construct_name("ex-python-general"),
            construct_command([sys.executable, "./ex_python_general"], args), {
                "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                "LABELS": pyct.PROJECT_NAME,
                "TIMEOUT": "300",
                "ENVIRONMENT": base_env
            })

        pyct.test(
            construct_name("ex-python-profiler"),
            construct_command([sys.executable, "./ex_python_profiler"], args),
            {
                "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                "LABELS": pyct.PROJECT_NAME,
                "TIMEOUT": "300",
                "ENVIRONMENT": base_env
            })

        pyct.test(
            construct_name("ex-python-sample"),
            construct_command([sys.executable, "./ex_python_sample"], args), {
                "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                "LABELS": pyct.PROJECT_NAME,
                "TIMEOUT": "300",
                "ENVIRONMENT": base_env
            })

    if args.caliper:
        pyct.test(
            construct_name("ex-caliper"),
            construct_command(["./ex_caliper"], args), {
                "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                "LABELS": pyct.PROJECT_NAME,
                "TIMEOUT": "300",
                "ENVIRONMENT": test_env
            })

    pyct.test(
        construct_name("ex-c-minimal"),
        construct_command(["./ex_c_minimal"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    pyct.test(
        construct_name("ex-cxx-minimal-library-overload"),
        construct_command(["./ex_cxx_minimal_library_overload"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    pyct.test(
        construct_name("ex-cxx-basic"),
        construct_command(["./ex_cxx_basic"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    pyct.test(
        construct_name("ex-statistics"),
        construct_command(["./ex_cxx_statistics"], args), {
            "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
            "LABELS": pyct.PROJECT_NAME,
            "TIMEOUT": "300",
            "ENVIRONMENT": test_env
        })

    if args.python:
        pyct.test(
            construct_name("ex-python-minimal"),
            construct_command([sys.executable, "./ex_python_minimal"], args), {
                "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                "LABELS": pyct.PROJECT_NAME,
                "TIMEOUT": "480",
                "ENVIRONMENT": test_env
            })

    if args.likwid:
        pyct.test(
            construct_name("ex-likwid"),
            construct_command(["./ex_likwid"], args), {
                "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                "LABELS": pyct.PROJECT_NAME,
                "TIMEOUT": "300",
                "ENVIRONMENT": test_env
            })

        if args.python:
            pyct.test(
                construct_name("ex-python-likwid"),
                construct_command([sys.executable, "./ex_python_likwid"],
                                  args),
                {
                    "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                    "LABELS": pyct.PROJECT_NAME,
                    "TIMEOUT": "300",
                    "ENVIRONMENT": test_env
                })

    if not args.python:
        pyct.test(
            construct_name("ex-cpu-roofline"),
            construct_roofline_command(["./ex_cpu_roofline"], 'cpu-roofline',
                                       ['-t', 'cpu_roofline']),
            {
                "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                "LABELS": pyct.PROJECT_NAME,
                "TIMEOUT": "900",
                "ENVIRONMENT": test_env
            })

        pyct.test(
            construct_name("ex-cpu-roofline.sp"),
            construct_roofline_command(["./ex_cpu_roofline.sp"],
                                       'cpu-roofline.sp',
                                       ['-t', 'cpu_roofline']),
            {
                "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                "LABELS": pyct.PROJECT_NAME,
                "TIMEOUT": "900",
                "ENVIRONMENT": test_env
            })

        if args.cupti:
            pyct.test(
                construct_name("ex-gpu-roofline"),
                construct_roofline_command(["./ex_gpu_roofline"],
                                           'gpu-roofline',
                                           ['-t', 'gpu_roofline']),
                {
                    "WORKING_DIRECTORY": pyct.BINARY_DIRECTORY,
                    "LABELS": pyct.PROJECT_NAME,
                    "TIMEOUT": "900",
                    "ENVIRONMENT": test_env
                })

    pyct.generate_config(pyct.BINARY_DIRECTORY)
    pyct.generate_test_file(os.path.join(pyct.BINARY_DIRECTORY, "tests"))
    if not args.generate:
        pyct.run(pyct.ARGUMENTS, pyct.BINARY_DIRECTORY)
        if args.coverage:
            script = os.path.join(pyct.SOURCE_DIRECTORY, "cmake", "Scripts",
                                  "submit-coverage.sh")
            cov = pyct.command([script, pyct.BINARY_DIRECTORY])
            cov.SetWorkingDirectory(pyct.SOURCE_DIRECTORY)
            cov.Execute()
            print("{}".format(cov.Output()))
    else:
        print("BUILD_NAME: {}".format(pyct.BUILD_NAME))
Example #16
def run_pyctest():

    #--------------------------------------------------------------------------#
    # run argparse, checkout source, copy over files
    #
    args = configure()

    #--------------------------------------------------------------------------#
    # Compiler version
    #
    if os.environ.get("CXX") is None:
        os.environ["CXX"] = helpers.FindExePath("c++")
    cmd = pyctest.command([os.environ["CXX"], "-dumpversion"])
    cmd.SetOutputStripTrailingWhitespace(True)
    cmd.Execute()
    compiler_version = cmd.Output()

    #--------------------------------------------------------------------------#
    # Set the build name
    #
    pyctest.BUILD_NAME = "{} {} {} {} {} {}".format(
        pyctest.GetGitBranch(pyctest.SOURCE_DIRECTORY),
        platform.uname()[0], helpers.GetSystemVersionInfo(),
        platform.uname()[4],
        os.path.basename(os.path.realpath(os.environ["CXX"])),
        compiler_version)
    pyctest.BUILD_NAME = '-'.join(pyctest.BUILD_NAME.split())

    #--------------------------------------------------------------------------#
    #   build specifications
    #
    build_opts = {
        "GEANT_USE_ARCH": "OFF",
        "GEANT_USE_GPERF": "OFF",
        "GEANT_USE_SANITIZER": "OFF",
        "GEANT_USE_CLANG_TIDY": "OFF",
        "GEANT_USE_COVERAGE": "OFF",
        "PTL_USE_TBB": "OFF",
        "GEANT_BUILD_EXAMPLES": "ON",
        "GEANT_BUILD_TESTS": "ON",
        "PTL_BUILD_EXAMPLES": "ON"
    }

    if args.tbb:
        pyctest.BUILD_NAME = "{} tbb".format(pyctest.BUILD_NAME)
        build_opts["PTL_USE_TBB"] = "ON"
    if args.arch:
        pyctest.BUILD_NAME = "{} arch".format(pyctest.BUILD_NAME)
        build_opts["GEANT_USE_ARCH"] = "ON"
    if args.gperf:
        pyctest.BUILD_NAME = "{} gperf".format(pyctest.BUILD_NAME)
        build_opts["GEANT_USE_GPERF"] = "ON"
        warnings.warn(
            "Forcing build type to 'RelWithDebInfo' when gperf is enabled")
        pyctest.BUILD_TYPE = "RelWithDebInfo"
    if args.sanitizer:
        pyctest.BUILD_NAME = "{} asan".format(pyctest.BUILD_NAME)
        build_opts["GEANT_USE_SANITIZER"] = "ON"
    if args.no_static_analysis:
        build_opts["GEANT_USE_CLANG_TIDY"] = "OFF"
    if args.coverage:
        gcov_exe = helpers.FindExePath("gcov")
        if gcov_exe is not None:
            pyctest.COVERAGE_COMMAND = "{}".format(gcov_exe)
            build_opts["GEANT_USE_COVERAGE"] = "ON"
            warnings.warn(
                "Forcing build type to 'Debug' when coverage is enabled")
            pyctest.BUILD_TYPE = "Debug"

    # split and join with dashes
    pyctest.BUILD_NAME = '-'.join(pyctest.BUILD_NAME.replace('/', '-').split())

    # default options
    cmake_args = "-DCMAKE_BUILD_TYPE={} -DPTL_BUILD_EXAMPLES=ON".format(
        pyctest.BUILD_TYPE)

    # customized from args
    for key, val in build_opts.items():
        cmake_args = "{} -D{}={}".format(cmake_args, key, val)

    #--------------------------------------------------------------------------#
    # how to build the code
    #
    ctest_cmake_cmd = "${CTEST_CMAKE_COMMAND}"
    pyctest.CONFIGURE_COMMAND = "{} {} {}".format(ctest_cmake_cmd, cmake_args,
                                                  pyctest.SOURCE_DIRECTORY)

    #--------------------------------------------------------------------------#
    # how to build the code
    #
    pyctest.BUILD_COMMAND = "{} --build {} --target all".format(
        ctest_cmake_cmd, pyctest.BINARY_DIRECTORY)

    #--------------------------------------------------------------------------#
    # parallel build
    #
    if platform.system() != "Windows":
        pyctest.BUILD_COMMAND = "{} -- -j{} VERBOSE=1".format(
            pyctest.BUILD_COMMAND, mp.cpu_count())
    else:
        pyctest.BUILD_COMMAND = "{} -- /MP -A x64".format(
            pyctest.BUILD_COMMAND)

    #--------------------------------------------------------------------------#
    # how to update the code
    #
    git_exe = helpers.FindExePath("git")
    pyctest.UPDATE_COMMAND = "{}".format(git_exe)
    pyctest.set("CTEST_UPDATE_TYPE", "git")
    pyctest.set("CTEST_GIT_COMMAND", "{}".format(git_exe))

    #--------------------------------------------------------------------------#
    # find the CTEST_TOKEN_FILE
    #
    if args.pyctest_token_file is None and args.pyctest_token is None:
        home = helpers.GetHomePath()
        if home is not None:
            token_path = os.path.join(home,
                                      os.path.join(".tokens", "nersc-cdash"))
            if os.path.exists(token_path):
                pyctest.set("CTEST_TOKEN_FILE", token_path)

    #--------------------------------------------------------------------------#
    # construct a command
    #
    def construct_command(cmd, args, clobber=False):
        _cmd = []
        if args.gperf:
            _cmd.append(
                os.path.join(pyctest.BINARY_DIRECTORY, "gperf-cpu-profile.sh"))
            pyctest.add_note(pyctest.BINARY_DIRECTORY,
                             "gperf.cpu.prof.{}.0.txt".format(
                                 os.path.basename(cmd[0])),
                             clobber=clobber)
            pyctest.add_note(pyctest.BINARY_DIRECTORY,
                             "gperf.cpu.prof.{}.0.cum.txt".format(
                                 os.path.basename(cmd[0])),
                             clobber=False)
        #else:
        #    _cmd.append("./timem")
        _cmd.extend(cmd)
        return _cmd

    #--------------------------------------------------------------------------#
    # standard environment settings for tests, adds profile to notes
    #
    def test_env_settings(prof_fname, clobber=False, extra=""):
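        # prof_fname and clobber are currently unused here; unlike the
        # profiling-aware variant in the later PTL example, only PTL_NUM_THREADS
        # (plus any extra settings) is placed in the test environment.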
        return "PTL_NUM_THREADS={};{}".format(mp.cpu_count(), extra)

    #--------------------------------------------------------------------------#
    # create tests
    #
    pyctest.test(
        "test_tuple", construct_command(["./test_tuple"], args, clobber=True),
        {
            "WORKING_DIRECTORY": pyctest.BINARY_DIRECTORY,
            "LABELS": pyctest.PROJECT_NAME
        })
    pyctest.test(
        "test_memory", construct_command(["./test_memory"], args), {
            "WORKING_DIRECTORY": pyctest.BINARY_DIRECTORY,
            "LABELS": pyctest.PROJECT_NAME
        })
    pyctest.test(
        "bench_tuple", construct_command(["./bench_tuple"], args), {
            "WORKING_DIRECTORY": pyctest.BINARY_DIRECTORY,
            "LABELS": pyctest.PROJECT_NAME
        })
    pyctest.test(
        "bench_nvstd", construct_command(["./bench_nvstd"], args), {
            "WORKING_DIRECTORY": pyctest.BINARY_DIRECTORY,
            "LABELS": pyctest.PROJECT_NAME
        })
    pyctest.test(
        "track_manager_tuple",
        construct_command(["./track_manager_tuple"], args), {
            "WORKING_DIRECTORY": pyctest.BINARY_DIRECTORY,
            "LABELS": pyctest.PROJECT_NAME
        })

    tasking_suffix = ""
    if args.num_tasks != 16384:
        tasking_suffix = "_{}".format(args.num_tasks)
    test = pyctest.test()
    test.SetName("tasking{}".format(tasking_suffix))
    test.SetProperty("WORKING_DIRECTORY", pyctest.BINARY_DIRECTORY)
    test.SetProperty(
        "ENVIRONMENT",
        test_env_settings("cpu-prof-tasking",
                          clobber=True,
                          extra="NUM_TASKS={}".format(args.num_tasks)))
    test.SetProperty("RUN_SERIAL", "ON")
    test.SetProperty("LABELS", "PTL")
    test.SetCommand(construct_command(["./tasking"], args))

    test = pyctest.test()
    test.SetName("recursive_tasking")
    test.SetProperty("WORKING_DIRECTORY", pyctest.BINARY_DIRECTORY)
    test.SetProperty("ENVIRONMENT",
                     test_env_settings("cpu-prof-recursive-tasking"))
    test.SetProperty("RUN_SERIAL", "ON")
    test.SetProperty("LABELS", "PTL")
    test.SetCommand(construct_command(["./recursive_tasking"], args))

    if args.tbb:
        test = pyctest.test()
        test.SetName("tbb_tasking{}".format(tasking_suffix))
        test.SetProperty("WORKING_DIRECTORY", pyctest.BINARY_DIRECTORY)
        test.SetProperty(
            "ENVIRONMENT",
            test_env_settings("cpu-prof-tbb-tasking",
                              extra="NUM_TASKS={}".format(args.num_tasks)))
        test.SetProperty("RUN_SERIAL", "ON")
        test.SetProperty("LABELS", "PTL")
        test.SetCommand(construct_command(["./tbb_tasking"], args))

        test = pyctest.test()
        test.SetName("recursive_tbb_tasking")
        test.SetProperty("WORKING_DIRECTORY", pyctest.BINARY_DIRECTORY)
        test.SetProperty("ENVIRONMENT",
                         test_env_settings("cpu-prof-tbb-recursive-tasking"))
        test.SetProperty("RUN_SERIAL", "ON")
        test.SetProperty("LABELS", "PTL")
        test.SetCommand(construct_command(["./recursive_tbb_tasking"], args))

    # generate the dynamic tests
    pyctest.generate_config(pyctest.BINARY_DIRECTORY)
    pyctest.generate_test_file(
        os.path.join(pyctest.BINARY_DIRECTORY, "testing"))
    pyctest.run(pyctest.ARGUMENTS, pyctest.BINARY_DIRECTORY)
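
# Hypothetical invocation of the driver above. configure()'s argument parser is
# not shown here, so the script name and flags below are assumptions (flags
# assumed to mirror the attribute names used in run_pyctest()):
#
#   python ./pyctest-runner.py --tbb --coverage -- -VV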
Example #17
directory = "./"

# these are required

pyctest.PROJECT_NAME = "GENRAY"
pyctest.SOURCE_DIRECTORY = directory
pyctest.BINARY_DIRECTORY = directory

args = helpers.ArgumentParser(pyctest.PROJECT_NAME,
                              pyctest.SOURCE_DIRECTORY,
                              pyctest.BINARY_DIRECTORY).parse_args()

pyctest.BUILD_COMMAND = "make -f makefile_gfortran64"

test = pyctest.test()
test.SetName("test-CMod-LH-edge")
test.SetProperty("WORKING_DIRECTORY","00_Genray_Regression_Tests/ci-tests/test-CMod-LH-edge")
test.SetCommand(["../test.sh"])

test = pyctest.test()
test.SetName("test-CMod-LH-edge-id16")
test.SetProperty("WORKING_DIRECTORY","00_Genray_Regression_Tests/ci-tests/test-CMod-LH-edge-id16")
test.SetCommand(["../test.sh"])

test = pyctest.test()
test.SetName("EC-ITER-Centra-CD")
test.SetProperty("WORKING_DIRECTORY","00_Genray_Regression_Tests/ci-tests/test-EC-ITER-Centra-CD")
test.SetCommand(["../test.sh"])

Example #18
def run_pyctest():
    # run argparse, checkout source, copy over files
    args = configure()
    # Change the build name to something other than default
    pyctest.BUILD_NAME = "[{}] [{} {} {}] [Python ({}) {}]".format(
        pyctest.GetGitBranch(pyctest.SOURCE_DIRECTORY),
        platform.uname()[0],
        helpers.GetSystemVersionInfo(),
        platform.uname()[4],
        platform.python_implementation(),
        platform.python_version())
    # when coverage is enabled, we compile in debug so modify the build name
    # so that the history of test timing is not affected
    if args.coverage:
        pyctest.BUILD_NAME = "{} [coverage]".format(pyctest.BUILD_NAME)
    # remove any consecutive spaces
    while "  " in pyctest.BUILD_NAME:
        pyctest.BUILD_NAME = pyctest.BUILD_NAME.replace("  ", " ")
    # how to build the code
    pyctest.BUILD_COMMAND = "{} setup.py install".format(
        pyctest.PYTHON_EXECUTABLE)
    # generate the code coverage
    python_path = os.path.dirname(pyctest.PYTHON_EXECUTABLE)
    cover_exe = helpers.FindExePath("coverage", path=python_path)
    if args.coverage:
        gcov_cmd = helpers.FindExePath("gcov")
        if gcov_cmd is not None:
            pyctest.COVERAGE_COMMAND = "{}".format(gcov_cmd)
            pyctest.set("CTEST_COVERAGE_EXTRA_FLAGS", "-m")
            pyctest.set("CTEST_EXTRA_COVERAGE_GLOB", "{}/*.gcno".format(
                pyctest.SOURCE_DIRECTORY))
    else:
        # otherwise, only generate Python coverage
        pyctest.COVERAGE_COMMAND = "{};xml".format(cover_exe)
    # copy over files from os.getcwd() to pyctest.BINARY_DIR
    # (implicitly copies over PyCTest{Pre,Post}Init.cmake if they exist)
    copy_files = [os.path.join("benchmarking", "pyctest_tomopy_utils.py"),
                  os.path.join("benchmarking", "pyctest_tomopy_phantom.py"),
                  os.path.join("benchmarking", "pyctest_tomopy_rec.py")]
    pyctest.copy_files(copy_files)
    # find the CTEST_TOKEN_FILE
    home = helpers.GetHomePath()
    if home is not None:
        token_path = os.path.join(home, ".tokens", "nersc-tomopy")
        if os.path.exists(token_path):
            pyctest.set("CTEST_TOKEN_FILE", token_path)
    # create a CTest that checks we imported the correct module
    test = pyctest.test()
    test.SetName("correct_module")
    test.SetCommand([pyctest.PYTHON_EXECUTABLE, "-c",
                     "\"import os, sys, tomopy; " +
                     "print('using tomopy module: {}'.format(tomopy.__file__)); " +
                     "ret=0 if os.getcwd() in tomopy.__file__ else 1; " +
                     "sys.exit(ret)\""])
    # set directory to run test
    test.SetProperty("WORKING_DIRECTORY", pyctest.BINARY_DIRECTORY)
    test.SetProperty("ENVIRONMENT", "OMP_NUM_THREADS=1")
    # create a CTest that wraps "nosetest"
    test = pyctest.test()
    test.SetName("nosetests")
    nosetest_exe = helpers.FindExePath("nosetests", path=python_path)
    if nosetest_exe is None:
        nosetest_exe = helpers.FindExePath("nosetests")
    coverage_exe = helpers.FindExePath("coverage", path=python_path)
    if coverage_exe is None:
        coverage_exe = helpers.FindExePath("coverage")
    # python $(which coverage) run $(which nosetests)
    test.SetCommand([pyctest.PYTHON_EXECUTABLE, coverage_exe, "run",
                    nosetest_exe])
    # set directory to run test
    test.SetProperty("WORKING_DIRECTORY", pyctest.BINARY_DIRECTORY)
    test.SetProperty("ENVIRONMENT", "OMP_NUM_THREADS=1")
    # Generating C code coverage is enabled
    if args.coverage:
        # if generating C code coverage, generating the Python coverage
        # needs to be put inside a test (that runs after nosetest)
        # because pyctest.COVERAGE_COMMAND is used to generate GCov files
        coverage_cmd = ""
        if platform.system() != "Windows":
            cover_cmd = os.path.join(pyctest.SOURCE_DIRECTORY,
                                     "benchmarking", "generate_coverage.sh")
            coverage_cmd = [cover_cmd, pyctest.SOURCE_DIRECTORY]
        else:
            # don't attempt GCov on Windows
            cover_cmd = helpers.FindExePath("coverage", path=python_path)
            coverage_cmd = [cover_cmd, "xml"]
        test = pyctest.test()
        test.SetName("python_coverage")
        test.SetProperty("WORKING_DIRECTORY", pyctest.BINARY_DIRECTORY)
        test.SetProperty("DEPENDS", "nosetests")
        test.SetCommand(coverage_cmd)
    # If path to globus is provided, skip when generating C coverage (too long)
    if not args.coverage and args.globus_path is not None:
        phantom = "tomo_00001"
        h5file = os.path.join(args.globus_path, phantom, phantom + ".h5")
        if not os.path.exists(h5file):
            print("Warning! HDF5 file '{}' does not exists! "
                  "Skipping test...".format(h5file))
            h5file = None
        # loop over args.algorithms and create tests for each
        for algorithm in args.algorithms:
            test = pyctest.test()
            name = "{}_{}".format(phantom, algorithm)
            # append the iteration count to the test name if it differs from the default (10)
            if args.num_iter != 10:
                name = "{}_itr{}".format(name, args.num_iter)
            test.SetName(name)
            test.SetProperty("WORKING_DIRECTORY", pyctest.BINARY_DIRECTORY)
            test.SetProperty("TIMEOUT", "7200")  # 2 hour
            test.SetProperty("ENVIRONMENT", "OMP_NUM_THREADS=1")
            if h5file is None:
                test.SetCommand([pyctest.PYTHON_EXECUTABLE,
                                "-c",
                                "print(\"Path to Globus file '{}/{}.h5' not specified\")".format(
                                    phantom, phantom)])

            else:
                test.SetCommand([pyctest.PYTHON_EXECUTABLE,
                                ".//benchmarking/pyctest_tomopy_rec.py",
                                h5file,
                                "-a", algorithm,
                                "--type", "slice",
                                "-f", "jpeg",
                                "-S", "1",
                                "-c", "4",
                                "-o", "benchmarking/{}".format(name),
                                "-n", "{}".format(args.ncores),
                                "-i", "{}".format(args.num_iter)])
    # loop over args.phantoms, skip when generating C coverage (too long)
    if not args.coverage and not args.disable_phantom_tests:
        for phantom in args.phantoms:
            # create a test comparing all the args.algorithms
            test = pyctest.test()
            name = "{}_{}".format(phantom, "comparison")

            nsize = 512 if phantom != "shepp3d" else 128
            # if size customized, create unique test-name
            if args.phantom_size is not None and args.phantom_size != 512:
                nsize = (args.phantom_size if phantom != "shepp3d" else
                         int(args.phantom_size / 4))
                name = "{}_pix{}".format(name, nsize)
            # append the iteration count to the test name if it differs from the default (10)
            if args.num_iter != 10:
                name = "{}_itr{}".format(name, args.num_iter)

            test.SetName(name)
            test.SetProperty("WORKING_DIRECTORY", pyctest.BINARY_DIRECTORY)
            test.SetProperty("ENVIRONMENT", "OMP_NUM_THREADS=1")
            test.SetProperty("TIMEOUT", "10800")  # 3 hours
            ncores = args.ncores
            niters = args.num_iter
            if phantom == "shepp3d":
                test.SetProperty("RUN_SERIAL", "ON")
            test.SetCommand([pyctest.PYTHON_EXECUTABLE,
                            "./benchmarking/pyctest_tomopy_phantom.py",
                            "-p", phantom,
                            "-s", "{}".format(nsize),
                            "-A", "360",
                            "-f", "jpeg",
                            "-S", "1",
                            "-n", "{}".format(ncores),
                            "-i", "{}".format(niters),
                            "--output-dir", "benchmarking/{}".format(name),
                            "--compare"] + args.algorithms)
    # generate the CTestConfig.cmake and CTestCustom.cmake
    pyctest.generate_config(pyctest.BINARY_DIRECTORY)
    # generate the CTestTestfile.cmake file
    pyctest.generate_test_file(pyctest.BINARY_DIRECTORY)
    # run CTest
    pyctest.run(pyctest.ARGUMENTS, pyctest.BINARY_DIRECTORY)
Example #19
def run_pyctest():

    # ----------------------------------------------------------------------- #
    # run argparse, checkout source, copy over files
    #
    args = configure()

    # ----------------------------------------------------------------------- #
    # Compiler version
    #
    if os.environ.get("CXX") is None:
        os.environ["CXX"] = os.path.realpath(helpers.FindExePath("c++"))
    cmd = pyct.command([os.environ["CXX"], "-dumpversion"])
    cmd.SetOutputStripTrailingWhitespace(True)
    cmd.Execute()
    compiler_version = cmd.Output()

    # ----------------------------------------------------------------------- #
    # Set the build name
    #
    pyct.BUILD_NAME = "[{}] [{} {} {}] [{} {}]".format(
        pyct.GetGitBranch(pyct.SOURCE_DIRECTORY),
        platform.uname()[0],
        helpers.GetSystemVersionInfo(),
        platform.uname()[4],
        os.path.basename(os.environ["CXX"]),
        compiler_version,
    )

    # ----------------------------------------------------------------------- #
    #   build specifications
    #
    build_opts = {
        "PTL_USE_ARCH": "OFF",
        "PTL_USE_TBB": "OFF",
        "PTL_USE_SANITIZER": "OFF",
        "PTL_USE_CLANG_TIDY": "OFF",
        "PTL_USE_COVERAGE": "OFF",
        "PTL_USE_LOCKS": "ON" if args.use_locks else "OFF",
    }

    if args.tbb:
        pyct.BUILD_NAME = "{} [tbb]".format(pyct.BUILD_NAME)
        build_opts["PTL_USE_TBB"] = "ON"
    if args.arch:
        pyct.BUILD_NAME = "{} [arch]".format(pyct.BUILD_NAME)
        build_opts["PTL_USE_ARCH"] = "ON"
    if args.sanitizer:
        pyct.BUILD_NAME = "{} [{}]".format(pyct.BUILD_NAME, args.sanitizer_type)
        build_opts["PTL_USE_SANITIZER"] = "ON"
        build_opts["PTL_SANITIZER_TYPE"] = args.sanitizer_type
    if args.static_analysis:
        build_opts["PTL_USE_CLANG_TIDY"] = "ON"
    if args.coverage:
        gcov_exe = helpers.FindExePath("gcov")
        if gcov_exe is not None:
            pyct.COVERAGE_COMMAND = "{}".format(gcov_exe)
            build_opts["PTL_USE_COVERAGE"] = "ON"
            warnings.warn("Forcing build type to 'Debug' when coverage is enabled")
            pyct.BUILD_TYPE = "Debug"
    build_opts["BUILD_SHARED_LIBS"] = "ON" if "shared" in args.build_libs else "OFF"
    build_opts["BUILD_STATIC_LIBS"] = "ON" if "static" in args.build_libs else "OFF"
    pyct.BUILD_NAME = "{} [{}]".format(pyct.BUILD_NAME, pyct.BUILD_TYPE)

    # default options
    cmake_args = "-DCMAKE_BUILD_TYPE={} -DPTL_BUILD_EXAMPLES=ON".format(pyct.BUILD_TYPE)

    # customized from args
    for key, val in build_opts.items():
        cmake_args = "{} -D{}={}".format(cmake_args, key, val)

    # ----------------------------------------------------------------------- #
    # how to build the code
    #
    ctest_cmake_cmd = "${CTEST_CMAKE_COMMAND}"
    pyct.CONFIGURE_COMMAND = "{} {} {} {}".format(
        ctest_cmake_cmd, cmake_args, " ".join(pycm.ARGUMENTS), pyct.SOURCE_DIRECTORY
    )

    # ----------------------------------------------------------------------- #
    # how to build the code
    #
    pyct.BUILD_COMMAND = "{} --build {} --target all".format(
        ctest_cmake_cmd, pyct.BINARY_DIRECTORY
    )

    # ----------------------------------------------------------------------- #
    # parallel build
    #
    if not args.static_analysis:
        if platform.system() != "Windows":
            pyct.BUILD_COMMAND = "{} -- -j{} VERBOSE=1".format(
                pyct.BUILD_COMMAND, mp.cpu_count()
            )
        else:
            pyct.BUILD_COMMAND = "{} -- /MP -A x64".format(pyct.BUILD_COMMAND)

    # ----------------------------------------------------------------------- #
    # how to update the code
    #
    git_exe = helpers.FindExePath("git")
    pyct.UPDATE_COMMAND = "{}".format(git_exe)
    pyct.set("CTEST_UPDATE_TYPE", "git")
    pyct.set("CTEST_GIT_COMMAND", "{}".format(git_exe))

    # ----------------------------------------------------------------------- #
    # static analysis
    #
    clang_tidy_exe = helpers.FindExePath("clang-tidy")
    if clang_tidy_exe:
        pyct.set("CMAKE_CXX_CLANG_TIDY", "{};-checks=*".format(clang_tidy_exe))

    # ----------------------------------------------------------------------- #
    # find the CTEST_TOKEN_FILE
    #
    if args.pyctest_token_file is None and args.pyctest_token is None:
        home = helpers.GetHomePath()
        if home is not None:
            token_path = os.path.join(home, os.path.join(".tokens", "nersc-cdash"))
            if os.path.exists(token_path):
                pyct.set("CTEST_TOKEN_FILE", token_path)

    # ----------------------------------------------------------------------- #
    # construct a command
    #
    def construct_command(cmd, args):
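        # Pass-through wrapper: unlike the gperf-aware construct_command in the
        # earlier example, this one returns a copy of the command list unchanged.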
        _cmd = []
        _cmd.extend(cmd)
        return _cmd

    # ----------------------------------------------------------------------- #
    # standard environment settings for tests, adds profile to notes
    #
    def test_env_settings(prof_fname, clobber=False, extra=""):
        return "PTL_NUM_THREADS={};CPUPROFILE={};{}".format(
            mp.cpu_count(), prof_fname, extra
        )

    # pyct.set("ENV{GCOV_PREFIX}", pyct.BINARY_DIRECTORY)
    # pyct.set("ENV{GCOV_PREFIX_STRIP}", "4")

    # ----------------------------------------------------------------------- #
    # create tests
    #
    tasking_suffix = ""
    if args.num_tasks != 65536:
        tasking_suffix = "_{}".format(args.num_tasks)
    test = pyct.test()
    test.SetName("tasking{}".format(tasking_suffix))
    test.SetProperty("WORKING_DIRECTORY", pyct.BINARY_DIRECTORY)
    test.SetProperty(
        "ENVIRONMENT",
        test_env_settings(
            "cpu-prof-tasking",
            clobber=True,
            extra="NUM_TASKS={}".format(args.num_tasks),
        ),
    )
    test.SetProperty("RUN_SERIAL", "ON")
    test.SetCommand(construct_command(["./tasking"], args))

    test = pyct.test()
    test.SetName("recursive_tasking")
    test.SetProperty("WORKING_DIRECTORY", pyct.BINARY_DIRECTORY)
    test.SetProperty("ENVIRONMENT", test_env_settings("cpu-prof-recursive-tasking"))
    test.SetProperty("RUN_SERIAL", "ON")
    test.SetCommand(construct_command(["./recursive_tasking"], args))

    test = pyct.test()
    test.SetName("minimal")
    test.SetProperty("WORKING_DIRECTORY", pyct.BINARY_DIRECTORY)
    test.SetProperty("RUN_SERIAL", "ON")
    test.SetCommand(construct_command(["./ptl-minimal"], args))

    if args.tbb:
        test = pyct.test()
        test.SetName("tbb_minimal")
        test.SetProperty("WORKING_DIRECTORY", pyct.BINARY_DIRECTORY)
        test.SetProperty("RUN_SERIAL", "ON")
        test.SetProperty("ENVIRONMENT", "PTL_USE_TBB=ON")
        test.SetCommand(construct_command(["./ptl-minimal"], args))

        test = pyct.test()
        test.SetName("tbb_tasking{}".format(tasking_suffix))
        test.SetProperty("WORKING_DIRECTORY", pyct.BINARY_DIRECTORY)
        test.SetProperty(
            "ENVIRONMENT",
            test_env_settings(
                "cpu-prof-tbb-tasking",
                extra="NUM_TASKS={}".format(args.num_tasks),
            ),
        )
        test.SetProperty("RUN_SERIAL", "ON")
        test.SetCommand(construct_command(["./tbb_tasking"], args))

        test = pyct.test()
        test.SetName("recursive_tbb_tasking")
        test.SetProperty("WORKING_DIRECTORY", pyct.BINARY_DIRECTORY)
        test.SetProperty(
            "ENVIRONMENT", test_env_settings("cpu-prof-tbb-recursive-tasking")
        )
        test.SetProperty("RUN_SERIAL", "ON")
        test.SetCommand(construct_command(["./recursive_tbb_tasking"], args))

    pyct.generate_config(pyct.BINARY_DIRECTORY)
    pyct.generate_test_file(pyct.BINARY_DIRECTORY)
    pyct.run(pyct.ARGUMENTS, pyct.BINARY_DIRECTORY)
Example #20
File: worker.py Project: LIVVkit/dashboard
def run(build_profile, pyctest_args):
    pyctest.MODEL = build_profile["cdash_section"]
    pyctest.BUILD_NAME = build_profile["build_name"]

    # Test timeout comes from the build profile; otherwise default to 600 s (10 minutes)
    test_timeout = build_profile.get("test_timeout", 600)

    _ready_command = ["cp"]
    for cmd in ["configure_command", "build_command", "test_command"]:
        if cmd in build_profile:
            _ready_command.append(os.path.join(_HERE, build_profile[cmd]))
    _ready_command.append(".")
    ready_machine = pyctest.command(_ready_command)

    ready_machine.SetWorkingDirectory(pyctest.BINARY_DIRECTORY)
    ready_machine.SetErrorQuiet(False)
    ready_machine.Execute()
    helpers.Cleanup(pyctest.BINARY_DIRECTORY)

    # Retry a few times if the submission fails; this may help with the
    # intermittent problem of tests completing but not being submitted to CDash.
    # Also increase the delay between attempts.
    pyctest.SUBMIT_RETRY_COUNT = 3
    pyctest.SUBMIT_RETRY_DELAY = 45

    if "configure_command" in build_profile:
        pyctest.CONFIGURE_COMMAND = " ".join(
            ["bash", os.path.basename(build_profile["configure_command"])]
        )
        if "config_opts" in build_profile:
            # Add options to select BISICLES / CHOMBO versions
            pyctest.CONFIGURE_COMMAND += " {bisicles} {chombo}".format(
                **build_profile["config_opts"]
            )

    if "build_command" in build_profile:
        pyctest.BUILD_COMMAND = " ".join(
            ["bash", os.path.basename(build_profile["build_command"])]
        )

    if "tests" in build_profile:
        # Check links to see where BISICLES and Chombo point to
        if "BISICLES" in pyctest.BUILD_NAME:
            _bis_build = os.readlink(
                f"{build_profile['source_directory']}/BISICLES"
            ).split("_")[-1][:-1]
            _cho_build = os.readlink(
                f"{build_profile['source_directory']}/Chombo"
            ).split("_")[-1][:-1]
            pyctest.BUILD_NAME += f"_B{_bis_build[0].upper()}_C{_cho_build[0].upper()}"
        for test in build_profile["tests"]:
            test_runner = pyctest.test(properties={"TIMEOUT": f"{test_timeout:d}"})

            # Echo tests are comma separated so that two arguments are passed to
            # the bash script, avoiding string processing in bash. Such a test
            # appears in the yaml file as, e.g., "echo,Dome_restart_test", and the
            # test `name` becomes echo_Dome_restart_test to match the old style.
            # If there is no comma in the test name, the split/re-join has no effect.
            test_runner.SetName("_".join(test.split(",")))

            test_runner.SetCommand(
                [
                    "bash",
                    os.path.basename(build_profile["test_command"]),
                    *test.split(","),
                ]
            )
            test_runner.SetProperty("WORKING_DIRECTORY", pyctest.BINARY_DIRECTORY)
    pyctest.run(pyctest.ARGUMENTS)
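
# A hypothetical build_profile illustrating the keys read by run() above. The
# key names match what run() looks up; the values are placeholders and are not
# taken from the LIVVkit dashboard configuration.
example_profile = {
    "cdash_section": "Nightly",              # becomes pyctest.MODEL
    "build_name": "gnu-debug",               # becomes pyctest.BUILD_NAME
    "source_directory": "/path/to/source",   # used when resolving BISICLES/Chombo symlinks
    "configure_command": "configure.sh",     # copied into the binary directory and run via bash
    "build_command": "build.sh",
    "test_command": "run_test.sh",
    "test_timeout": 1200,                    # seconds; defaults to 600 when omitted
    "tests": ["echo,Dome_restart_test", "Dome_markers_test"],
}
# run(example_profile, pyctest_args=[])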
Example #21
def run_pyctest():

    repo_src = os.path.join(os.getcwd(), "tomopy-src")
    repo_bin = os.path.join(os.getcwd(), "tomopy-bin")
    helpers.RemovePath(repo_src)
    helpers.RemovePath(repo_bin)

    # Get pyctest argument parser that include PyCTest arguments
    parser = helpers.ArgumentParser(project_name="TomoPy",
                                    source_dir=repo_src,
                                    binary_dir=repo_bin,
                                    python_exe=sys.executable,
                                    vcs_type="git",
                                    ctest_args=["-VV"])

    default_repo_url = "https://github.com/tomopy/tomopy.git"
    default_repo_branch = "master"

    parser.add_argument("--repo-url",
                        help="Set the repository URL",
                        type=str,
                        default=default_repo_url)
    parser.add_argument("--repo-branch",
                        help="Branch of the repository",
                        type=str,
                        default=default_repo_branch)
    args = parser.parse_args()

    # executables
    pyexe = pyctest.PYTHON_EXECUTABLE
    pyexe_dir = os.path.dirname(pyexe)
    coverage_exe = os.path.join(pyexe_dir, "coverage")
    nosetests_exe = os.path.join(pyexe_dir, "nosetests")

    # Set the build name
    pyctest.BUILD_NAME = "[{}] [{} {} {}] [Python ({}) {}]".format(
        args.repo_branch,
        platform.uname()[0], helpers.GetSystemVersionInfo(),
        platform.uname()[4], platform.python_implementation(),
        platform.python_version())
    # Set the checkout command
    pyctest.CHECKOUT_COMMAND = "git clone -b {} {} {}".format(
        args.repo_branch, args.repo_url, pyctest.SOURCE_DIRECTORY)
    # Set the configuration command (copy to binary directory)
    pyctest.CONFIGURE_COMMAND = "cmake -E copy_directory {}/ {}/".format(
        pyctest.SOURCE_DIRECTORY, pyctest.BINARY_DIRECTORY)
    # Set the build command
    pyctest.BUILD_COMMAND = "{} setup.py build".format(
        pyctest.PYTHON_EXECUTABLE)

    #--------------------------------------------------------------------------#
    # create a CTest that wraps "nosetest"
    #
    pyctest.test(name="nosetests",
                 cmd=[pyexe, coverage_exe, "run", nosetests_exe],
                 properties={
                     "WORKING_DIRECTORY": pyctest.BINARY_DIRECTORY,
                     "TIMEOUT": "600",
                     "ENVIRONMENT": "OMP_NUM_THREADS=1"
                 })

    pyctest.run()
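
# Hypothetical invocation of the script above, using only the flags it defines
# itself (the actual file name is not shown, so the script name is a placeholder):
#
#   python ./pyctest_tomopy.py --repo-url=https://github.com/tomopy/tomopy.git --repo-branch=master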