Example #1
def configure(machobj, output_dir, macros_format, compiler, mpilib, debug,
              sysos, unit_testing=False):
    """Add Macros, Depends, and env_mach_specific files to a directory.

    Arguments:
    machobj - Machines argument for this machine.
    output_dir - Directory in which to place output.
    macros_format - Container containing the string 'Makefile' to produce
                    Makefile Macros output, and/or 'CMake' for CMake output.
    compiler - String containing the compiler vendor to configure for.
    mpilib - String containing the MPI implementation to configure for.
    debug - Boolean specifying whether debugging options are enabled.
    sysos - String containing the operating system of the target machine.
    unit_testing - Boolean specifying whether we're running unit tests (as
                   opposed to a system run)
    """
    # Macros generation.
    suffixes = {'Makefile': 'make', 'CMake': 'cmake'}
    macro_maker = Compilers(machobj, compiler=compiler, mpilib=mpilib)
    for form in macros_format:
        out_file_name = os.path.join(output_dir,"Macros."+suffixes[form])
        macro_maker.write_macros_file(macros_file=out_file_name, output_format=suffixes[form])

    _copy_depends_files(machobj.get_machine_name(), machobj.machines_dir, output_dir, compiler)
    _generate_env_mach_specific(output_dir, machobj, compiler, mpilib,
                                debug, sysos, unit_testing)
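
A minimal invocation sketch for this signature (the machine, compiler, and MPI values are illustrative assumptions, and the import paths may differ between CIME versions):

from CIME.XML.machines import Machines
from CIME.BuildTools.configure import configure  # assumed module path

machobj = Machines(machine="melvin")  # hypothetical machine entry in config_machines.xml
configure(machobj, "/path/to/output", ["Makefile"], "gnu", "mpich",
          debug=False, sysos="LINUX", unit_testing=False)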
Example #2
def configure(machobj,
              output_dir,
              macros_format,
              compiler,
              mpilib,
              debug,
              sysos,
              unit_testing=False):
    """Add Macros, Depends, and env_mach_specific files to a directory.

    Arguments:
    machobj - Machines argument for this machine.
    output_dir - Directory in which to place output.
    macros_format - Container containing the string 'Makefile' to produce
                    Makefile Macros output, and/or 'CMake' for CMake output.
    compiler - String containing the compiler vendor to configure for.
    mpilib - String containing the MPI implementation to configure for.
    debug - Boolean specifying whether debugging options are enabled.
    sysos - String containing the operating system of the target machine.
    unit_testing - Boolean specifying whether we're running unit tests (as
                   opposed to a system run)
    """
    # Macros generation.
    suffixes = {'Makefile': 'make', 'CMake': 'cmake'}
    macro_maker = Compilers(machobj, compiler=compiler, mpilib=mpilib)
    for form in macros_format:
        out_file_name = os.path.join(output_dir, "Macros." + suffixes[form])
        macro_maker.write_macros_file(macros_file=out_file_name,
                                      output_format=suffixes[form])

    _copy_depends_files(machobj.get_machine_name(), machobj.machines_dir,
                        output_dir, compiler)
    _generate_env_mach_specific(output_dir, machobj, compiler, mpilib, debug,
                                sysos, unit_testing)
Example #3
def configure(machobj,
              output_dir,
              macros_format,
              compiler,
              mpilib,
              debug,
              comp_interface,
              sysos,
              unit_testing=False,
              noenv=False,
              threaded=False,
              extra_machines_dir=None):
    """Add Macros, Depends, and env_mach_specific files to a directory.

    Arguments:
    machobj - Machines argument for this machine.
    output_dir - Directory in which to place output.
    macros_format - Container containing the string 'Makefile' to produce
                    Makefile Macros output, and/or 'CMake' for CMake output.
    compiler - String containing the compiler vendor to configure for.
    mpilib - String containing the MPI implementation to configure for.
    debug - Boolean specifying whether debugging options are enabled.
    sysos - String containing the operating system of the target machine.
    unit_testing - Boolean specifying whether we're running unit tests (as
                   opposed to a system run)
    extra_machines_dir - String giving path to an additional directory that will be
                         searched for a config_compilers.xml file.
    """
    # Macros generation.
    suffixes = {'Makefile': 'make', 'CMake': 'cmake'}

    new_cmake_macros_dir = Files(
        comp_interface=comp_interface).get_value("CMAKE_MACROS_DIR")
    macro_maker = None
    for form in macros_format:

        if form == "CMake" and new_cmake_macros_dir is not None and os.path.exists(
                new_cmake_macros_dir
        ) and not "CIME_NO_CMAKE_MACRO" in os.environ:
            if not os.path.isfile(os.path.join(output_dir, "Macros.cmake")):
                safe_copy(os.path.join(new_cmake_macros_dir, "Macros.cmake"),
                          output_dir)
            if not os.path.exists(os.path.join(output_dir, "cmake_macros")):
                shutil.copytree(new_cmake_macros_dir,
                                os.path.join(output_dir, "cmake_macros"))

        else:
            logger.warning("Using deprecated CIME makefile generators")
            if macro_maker is None:
                macro_maker = Compilers(machobj,
                                        compiler=compiler,
                                        mpilib=mpilib,
                                        extra_machines_dir=extra_machines_dir)

            out_file_name = os.path.join(output_dir,
                                         "Macros." + suffixes[form])
            macro_maker.write_macros_file(macros_file=out_file_name,
                                          output_format=suffixes[form])

    copy_depends_files(machobj.get_machine_name(), machobj.machines_dir,
                       output_dir, compiler)
    generate_env_mach_specific(output_dir,
                               machobj,
                               compiler,
                               mpilib,
                               debug,
                               comp_interface,
                               sysos,
                               unit_testing,
                               threaded,
                               noenv=noenv)
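
In this version the cmake_macros branch is taken only for the 'CMake' format, and only when CMAKE_MACROS_DIR exists and the CIME_NO_CMAKE_MACRO environment variable is unset; exporting that variable forces the deprecated Compilers-based generator for every format. A minimal sketch with illustrative argument values (only the variable's presence is checked, not its value):

import os

os.environ["CIME_NO_CMAKE_MACRO"] = "1"  # presence alone disables the cmake_macros path
configure(machobj, "/path/to/output", ["CMake"], "gnu", "mpich",
          False, "mct", "LINUX")  # logs the deprecation warning and writes Macros.cmake via Compilers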
Example #4
def _case_setup_impl(case, caseroot, casebaseid, clean=False, test_mode=False, reset=False):
###############################################################################
    os.chdir(caseroot)
    msg = "case.setup starting"
    append_status(msg, caseroot=caseroot, sfile="CaseStatus")

    cimeroot = os.environ["CIMEROOT"]

    # Check that $DIN_LOC_ROOT exists and abort if it does not, unless this is
    # a namelist-comparison (SBN) test
    din_loc_root = case.get_value("DIN_LOC_ROOT")
    testcase     = case.get_value("TESTCASE")
    expect(not (not os.path.isdir(din_loc_root) and testcase != "SBN"),
           "inputdata root is not a directory: \"%s\"" % din_loc_root)

    # Check that required user-defined settings have been specified before expanding variables
    for vid, value in case:
        expect(not (type(value) is str and "USERDEFINED_required_build" in value),
               "Parameter '%s' must be defined" % vid)

    # Create batch script
    if reset or clean:
        # Clean batch script

        backup_dir = "PESetupHist/b.%s" % time.strftime("%y%m%d-%H%M%S")
        if not os.path.isdir(backup_dir):
            os.makedirs(backup_dir)

        # back up relevant files
        for fileglob in ["case.run", "env_build.xml", "env_mach_pes.xml", "Macros*"]:
            for filename in glob.glob(fileglob):
                shutil.copy(filename, backup_dir)
        if os.path.exists("case.run"):
            os.remove("case.run")

        # only do the following if we are NOT in test mode
        if not test_mode:
            # rebuild the models (even on restart)
            case.set_value("BUILD_COMPLETE", False)

            # backup and then clean test script
            if os.path.exists("case.test"):
                shutil.copy("case.test", backup_dir)
                os.remove("case.test")
                logger.info("Successfully cleaned test script case.test")

            if os.path.exists("case.testdriver"):
                shutil.copy("case.testdriver", backup_dir)
                os.remove("case.testdriver")
                logger.info("Successfully cleaned test script case.testdriver")

        logger.info("Successfully cleaned batch script case.run")

        logger.info("Successfully cleaned batch script case.run")
        logger.info("Some files have been saved to %s" % backup_dir)

        msg = "case.setup clean complete"
        append_status(msg, caseroot=caseroot, sfile="CaseStatus")

    if not clean:
        drv_comp = Component()
        models = drv_comp.get_valid_model_components()
        models.remove("DRV")

        mach, compiler, debug, mpilib = \
            case.get_value("MACH"), case.get_value("COMPILER"), case.get_value("DEBUG"), case.get_value("MPILIB")
        expect(mach is not None, "xml variable MACH is not set")

        # Create Macros file only if it does not exist
        if not os.path.exists("Macros"):
            logger.debug("Creating Macros file for %s" % mach)
            compilers = Compilers(compiler=compiler, machine=mach, os_=case.get_value("OS"), mpilib=mpilib)
            compilers.write_macros_file()
        else:
            logger.debug("Macros script already created ...skipping")

        # Set tasks to 1 if mpi-serial library
        if mpilib == "mpi-serial":
            for vid, value in case:
                if vid.startswith("NTASKS_") and value != 1:
                    case.set_value(vid, 1)

        # Check ninst.
        # In CIME there can be multiple instances of each component model (an
        # ensemble); NINST is the number of instances of that component.
        # Save ninst in a dict to use later in apply_user_mods
        ninst = dict()
        for comp in models:
            comp_model = case.get_value("COMP_%s" % comp)
            ninst[comp_model]  = case.get_value("NINST_%s" % comp)
            ntasks = case.get_value("NTASKS_%s" % comp)
            if ninst[comp_model] > ntasks:
                if ntasks == 1:
                    case.set_value("NTASKS_%s" % comp, ninst[comp_model])
                else:
                    expect(False, "NINST_%s value %d greater than NTASKS_%s %d" % (comp, ninst[comp_model], comp, ntasks))

        expect(not (case.get_value("BUILD_THREADED") and compiler == "nag"),
               "it is not possible to run with OpenMP if using the NAG Fortran compiler")

        if os.path.exists("case.run"):
            logger.info("Machine/Decomp/Pes configuration has already been done ...skipping")
        else:
            _check_pelayouts_require_rebuild(case, models)

            if os.path.exists("LockedFiles/env_build.xml"):
                os.remove("LockedFiles/env_build.xml")

            case.flush()
            check_lockedfiles()

            tm = TaskMaker(case)
            mtpn = case.get_value("MAX_TASKS_PER_NODE")
            pespn = case.get_value("PES_PER_NODE")
            # This is hardcoded because on yellowstone by default we
            # run with 15 pes per node
            # but pay for 16 pes per node.  See github issue #518
            if case.get_value("MACH") == "yellowstone":
                pespn = 16
            pestot = tm.totaltasks
            if mtpn > pespn:
                pestot = pestot * (mtpn // pespn)
                case.set_value("COST_PES", tm.num_nodes*pespn)
            else:
                # reset cost_pes to totalpes
                case.set_value("COST_PES", 0)

            case.set_value("TOTALPES", pestot)

            # Compute cost based on PE count
            pval = 1
            pcnt = 0
            while pval < pestot:
                pval *= 2
                pcnt += 6 # (scaling like sqrt(6/10))
            pcost = 3 - pcnt / 10 # (3 is 64 with 6)

            # Compute cost based on DEBUG
            dcost = 3 if debug else 0

            # Compute cost based on run length
            # For simplicity, we use a heuristic just based on STOP_OPTION (not considering
            # STOP_N), and only deal with options longer than ndays
            lcost = 0
            if "nmonth" in case.get_value("STOP_OPTION"):
                # N months costs 30x as much as N days; since cost is based on log-base-2, add 5
                lcost = 5
            elif "nyear" in case.get_value("STOP_OPTION"):
                # N years costs 365x as much as N days; since cost is based on log-base-2, add 9
                lcost = 9

            estcost = pcost + dcost + lcost
            for cost in ["CCSM_CCOST", "CCSM_GCOST", "CCSM_TCOST", "CCSM_CCOST"]:
                estcost += case.get_value(cost)

            case.set_value("CCSM_PCOST", pcost)
            case.set_value("CCSM_ESTCOST", estcost)

            # create batch file
            logger.info("Creating batch script case.run")

            # Use BatchFactory to get the appropriate instance of a BatchMaker
            # and use it to create our batch scripts
            env_batch = case.get_env("batch")
            for job in env_batch.get_jobs():
                input_batch_script  = os.path.join(case.get_value("MACHDIR"), env_batch.get_value('template', subgroup=job))
                if job == "case.test" and testcase is not None and not test_mode:
                    logger.info("Writing %s script" % job)
                    testscript = os.path.join(cimeroot, "scripts", "Testing", "Testcases", "%s_script" % testcase)
                    # Short term fix to be removed when csh tests are removed
                    if not os.path.exists(testscript):
                        env_batch.make_batch_script(input_batch_script, job, case)
                elif job != "case.test":
                    logger.info("Writing %s script" % job)
                    env_batch.make_batch_script(input_batch_script, job, case)

            # Make a copy of env_mach_pes.xml in order to be able
            # to check that it does not change once case.setup is invoked
            logger.info("Locking file env_mach_pes.xml")
            case.flush()
            shutil.copy("env_mach_pes.xml", "LockedFiles")

        # Create user_nl files for the required number of instances
        if not os.path.exists("user_nl_cpl"):
            logger.info("Creating user_nl_xxx files for components and cpl")
        # loop over models
        for model in models:
            comp = case.get_value("COMP_%s" % model)
            logger.info("Building %s usernl files"%model)
            _build_usernl_files(case, model, comp)
            if comp == "cism":
                run_cmd_no_fail("%s/../components/cism/cime_config/cism.template %s" % (cimeroot, caseroot))

        _build_usernl_files(case, "drv", "cpl")

        user_mods_path = case.get_value("USER_MODS_FULLPATH")
        if user_mods_path is not None:
            apply_user_mods(caseroot, user_mods_path=user_mods_path, ninst=ninst)
        elif case.get_value("TEST"):
            test_mods = parse_test_name(casebaseid)[6]
            if test_mods is not None:
                user_mods_path = os.path.join(case.get_value("TESTS_MODS_DIR"), test_mods)
                apply_user_mods(caseroot, user_mods_path=user_mods_path, ninst=ninst)


        # Run preview namelists for scripts
        logger.info("preview_namelists")
        preview_namelists(case)

        logger.info("See ./CaseDoc for component namelists")
        logger.info("If an old case build already exists, might want to run \'case.build --clean\' before building")

        # Create test script if appropriate
        # Short term fix to be removed when csh tests are removed
        if os.path.exists("env_test.xml"):
            if not os.path.exists("case.test"):
                logger.info("Starting testcase.setup")
                run_cmd_no_fail("./testcase.setup -caseroot %s" % caseroot)
                logger.info("Finished testcase.setup")

        msg = "case.setup complete"
        append_status(msg, caseroot=caseroot, sfile="CaseStatus")

        # Record env information
        env_module = case.get_env("mach_specific")
        env_module.make_env_mach_specific_file(compiler, debug, mpilib, "sh")
        env_module.make_env_mach_specific_file(compiler, debug, mpilib, "csh")
        with open("software_environment.txt", "w") as f:
            f.write(env_module.list_modules())
        run_cmd_no_fail("echo -e '\n' >> software_environment.txt && \
                         env >> software_environment.txt")
Example #5
def configure(
    machobj,
    output_dir,
    macros_format,
    compiler,
    mpilib,
    debug,
    comp_interface,
    sysos,
    unit_testing=False,
    noenv=False,
    threaded=False,
    extra_machines_dir=None,
):
    """Add Macros, Depends, and env_mach_specific files to a directory.

    Arguments:
    machobj - Machines argument for this machine.
    output_dir - Directory in which to place output.
    macros_format - Container containing the string 'Makefile' to produce
                    Makefile Macros output, and/or 'CMake' for CMake output.
    compiler - String containing the compiler vendor to configure for.
    mpilib - String containing the MPI implementation to configure for.
    debug - Boolean specifying whether debugging options are enabled.
    sysos - String containing the operating system of the target machine.
    unit_testing - Boolean specifying whether we're running unit tests (as
                   opposed to a system run)
    extra_machines_dir - String giving path to an additional directory that will be
                         searched for a config_compilers.xml file.
    """
    # Macros generation.
    suffixes = {"Makefile": "make", "CMake": "cmake"}

    new_cmake_macros_dir = Files(
        comp_interface=comp_interface).get_value("CMAKE_MACROS_DIR")
    macro_maker = None
    for form in macros_format:

        if (new_cmake_macros_dir is not None
                and os.path.exists(new_cmake_macros_dir)
                and "CIME_NO_CMAKE_MACRO" not in os.environ):

            if not os.path.isfile(os.path.join(output_dir, "Macros.cmake")):
                safe_copy(os.path.join(new_cmake_macros_dir, "Macros.cmake"),
                          output_dir)
            if not os.path.exists(os.path.join(output_dir, "cmake_macros")):
                shutil.copytree(new_cmake_macros_dir,
                                os.path.join(output_dir, "cmake_macros"))

            # Grab macros from extra machine dir if it was provided
            if extra_machines_dir:
                extra_cmake_macros = glob.glob(
                    "{}/cmake_macros/*.cmake".format(extra_machines_dir))
                for extra_cmake_macro in extra_cmake_macros:
                    safe_copy(extra_cmake_macro, new_cmake_macros_dir)

            if form == "Makefile":
                # Use the cmake macros to generate the make macros
                cmake_args = " -DOS={} -DMACH={} -DCOMPILER={} -DDEBUG={} -DMPILIB={} -Dcompile_threaded={} -DCASEROOT={}".format(
                    sysos,
                    machobj.get_machine_name(),
                    compiler,
                    stringify_bool(debug),
                    mpilib,
                    stringify_bool(threaded),
                    output_dir,
                )

                with CmakeTmpBuildDir(macroloc=output_dir) as cmaketmp:
                    output = cmaketmp.get_makefile_vars(cmake_args=cmake_args)

                with open(os.path.join(output_dir, "Macros.make"), "w") as fd:
                    fd.write(output)

        else:
            logger.warning("Using deprecated CIME makefile generators")
            if macro_maker is None:
                macro_maker = Compilers(
                    machobj,
                    compiler=compiler,
                    mpilib=mpilib,
                    extra_machines_dir=extra_machines_dir,
                )

            out_file_name = os.path.join(output_dir,
                                         "Macros." + suffixes[form])
            macro_maker.write_macros_file(macros_file=out_file_name,
                                          output_format=suffixes[form])

    copy_depends_files(machobj.get_machine_name(), machobj.machines_dir,
                       output_dir, compiler)
    generate_env_mach_specific(
        output_dir,
        machobj,
        compiler,
        mpilib,
        debug,
        comp_interface,
        sysos,
        unit_testing,
        threaded,
        noenv=noenv,
    )
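
A minimal invocation sketch for this newer signature, reusing the imports from the sketch after Example #1 (the machine, compiler, MPI, and driver-interface values are illustrative assumptions):

machobj = Machines(machine="docker")  # hypothetical machine entry
configure(machobj, "/tmp/case", ["Makefile", "CMake"], "gnu", "openmpi",
          False, "nuopc", "LINUX",  # debug, comp_interface, sysos
          unit_testing=False, threaded=False)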
Example #6
def _case_setup_impl(case, caseroot, casebaseid, clean=False, test_mode=False, reset=False):
###############################################################################
    os.chdir(caseroot)
    msg = "case.setup starting"
    append_status(msg, caseroot=caseroot, sfile="CaseStatus")

    cimeroot = get_cime_root(case)

    # Check that $DIN_LOC_ROOT exists and abort if it does not, unless this is
    # a namelist-comparison (SBN) test
    din_loc_root = case.get_value("DIN_LOC_ROOT")
    testcase     = case.get_value("TESTCASE")
    expect(not (not os.path.isdir(din_loc_root) and testcase != "SBN"),
           "inputdata root is not a directory: \"%s\"" % din_loc_root)

    # Check that required user-defined settings have been specified before expanding variables
    for vid, value in case:
        expect(not (type(value) is str and "USERDEFINED_required_build" in value),
               "Parameter '%s' must be defined" % vid)

    # Create batch script
    if reset or clean:
        # Clean batch script

        backup_dir = "PESetupHist/b.%s" % time.strftime("%y%m%d-%H%M%S")
        if not os.path.isdir(backup_dir):
            os.makedirs(backup_dir)

        # back up relevant files
        for fileglob in ["case.run", "env_build.xml", "env_mach_pes.xml", "Macros*"]:
            for filename in glob.glob(fileglob):
                shutil.copy(filename, backup_dir)
        if os.path.exists("case.run"):
            os.remove("case.run")

        # only do the following if we are NOT in test mode
        if not test_mode:
            # rebuild the models (even on restart)
            case.set_value("BUILD_COMPLETE", False)

            # backup and then clean test script
            if os.path.exists("case.test"):
                shutil.copy("case.test", backup_dir)
                os.remove("case.test")
                logger.info("Successfully cleaned test script case.test")

            if os.path.exists("case.testdriver"):
                shutil.copy("case.testdriver", backup_dir)
                os.remove("case.testdriver")
                logger.info("Successfully cleaned test script case.testdriver")

        logger.info("Successfully cleaned batch script case.run")

        logger.info("Successfully cleaned batch script case.run")
        logger.info("Some files have been saved to %s" % backup_dir)

        msg = "case.setup clean complete"
        append_status(msg, caseroot=caseroot, sfile="CaseStatus")

    if not clean:
        models = case.get_values("COMP_CLASSES")

        mach, compiler, debug, mpilib = \
            case.get_value("MACH"), case.get_value("COMPILER"), case.get_value("DEBUG"), case.get_value("MPILIB")
        expect(mach is not None, "xml variable MACH is not set")

        # Create Macros file only if it does not exist
        if not os.path.exists("Macros"):
            logger.debug("Creating Macros file for %s" % mach)
            compilers = Compilers(compiler=compiler, machine=mach, os_=case.get_value("OS"), mpilib=mpilib)
            compilers.write_macros_file()
        else:
            logger.debug("Macros script already created ...skipping")

        # Set tasks to 1 if mpi-serial library
        if mpilib == "mpi-serial":
            for vid, value in case:
                if vid.startswith("NTASKS_") and value != 1:
                    case.set_value(vid, 1)

        # Check ninst.
        # In CIME there can be multiple instances of each component model (an
        # ensemble); NINST is the number of instances of that component.
        # Save ninst in a dict to use later in apply_user_mods
        ninst = dict()
        for comp in models:
            if comp == "DRV":
                continue
            comp_model = case.get_value("COMP_%s" % comp)
            ninst[comp_model]  = case.get_value("NINST_%s" % comp)
            ntasks = case.get_value("NTASKS_%s" % comp)
            if ninst[comp_model] > ntasks:
                if ntasks == 1:
                    case.set_value("NTASKS_%s" % comp, ninst[comp_model])
                else:
                    expect(False, "NINST_%s value %d greater than NTASKS_%s %d" % (comp, ninst[comp_model], comp, ntasks))

        if os.path.exists("case.run"):
            logger.info("Machine/Decomp/Pes configuration has already been done ...skipping")
        else:
            _check_pelayouts_require_rebuild(case, models)

            if os.path.exists("LockedFiles/env_build.xml"):
                os.remove("LockedFiles/env_build.xml")

            case.flush()
            check_lockedfiles()
            env_mach_pes = case.get_env("mach_pes")
            pestot = env_mach_pes.get_total_tasks(models)
            logger.debug("at update TOTALPES = %s"%pestot)
            case.set_value("TOTALPES", pestot)
            thread_count = env_mach_pes.get_max_thread_count(models)
            if thread_count > 1:
                case.set_value("BUILD_THREADED", True)

            expect(not (case.get_value("BUILD_THREADED")  and compiler == "nag"),
                   "it is not possible to run with OpenMP if using the NAG Fortran compiler")


            cost_pes = env_mach_pes.get_cost_pes(pestot, thread_count, machine=case.get_value("MACH"))
            case.set_value("COST_PES", cost_pes)

            # create batch file
            logger.info("Creating batch script case.run")

            # Use BatchFactory to get the appropriate instance of a BatchMaker
            # and use it to create our batch scripts
            env_batch = case.get_env("batch")
            num_nodes = env_mach_pes.get_total_nodes(pestot, thread_count)
            tasks_per_node = env_mach_pes.get_tasks_per_node(pestot, thread_count)
            for job in env_batch.get_jobs():
                input_batch_script  = os.path.join(case.get_value("MACHDIR"), env_batch.get_value('template', subgroup=job))
                if job == "case.test" and testcase is not None and not test_mode:
                    logger.info("Writing %s script" % job)
                    testscript = os.path.join(cimeroot, "scripts", "Testing", "Testcases", "%s_script" % testcase)
                    # Short term fix to be removed when csh tests are removed
                    if not os.path.exists(testscript):
                        env_batch.make_batch_script(input_batch_script, job, case, pestot, tasks_per_node, num_nodes, thread_count)
                elif job != "case.test":
                    logger.info("Writing %s script from input template %s" % (job, input_batch_script))
                    env_batch.make_batch_script(input_batch_script, job, case, pestot, tasks_per_node, num_nodes, thread_count)

            # Make a copy of env_mach_pes.xml in order to be able
            # to check that it does not change once case.setup is invoked
            logger.info("Locking file env_mach_pes.xml")
            case.flush()
            logger.debug("at copy TOTALPES = %s"%case.get_value("TOTALPES"))
            shutil.copy("env_mach_pes.xml", "LockedFiles")

        # Create user_nl files for the required number of instances
        if not os.path.exists("user_nl_cpl"):
            logger.info("Creating user_nl_xxx files for components and cpl")
        # loop over models
        for model in models:
            comp = case.get_value("COMP_%s" % model)
            logger.info("Building %s usernl files"%model)
            _build_usernl_files(case, model, comp)
            if comp == "cism":
                run_cmd_no_fail("%s/../components/cism/cime_config/cism.template %s" % (cimeroot, caseroot))

        _build_usernl_files(case, "drv", "cpl")

        user_mods_path = case.get_value("USER_MODS_FULLPATH")
        if user_mods_path is not None:
            apply_user_mods(caseroot, user_mods_path=user_mods_path, ninst=ninst)
        elif case.get_value("TEST"):
            test_mods = parse_test_name(casebaseid)[6]
            if test_mods is not None:
                user_mods_path = os.path.join(case.get_value("TESTS_MODS_DIR"), test_mods)
                apply_user_mods(caseroot, user_mods_path=user_mods_path, ninst=ninst)


        # Run preview namelists for scripts
        logger.info("preview_namelists")
        preview_namelists(case)

        logger.info("See ./CaseDoc for component namelists")
        logger.info("If an old case build already exists, might want to run \'case.build --clean\' before building")

        # Create test script if appropriate
        # Short term fix to be removed when csh tests are removed
        if os.path.exists("env_test.xml"):
            if not os.path.exists("case.test"):
                logger.info("Starting testcase.setup")
                run_cmd_no_fail("./testcase.setup -caseroot %s" % caseroot)
                logger.info("Finished testcase.setup")

        msg = "case.setup complete"
        append_status(msg, caseroot=caseroot, sfile="CaseStatus")

        # Record env information
        env_module = case.get_env("mach_specific")
        env_module.make_env_mach_specific_file(compiler, debug, mpilib, "sh")
        env_module.make_env_mach_specific_file(compiler, debug, mpilib, "csh")
        with open("software_environment.txt", "w") as f:
            f.write(env_module.list_modules())
        run_cmd_no_fail("echo -e '\n' >> software_environment.txt && \
                         env >> software_environment.txt")